Dataset schema (column name and dtype):

| Column | Dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
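For working with rows like these programmatically, here is a minimal loading-and-filtering sketch. The Parquet file name is hypothetical (point it at your local copy), and `pandas` is just one of several reasonable options. The sample rows that follow illustrate what these fields look like in practice.

```python
import pandas as pd

# Hypothetical file name; adjust to wherever your copy of the dataset lives.
df = pd.read_parquet("code_samples.parquet")

# Keep Python files that parse (cate_ast == 1) and are not autogenerated.
keep = (
    (df["lang"] == "Python")
    & (df["qsc_codepython_cate_ast_quality_signal"] == 1)
    & (df["qsc_code_cate_autogen_quality_signal"] == 0)
)
print(df.loc[keep, ["max_stars_repo_name", "size", "hits"]].head())
```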
**Example 1**

| Field | Value |
|---|---|
| hexsha | c465037232783d3cca59122d250bf99eb18138f8 |
| size | 200 |
| ext | py |
| lang | Python |
| max_stars_repo_path | `learning/import_self_module.py` |
| max_stars_repo_name | seasonfif/python |
| max_stars_repo_head_hexsha | e165826526af7f1a8336b9db461abfaaed57567a |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | `learning/import_self_module.py` |
| max_issues_repo_name | seasonfif/python |
| max_issues_repo_head_hexsha | e165826526af7f1a8336b9db461abfaaed57567a |
| max_issues_repo_licenses | ["Apache-2.0"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | `learning/import_self_module.py` |
| max_forks_repo_name | seasonfif/python |
| max_forks_repo_head_hexsha | e165826526af7f1a8336b9db461abfaaed57567a |
| max_forks_repo_licenses | ["Apache-2.0"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

content:

```python
# import selfmodule.print_func as pf
# pf.print_func(1,2,2,2,2,2)
# from selfmodule.print_func import print_func
# print_func(1,1,1)
from selfmodule import print_func
print_func.print_func(1,1,1,1)
```

| Field | Value |
|---|---|
| avg_line_length | 22.222222 |
| max_line_length | 46 |
| alphanum_fraction | 0.775 |

Quality signals; each name below expands to the column pair qsc_&lt;name&gt;_quality_signal and qsc_&lt;name&gt;:

| Signal | quality_signal | raw |
|---|---|---|
| code_num_words | 40 | 0 |
| code_num_chars | 200 | 0 |
| code_mean_word_length | 3.675 | 0 |
| code_frac_words_unique | 0.225 | null |
| code_frac_chars_top_2grams | 0.489796 | 1 |
| code_frac_chars_top_3grams | 0.204082 | 1 |
| code_frac_chars_top_4grams | 0.367347 | 1 |
| code_frac_chars_dupe_5grams | 0.428571 | 0 |
| code_frac_chars_dupe_6grams | 0.285714 | 0 |
| code_frac_chars_dupe_7grams | 0.285714 | 0 |
| code_frac_chars_dupe_8grams | 0 | 0 |
| code_frac_chars_dupe_9grams | 0 | 0 |
| code_frac_chars_dupe_10grams | 0 | 0 |
| code_frac_chars_replacement_symbols | 0 | 0 |
| code_frac_chars_digital | 0.072626 | 0 |
| code_frac_chars_whitespace | 0.105 | 0 |
| code_size_file_byte | 200 | 0 |
| code_num_lines | 9 | 1 |
| code_num_chars_line_max | 47 | 0 |
| code_num_chars_line_mean | 22.222222 | 0 |
| code_frac_chars_alphabet | 0.748603 | 0 |
| code_frac_chars_comments | 0.62 | 0 |
| code_cate_xml_start | 0 | 0 |
| code_frac_lines_dupe_lines | 0 | 0 |
| code_cate_autogen | 0 | 0 |
| code_frac_lines_long_string | 0 | 0 |
| code_frac_chars_string_length | 0 | 0 |
| code_frac_chars_long_word_length | 0 | 0 |
| code_frac_lines_string_concat | 0 | null |
| code_cate_encoded_data | 0 | 0 |
| code_frac_chars_hex_words | 0 | 0 |
| code_frac_lines_prompt_comments | 0 | 0 |
| code_frac_lines_assert | 0 | 0 |
| codepython_cate_ast | 1 | 0 |
| codepython_frac_lines_func_ratio | 0 | 0 |
| codepython_cate_var_zero | true | 1 |
| codepython_frac_lines_pass | 0 | 0 |
| codepython_frac_lines_import | 0.5 | 1 |
| codepython_frac_lines_simplefunc | 0 | 0 |
| codepython_score_lines_no_logic | 0.5 | 0 |
| codepython_frac_lines_print | 1 | 1 |

| Field | Value |
|---|---|
| effective | 0 |
| hits | 7 |
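Many of the scalar signals above are simple functions of `content`: for this row, 200 characters over 9 raw lines gives exactly the reported avg_line_length of 22.222222. Below is a rough sketch of how such shape-based signals can be recomputed; the pipeline's exact definitions (blank-line handling, word tokenization, whitespace rules) are assumptions here, so small discrepancies are expected.

```python
# Hypothetical re-implementation of a few shape-based quality signals.
def shape_signals(content: str) -> dict:
    lines = content.splitlines()
    words = content.split()
    n_chars = len(content)
    return {
        "num_lines": len(lines),
        "num_chars": n_chars,
        # Mean characters per line, counting newlines toward the total.
        "avg_line_length": n_chars / max(len(lines), 1),
        "max_line_length": max((len(line) for line in lines), default=0),
        "alphanum_fraction": sum(c.isalnum() for c in content) / max(n_chars, 1),
        "frac_words_unique": len(set(words)) / max(len(words), 1),
        # Fraction of lines that are '#' comments (a crude comment proxy).
        "frac_lines_comments": sum(line.lstrip().startswith("#") for line in lines)
        / max(len(lines), 1),
    }

sample = "from selfmodule import print_func\nprint_func.print_func(1,1,1,1)\n"
print(shape_signals(sample))
```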
**Example 2**

| Field | Value |
|---|---|
| hexsha | 670333b028a9e10bba68f08c063d93b0ad7c6807 |
| size | 285 |
| ext | py |
| lang | Python |
| max_stars_repo_path | `demos/selective_dualarm_stowing/python/selective_dualarm_stowing/datasets/__init__.py` |
| max_stars_repo_name | pazeshun/jsk_apc |
| max_stars_repo_head_hexsha | 0ff42000ad5992f8a31e719a5360a39cf4fa1fde |
| max_stars_repo_licenses | ["BSD-3-Clause"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | `demos/selective_dualarm_stowing/python/selective_dualarm_stowing/datasets/__init__.py` |
| max_issues_repo_name | pazeshun/jsk_apc |
| max_issues_repo_head_hexsha | 0ff42000ad5992f8a31e719a5360a39cf4fa1fde |
| max_issues_repo_licenses | ["BSD-3-Clause"] |
| max_issues_count | 2 |
| max_issues_repo_issues_event_min_datetime | 2019-04-11T05:36:23.000Z |
| max_issues_repo_issues_event_max_datetime | 2019-08-19T12:58:10.000Z |
| max_forks_repo_path | `demos/selective_dualarm_stowing/python/selective_dualarm_stowing/datasets/__init__.py` |
| max_forks_repo_name | pazeshun/jsk_apc |
| max_forks_repo_head_hexsha | 0ff42000ad5992f8a31e719a5360a39cf4fa1fde |
| max_forks_repo_licenses | ["BSD-3-Clause"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

content:

```python
from .dataset import DualarmDatasetV1 # NOQA
from .dataset import DualarmDatasetV2 # NOQA
from .dataset import DualarmDatasetV3 # NOQA
from .dataset import DualarmDatasetV4 # NOQA
from .dataset import DualarmDatasetV5 # NOQA
from .dataset import SinglearmFailureDatasetV4 # NOQA
```

| Field | Value |
|---|---|
| avg_line_length | 40.714286 |
| max_line_length | 54 |
| alphanum_fraction | 0.810526 |

Quality signals (same column pairing as above):

| Signal | quality_signal | raw |
|---|---|---|
| code_num_words | 30 | 0 |
| code_num_chars | 285 | 0 |
| code_mean_word_length | 7.7 | 0 |
| code_frac_words_unique | 0.333333 | null |
| code_frac_chars_top_2grams | 0.285714 | 1 |
| code_frac_chars_top_3grams | 0.441558 | 1 |
| code_frac_chars_top_4grams | 0.454545 | 1 |
| code_frac_chars_dupe_5grams | 0 | 0 |
| code_frac_chars_dupe_6grams | 0 | 0 |
| code_frac_chars_dupe_7grams | 0 | 0 |
| code_frac_chars_dupe_8grams | 0 | 0 |
| code_frac_chars_dupe_9grams | 0 | 0 |
| code_frac_chars_dupe_10grams | 0 | 0 |
| code_frac_chars_replacement_symbols | 0 | 0 |
| code_frac_chars_digital | 0.024691 | 0 |
| code_frac_chars_whitespace | 0.147368 | 0 |
| code_size_file_byte | 285 | 0 |
| code_num_lines | 6 | 1 |
| code_num_chars_line_max | 55 | 0 |
| code_num_chars_line_mean | 47.5 | 0 |
| code_frac_chars_alphabet | 0.925926 | 0 |
| code_frac_chars_comments | 0.101754 | 0 |
| code_cate_xml_start | 0 | 0 |
| code_frac_lines_dupe_lines | 0 | 0 |
| code_cate_autogen | 0 | 0 |
| code_frac_lines_long_string | 0 | 0 |
| code_frac_chars_string_length | 0 | 0 |
| code_frac_chars_long_word_length | 0 | 0 |
| code_frac_lines_string_concat | 0 | null |
| code_cate_encoded_data | 0 | 0 |
| code_frac_chars_hex_words | 0 | 0 |
| code_frac_lines_prompt_comments | 0 | 0 |
| code_frac_lines_assert | 0 | 0 |
| codepython_cate_ast | 1 | 0 |
| codepython_frac_lines_func_ratio | 0 | 0 |
| codepython_cate_var_zero | true | 1 |
| codepython_frac_lines_pass | 0 | 0 |
| codepython_frac_lines_import | 1 | 1 |
| codepython_frac_lines_simplefunc | 0 | 0 |
| codepython_score_lines_no_logic | 1 | 1 |
| codepython_frac_lines_print | 0 | 0 |

| Field | Value |
|---|---|
| effective | 0 |
| hits | 7 |
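The frac_chars_top_{2,3,4}grams and frac_chars_dupe_{5..10}grams families appear to measure how much of the text is covered by repeated word n-grams, in the style of RedPajama-type quality signals; Example 2's high top_3grams value (0.441558) plausibly reflects the "from .dataset import" trigram repeated across all six lines. A sketch of one plausible definition follows; the exact tokenization and normalization used by the pipeline are assumptions.

```python
from collections import Counter

def top_ngram_char_fraction(words: list[str], n: int) -> float:
    """Fraction of word characters covered by the single most frequent n-gram.

    This is an assumed definition, not necessarily the dataset's exact rule.
    """
    ngrams = [tuple(words[i : i + n]) for i in range(len(words) - n + 1)]
    if not ngrams:
        return 0.0
    top, count = Counter(ngrams).most_common(1)[0]
    covered = count * sum(len(w) for w in top)  # chars inside repeated copies
    total = sum(len(w) for w in words)          # chars across all words
    return covered / max(total, 1)

words = (
    "from .dataset import DualarmDatasetV1 "
    "from .dataset import DualarmDatasetV2 "
    "from .dataset import DualarmDatasetV3"
).split()
print(top_ngram_char_fraction(words, 3))
```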
**Example 3**

| Field | Value |
|---|---|
| hexsha | 6724ba755fc599dc77a3730ddf5516857c752b55 |
| size | 51,187 |
| ext | py |
| lang | Python |
| max_stars_repo_path | `cyberbrain/generated/communication_pb2.py` |
| max_stars_repo_name | testinggg-art/Cyberbrain |
| max_stars_repo_head_hexsha | e38c74c174e23aa386d005b03f09b30aa1b3a0ae |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | `cyberbrain/generated/communication_pb2.py` |
| max_issues_repo_name | testinggg-art/Cyberbrain |
| max_issues_repo_head_hexsha | e38c74c174e23aa386d005b03f09b30aa1b3a0ae |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | `cyberbrain/generated/communication_pb2.py` |
| max_forks_repo_name | testinggg-art/Cyberbrain |
| max_forks_repo_head_hexsha | e38c74c174e23aa386d005b03f09b30aa1b3a0ae |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

content:

```python
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: communication.proto
"""Generated protocol buffer code."""
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='communication.proto',
package='',
syntax='proto2',
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_pb=b'\n\x13\x63ommunication.proto\"}\n\x05State\x12\x1d\n\x06status\x18\x01 \x01(\x0e\x32\r.State.Status\x12\x0f\n\x07message\x18\x02 \x01(\t\"D\n\x06Status\x12\x10\n\x0c\x43LIENT_READY\x10\x01\x12\x10\n\x0cSERVER_READY\x10\x02\x12\x16\n\x12\x45XECUTION_COMPLETE\x10\x03\"E\n\x0e\x43ursorPosition\x12\x10\n\x08\x66ilename\x18\x01 \x01(\t\x12\x0e\n\x06lineno\x18\x02 \x01(\x05\x12\x11\n\tcharacter\x18\x03 \x01(\x05\"\xb7\x01\n\x0c\x46rameLocater\x12\x10\n\x08\x66rame_id\x18\x01 \x01(\t\x12\x12\n\nframe_name\x18\x02 \x01(\t\x12\x10\n\x08\x66ilename\x18\x03 \x01(\t\x12\x14\n\x0cstart_lineno\x18\x04 \x01(\x03\x12\x12\n\nend_lineno\x18\x05 \x01(\x03\x12\x19\n\x11\x63\x61llsite_filename\x18\x06 \x01(\t\x12\x17\n\x0f\x63\x61llsite_lineno\x18\x07 \x01(\x03\x12\x11\n\targuments\x18\x08 \x01(\t\"9\n\x10\x46rameLocaterList\x12%\n\x0e\x66rame_locaters\x18\x01 \x03(\x0b\x32\r.FrameLocater\"\x88\x01\n\x0cInitialValue\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08\x66ilename\x18\x02 \x01(\t\x12\x0e\n\x06lineno\x18\x03 \x01(\x05\x12\r\n\x05index\x18\x06 \x01(\x03\x12\x0e\n\x06offset\x18\x07 \x01(\x05\x12\x0e\n\x06target\x18\x04 \x01(\t\x12\r\n\x05value\x18\x05 \x01(\t\x12\x0c\n\x04repr\x18\x08 \x01(\t\"\x94\x01\n\x07\x42inding\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08\x66ilename\x18\x02 \x01(\t\x12\x0e\n\x06lineno\x18\x03 \x01(\x05\x12\x0e\n\x06target\x18\x04 \x01(\t\x12\r\n\x05index\x18\x07 \x01(\x03\x12\x0e\n\x06offset\x18\x08 \x01(\x05\x12\r\n\x05value\x18\x05 \x01(\t\x12\x0c\n\x04repr\x18\t \x01(\t\x12\x0f\n\x07sources\x18\x06 \x03(\t\"\x95\x01\n\x08Mutation\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08\x66ilename\x18\x02 \x01(\t\x12\x0e\n\x06lineno\x18\x03 \x01(\x05\x12\r\n\x05index\x18\x08 \x01(\x03\x12\x0e\n\x06offset\x18\t \x01(\x05\x12\x0e\n\x06target\x18\x04 \x01(\t\x12\r\n\x05value\x18\x05 \x01(\t\x12\x0c\n\x04repr\x18\n \x01(\t\x12\x0f\n\x07sources\x18\x07 \x03(\t\"g\n\x08\x44\x65letion\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08\x66ilename\x18\x02 \x01(\t\x12\x0e\n\x06lineno\x18\x03 \x01(\x05\x12\r\n\x05index\x18\x05 \x01(\x03\x12\x0e\n\x06offset\x18\x06 \x01(\x05\x12\x0e\n\x06target\x18\x04 \x01(\t\"\x83\x01\n\x06Return\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08\x66ilename\x18\x02 \x01(\t\x12\x0e\n\x06lineno\x18\x03 \x01(\x05\x12\r\n\x05index\x18\x04 \x01(\x03\x12\x0e\n\x06offset\x18\x05 \x01(\x05\x12\r\n\x05value\x18\x06 \x01(\t\x12\x0c\n\x04repr\x18\x08 \x01(\t\x12\x0f\n\x07sources\x18\x07 \x03(\t\"w\n\x13JumpBackToLoopStart\x12\n\n\x02id\x18\x01 \x01(\t\x12\x10\n\x08\x66ilename\x18\x02 \x01(\t\x12\x0e\n\x06lineno\x18\x03 \x01(\x05\x12\r\n\x05index\x18\x04 \x01(\x03\x12\x0e\n\x06offset\x18\x05 \x01(\x05\x12\x13\n\x0bjump_target\x18\x06 \x01(\x05\"\xe7\x01\n\x05\x45vent\x12&\n\rinitial_value\x18\x01 \x01(\x0b\x32\r.InitialValueH\x00\x12\x1b\n\x07\x62inding\x18\x02 \x01(\x0b\x32\x08.BindingH\x00\x12\x1d\n\x08mutation\x18\x03 \x01(\x0b\x32\t.MutationH\x00\x12\x1d\n\x08\x64\x65letion\x18\x04 \x01(\x0b\x32\t.DeletionH\x00\x12\x19\n\x06return\x18\x05 \x01(\x0b\x32\x07.ReturnH\x00\x12\x37\n\x17jump_back_to_loop_start\x18\x06 \x01(\x0b\x32\x14.JumpBackToLoopStartH\x00\x42\x07\n\x05value\" \n\x0b\x45ventIDList\x12\x11\n\tevent_ids\x18\x01 \x03(\t\"F\n\x04Loop\x12\x14\n\x0cstart_offset\x18\x01 \x01(\x05\x12\x12\n\nend_offset\x18\x02 \x01(\x05\x12\x14\n\x0cstart_lineno\x18\x03 \x01(\x05\"\xe2\x01\n\x05\x46rame\x12\x1f\n\x08metadata\x18\x01 \x01(\x0b\x32\r.FrameLocater\x12\x16\n\x06\x65vents\x18\x02 \x03(\x0b\x32\x06.Event\x12\x14\n\x05loops\x18\x03 
\x03(\x0b\x32\x05.Loop\x12\x31\n\x0etracing_result\x18\x04 \x03(\x0b\x32\x19.Frame.TracingResultEntry\x12\x13\n\x0bidentifiers\x18\x05 \x03(\t\x1a\x42\n\x12TracingResultEntry\x12\x0b\n\x03key\x18\x01 \x01(\t\x12\x1b\n\x05value\x18\x02 \x01(\x0b\x32\x0c.EventIDList:\x02\x38\x01\x32\x89\x01\n\rCommunication\x12\x1f\n\tSyncState\x12\x06.State\x1a\x06.State\"\x00\x30\x01\x12\x32\n\nFindFrames\x12\x0f.CursorPosition\x1a\x11.FrameLocaterList\"\x00\x12#\n\x08GetFrame\x12\r.FrameLocater\x1a\x06.Frame\"\x00'
)
_STATE_STATUS = _descriptor.EnumDescriptor(
name='Status',
full_name='State.Status',
filename=None,
file=DESCRIPTOR,
create_key=_descriptor._internal_create_key,
values=[
_descriptor.EnumValueDescriptor(
name='CLIENT_READY', index=0, number=1,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='SERVER_READY', index=1, number=2,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
_descriptor.EnumValueDescriptor(
name='EXECUTION_COMPLETE', index=2, number=3,
serialized_options=None,
type=None,
create_key=_descriptor._internal_create_key),
],
containing_type=None,
serialized_options=None,
serialized_start=80,
serialized_end=148,
)
_sym_db.RegisterEnumDescriptor(_STATE_STATUS)
_STATE = _descriptor.Descriptor(
name='State',
full_name='State',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='status', full_name='State.status', index=0,
number=1, type=14, cpp_type=8, label=1,
has_default_value=False, default_value=1,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='message', full_name='State.message', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
_STATE_STATUS,
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=23,
serialized_end=148,
)
_CURSORPOSITION = _descriptor.Descriptor(
name='CursorPosition',
full_name='CursorPosition',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='filename', full_name='CursorPosition.filename', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='lineno', full_name='CursorPosition.lineno', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='character', full_name='CursorPosition.character', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=150,
serialized_end=219,
)
_FRAMELOCATER = _descriptor.Descriptor(
name='FrameLocater',
full_name='FrameLocater',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='frame_id', full_name='FrameLocater.frame_id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='frame_name', full_name='FrameLocater.frame_name', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='filename', full_name='FrameLocater.filename', index=2,
number=3, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='start_lineno', full_name='FrameLocater.start_lineno', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='end_lineno', full_name='FrameLocater.end_lineno', index=4,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='callsite_filename', full_name='FrameLocater.callsite_filename', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='callsite_lineno', full_name='FrameLocater.callsite_lineno', index=6,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='arguments', full_name='FrameLocater.arguments', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=222,
serialized_end=405,
)
_FRAMELOCATERLIST = _descriptor.Descriptor(
name='FrameLocaterList',
full_name='FrameLocaterList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='frame_locaters', full_name='FrameLocaterList.frame_locaters', index=0,
number=1, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=407,
serialized_end=464,
)
_INITIALVALUE = _descriptor.Descriptor(
name='InitialValue',
full_name='InitialValue',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='InitialValue.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='filename', full_name='InitialValue.filename', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='lineno', full_name='InitialValue.lineno', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='index', full_name='InitialValue.index', index=3,
number=6, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='offset', full_name='InitialValue.offset', index=4,
number=7, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='target', full_name='InitialValue.target', index=5,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='InitialValue.value', index=6,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repr', full_name='InitialValue.repr', index=7,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=467,
serialized_end=603,
)
_BINDING = _descriptor.Descriptor(
name='Binding',
full_name='Binding',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='Binding.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='filename', full_name='Binding.filename', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='lineno', full_name='Binding.lineno', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='target', full_name='Binding.target', index=3,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='index', full_name='Binding.index', index=4,
number=7, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='offset', full_name='Binding.offset', index=5,
number=8, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='Binding.value', index=6,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repr', full_name='Binding.repr', index=7,
number=9, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sources', full_name='Binding.sources', index=8,
number=6, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=606,
serialized_end=754,
)
_MUTATION = _descriptor.Descriptor(
name='Mutation',
full_name='Mutation',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='Mutation.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='filename', full_name='Mutation.filename', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='lineno', full_name='Mutation.lineno', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='index', full_name='Mutation.index', index=3,
number=8, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='offset', full_name='Mutation.offset', index=4,
number=9, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='target', full_name='Mutation.target', index=5,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='Mutation.value', index=6,
number=5, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repr', full_name='Mutation.repr', index=7,
number=10, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sources', full_name='Mutation.sources', index=8,
number=7, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=757,
serialized_end=906,
)
_DELETION = _descriptor.Descriptor(
name='Deletion',
full_name='Deletion',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='Deletion.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='filename', full_name='Deletion.filename', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='lineno', full_name='Deletion.lineno', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='index', full_name='Deletion.index', index=3,
number=5, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='offset', full_name='Deletion.offset', index=4,
number=6, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='target', full_name='Deletion.target', index=5,
number=4, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=908,
serialized_end=1011,
)
_RETURN = _descriptor.Descriptor(
name='Return',
full_name='Return',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='Return.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='filename', full_name='Return.filename', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='lineno', full_name='Return.lineno', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='index', full_name='Return.index', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='offset', full_name='Return.offset', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='Return.value', index=5,
number=6, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='repr', full_name='Return.repr', index=6,
number=8, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='sources', full_name='Return.sources', index=7,
number=7, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1014,
serialized_end=1145,
)
_JUMPBACKTOLOOPSTART = _descriptor.Descriptor(
name='JumpBackToLoopStart',
full_name='JumpBackToLoopStart',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='id', full_name='JumpBackToLoopStart.id', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='filename', full_name='JumpBackToLoopStart.filename', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='lineno', full_name='JumpBackToLoopStart.lineno', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='index', full_name='JumpBackToLoopStart.index', index=3,
number=4, type=3, cpp_type=2, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='offset', full_name='JumpBackToLoopStart.offset', index=4,
number=5, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='jump_target', full_name='JumpBackToLoopStart.jump_target', index=5,
number=6, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1147,
serialized_end=1266,
)
_EVENT = _descriptor.Descriptor(
name='Event',
full_name='Event',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='initial_value', full_name='Event.initial_value', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='binding', full_name='Event.binding', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='mutation', full_name='Event.mutation', index=2,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='deletion', full_name='Event.deletion', index=3,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='return', full_name='Event.return', index=4,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='jump_back_to_loop_start', full_name='Event.jump_back_to_loop_start', index=5,
number=6, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='value', full_name='Event.value',
index=0, containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[]),
],
serialized_start=1269,
serialized_end=1500,
)
_EVENTIDLIST = _descriptor.Descriptor(
name='EventIDList',
full_name='EventIDList',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='event_ids', full_name='EventIDList.event_ids', index=0,
number=1, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1502,
serialized_end=1534,
)
_LOOP = _descriptor.Descriptor(
name='Loop',
full_name='Loop',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='start_offset', full_name='Loop.start_offset', index=0,
number=1, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='end_offset', full_name='Loop.end_offset', index=1,
number=2, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='start_lineno', full_name='Loop.start_lineno', index=2,
number=3, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1536,
serialized_end=1606,
)
_FRAME_TRACINGRESULTENTRY = _descriptor.Descriptor(
name='TracingResultEntry',
full_name='Frame.TracingResultEntry',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='key', full_name='Frame.TracingResultEntry.key', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='value', full_name='Frame.TracingResultEntry.value', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'8\001',
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1769,
serialized_end=1835,
)
_FRAME = _descriptor.Descriptor(
name='Frame',
full_name='Frame',
filename=None,
file=DESCRIPTOR,
containing_type=None,
create_key=_descriptor._internal_create_key,
fields=[
_descriptor.FieldDescriptor(
name='metadata', full_name='Frame.metadata', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='events', full_name='Frame.events', index=1,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='loops', full_name='Frame.loops', index=2,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='tracing_result', full_name='Frame.tracing_result', index=3,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
_descriptor.FieldDescriptor(
name='identifiers', full_name='Frame.identifiers', index=4,
number=5, type=9, cpp_type=9, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR, create_key=_descriptor._internal_create_key),
],
extensions=[
],
nested_types=[_FRAME_TRACINGRESULTENTRY, ],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto2',
extension_ranges=[],
oneofs=[
],
serialized_start=1609,
serialized_end=1835,
)
_STATE.fields_by_name['status'].enum_type = _STATE_STATUS
_STATE_STATUS.containing_type = _STATE
_FRAMELOCATERLIST.fields_by_name['frame_locaters'].message_type = _FRAMELOCATER
_EVENT.fields_by_name['initial_value'].message_type = _INITIALVALUE
_EVENT.fields_by_name['binding'].message_type = _BINDING
_EVENT.fields_by_name['mutation'].message_type = _MUTATION
_EVENT.fields_by_name['deletion'].message_type = _DELETION
_EVENT.fields_by_name['return'].message_type = _RETURN
_EVENT.fields_by_name['jump_back_to_loop_start'].message_type = _JUMPBACKTOLOOPSTART
_EVENT.oneofs_by_name['value'].fields.append(
_EVENT.fields_by_name['initial_value'])
_EVENT.fields_by_name['initial_value'].containing_oneof = _EVENT.oneofs_by_name['value']
_EVENT.oneofs_by_name['value'].fields.append(
_EVENT.fields_by_name['binding'])
_EVENT.fields_by_name['binding'].containing_oneof = _EVENT.oneofs_by_name['value']
_EVENT.oneofs_by_name['value'].fields.append(
_EVENT.fields_by_name['mutation'])
_EVENT.fields_by_name['mutation'].containing_oneof = _EVENT.oneofs_by_name['value']
_EVENT.oneofs_by_name['value'].fields.append(
_EVENT.fields_by_name['deletion'])
_EVENT.fields_by_name['deletion'].containing_oneof = _EVENT.oneofs_by_name['value']
_EVENT.oneofs_by_name['value'].fields.append(
_EVENT.fields_by_name['return'])
_EVENT.fields_by_name['return'].containing_oneof = _EVENT.oneofs_by_name['value']
_EVENT.oneofs_by_name['value'].fields.append(
_EVENT.fields_by_name['jump_back_to_loop_start'])
_EVENT.fields_by_name['jump_back_to_loop_start'].containing_oneof = _EVENT.oneofs_by_name['value']
_FRAME_TRACINGRESULTENTRY.fields_by_name['value'].message_type = _EVENTIDLIST
_FRAME_TRACINGRESULTENTRY.containing_type = _FRAME
_FRAME.fields_by_name['metadata'].message_type = _FRAMELOCATER
_FRAME.fields_by_name['events'].message_type = _EVENT
_FRAME.fields_by_name['loops'].message_type = _LOOP
_FRAME.fields_by_name['tracing_result'].message_type = _FRAME_TRACINGRESULTENTRY
DESCRIPTOR.message_types_by_name['State'] = _STATE
DESCRIPTOR.message_types_by_name['CursorPosition'] = _CURSORPOSITION
DESCRIPTOR.message_types_by_name['FrameLocater'] = _FRAMELOCATER
DESCRIPTOR.message_types_by_name['FrameLocaterList'] = _FRAMELOCATERLIST
DESCRIPTOR.message_types_by_name['InitialValue'] = _INITIALVALUE
DESCRIPTOR.message_types_by_name['Binding'] = _BINDING
DESCRIPTOR.message_types_by_name['Mutation'] = _MUTATION
DESCRIPTOR.message_types_by_name['Deletion'] = _DELETION
DESCRIPTOR.message_types_by_name['Return'] = _RETURN
DESCRIPTOR.message_types_by_name['JumpBackToLoopStart'] = _JUMPBACKTOLOOPSTART
DESCRIPTOR.message_types_by_name['Event'] = _EVENT
DESCRIPTOR.message_types_by_name['EventIDList'] = _EVENTIDLIST
DESCRIPTOR.message_types_by_name['Loop'] = _LOOP
DESCRIPTOR.message_types_by_name['Frame'] = _FRAME
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
State = _reflection.GeneratedProtocolMessageType('State', (_message.Message,), {
'DESCRIPTOR' : _STATE,
'__module__' : 'communication_pb2'
# @@protoc_insertion_point(class_scope:State)
})
_sym_db.RegisterMessage(State)
CursorPosition = _reflection.GeneratedProtocolMessageType('CursorPosition', (_message.Message,), {
'DESCRIPTOR' : _CURSORPOSITION,
'__module__' : 'communication_pb2'
# @@protoc_insertion_point(class_scope:CursorPosition)
})
_sym_db.RegisterMessage(CursorPosition)
FrameLocater = _reflection.GeneratedProtocolMessageType('FrameLocater', (_message.Message,), {
'DESCRIPTOR' : _FRAMELOCATER,
'__module__' : 'communication_pb2'
# @@protoc_insertion_point(class_scope:FrameLocater)
})
_sym_db.RegisterMessage(FrameLocater)
FrameLocaterList = _reflection.GeneratedProtocolMessageType('FrameLocaterList', (_message.Message,), {
'DESCRIPTOR' : _FRAMELOCATERLIST,
'__module__' : 'communication_pb2'
# @@protoc_insertion_point(class_scope:FrameLocaterList)
})
_sym_db.RegisterMessage(FrameLocaterList)
InitialValue = _reflection.GeneratedProtocolMessageType('InitialValue', (_message.Message,), {
'DESCRIPTOR' : _INITIALVALUE,
'__module__' : 'communication_pb2'
# @@protoc_insertion_point(class_scope:InitialValue)
})
_sym_db.RegisterMessage(InitialValue)
Binding = _reflection.GeneratedProtocolMessageType('Binding', (_message.Message,), {
'DESCRIPTOR' : _BINDING,
'__module__' : 'communication_pb2'
# @@protoc_insertion_point(class_scope:Binding)
})
_sym_db.RegisterMessage(Binding)
Mutation = _reflection.GeneratedProtocolMessageType('Mutation', (_message.Message,), {
'DESCRIPTOR' : _MUTATION,
'__module__' : 'communication_pb2'
# @@protoc_insertion_point(class_scope:Mutation)
})
_sym_db.RegisterMessage(Mutation)
Deletion = _reflection.GeneratedProtocolMessageType('Deletion', (_message.Message,), {
'DESCRIPTOR' : _DELETION,
'__module__' : 'communication_pb2'
# @@protoc_insertion_point(class_scope:Deletion)
})
_sym_db.RegisterMessage(Deletion)
Return = _reflection.GeneratedProtocolMessageType('Return', (_message.Message,), {
'DESCRIPTOR' : _RETURN,
'__module__' : 'communication_pb2'
# @@protoc_insertion_point(class_scope:Return)
})
_sym_db.RegisterMessage(Return)
JumpBackToLoopStart = _reflection.GeneratedProtocolMessageType('JumpBackToLoopStart', (_message.Message,), {
'DESCRIPTOR' : _JUMPBACKTOLOOPSTART,
'__module__' : 'communication_pb2'
# @@protoc_insertion_point(class_scope:JumpBackToLoopStart)
})
_sym_db.RegisterMessage(JumpBackToLoopStart)
Event = _reflection.GeneratedProtocolMessageType('Event', (_message.Message,), {
'DESCRIPTOR' : _EVENT,
'__module__' : 'communication_pb2'
# @@protoc_insertion_point(class_scope:Event)
})
_sym_db.RegisterMessage(Event)
EventIDList = _reflection.GeneratedProtocolMessageType('EventIDList', (_message.Message,), {
'DESCRIPTOR' : _EVENTIDLIST,
'__module__' : 'communication_pb2'
# @@protoc_insertion_point(class_scope:EventIDList)
})
_sym_db.RegisterMessage(EventIDList)
Loop = _reflection.GeneratedProtocolMessageType('Loop', (_message.Message,), {
'DESCRIPTOR' : _LOOP,
'__module__' : 'communication_pb2'
# @@protoc_insertion_point(class_scope:Loop)
})
_sym_db.RegisterMessage(Loop)
Frame = _reflection.GeneratedProtocolMessageType('Frame', (_message.Message,), {
'TracingResultEntry' : _reflection.GeneratedProtocolMessageType('TracingResultEntry', (_message.Message,), {
'DESCRIPTOR' : _FRAME_TRACINGRESULTENTRY,
'__module__' : 'communication_pb2'
# @@protoc_insertion_point(class_scope:Frame.TracingResultEntry)
})
,
'DESCRIPTOR' : _FRAME,
'__module__' : 'communication_pb2'
# @@protoc_insertion_point(class_scope:Frame)
})
_sym_db.RegisterMessage(Frame)
_sym_db.RegisterMessage(Frame.TracingResultEntry)
_FRAME_TRACINGRESULTENTRY._options = None
_COMMUNICATION = _descriptor.ServiceDescriptor(
name='Communication',
full_name='Communication',
file=DESCRIPTOR,
index=0,
serialized_options=None,
create_key=_descriptor._internal_create_key,
serialized_start=1838,
serialized_end=1975,
methods=[
_descriptor.MethodDescriptor(
name='SyncState',
full_name='Communication.SyncState',
index=0,
containing_service=None,
input_type=_STATE,
output_type=_STATE,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='FindFrames',
full_name='Communication.FindFrames',
index=1,
containing_service=None,
input_type=_CURSORPOSITION,
output_type=_FRAMELOCATERLIST,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
_descriptor.MethodDescriptor(
name='GetFrame',
full_name='Communication.GetFrame',
index=2,
containing_service=None,
input_type=_FRAMELOCATER,
output_type=_FRAME,
serialized_options=None,
create_key=_descriptor._internal_create_key,
),
])
_sym_db.RegisterServiceDescriptor(_COMMUNICATION)
DESCRIPTOR.services_by_name['Communication'] = _COMMUNICATION
# @@protoc_insertion_point(module_scope)
```

| Field | Value |
|---|---|
| avg_line_length | 43.378814 |
| max_line_length | 4,014 |
| alphanum_fraction | 0.744173 |

Quality signals (same column pairing as above):

| Signal | quality_signal | raw |
|---|---|---|
| code_num_words | 6,656 | 0 |
| code_num_chars | 51,187 | 0 |
| code_mean_word_length | 5.397386 | 0 |
| code_frac_words_unique | 0.04402 | null |
| code_frac_chars_top_2grams | 0.055894 | 0 |
| code_frac_chars_top_3grams | 0.088852 | 0 |
| code_frac_chars_top_4grams | 0.07666 | 0 |
| code_frac_chars_dupe_5grams | 0.763981 | 0 |
| code_frac_chars_dupe_6grams | 0.733946 | 1 |
| code_frac_chars_dupe_7grams | 0.726764 | 1 |
| code_frac_chars_dupe_8grams | 0.721113 | 1 |
| code_frac_chars_dupe_9grams | 0.700097 | 1 |
| code_frac_chars_dupe_10grams | 0.691301 | 1 |
| code_frac_chars_replacement_symbols | 0 | 0 |
| code_frac_chars_digital | 0.040039 | 0 |
| code_frac_chars_whitespace | 0.129056 | 0 |
| code_size_file_byte | 51,187 | 0 |
| code_num_lines | 1,179 | 0 |
| code_num_chars_line_max | 4,015 | 1 |
| code_num_chars_line_mean | 43.415606 | 0 |
| code_frac_chars_alphabet | 0.765797 | 0 |
| code_frac_chars_comments | 0.018618 | 0 |
| code_cate_xml_start | 0 | 0 |
| code_frac_lines_dupe_lines | 0.704545 | 1 |
| code_cate_autogen | 1 | 1 |
| code_frac_lines_long_string | 0.000909 | 0 |
| code_frac_chars_string_length | 0.156824 | 0 |
| code_frac_chars_long_word_length | 0.09323 | 0 |
| code_frac_lines_string_concat | 0 | null |
| code_cate_encoded_data | 0 | 0 |
| code_frac_chars_hex_words | 0 | 0 |
| code_frac_lines_prompt_comments | 0 | 0 |
| code_frac_lines_assert | 0 | 0 |
| codepython_cate_ast | 1 | 0 |
| codepython_frac_lines_func_ratio | 0 | 0 |
| codepython_cate_var_zero | false | 0 |
| codepython_frac_lines_pass | 0 | 0 |
| codepython_frac_lines_import | 0.003636 | 0 |
| codepython_frac_lines_simplefunc | 0 | 0 |
| codepython_score_lines_no_logic | 0.003636 | 0 |
| codepython_frac_lines_print | 0 | 0 |

| Field | Value |
|---|---|
| effective | 0 |
| hits | 8 |
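Example 3 shows the duplication and auto-generation signals at work: the protoc-generated file scores around 0.7 on dupe_5grams through dupe_10grams and gets cate_autogen = 1, consistent with its "Generated by the protocol buffer compiler. DO NOT EDIT!" banner. A plausible banner-based detector in the spirit of cate_autogen is sketched below; the marker list is a guess based on common generator conventions, not the pipeline's actual rule.

```python
import re

# Hypothetical autogenerated-code detector. Markers are drawn from common
# generator banners (protoc, swagger-codegen, etc.), not a confirmed list.
AUTOGEN_MARKERS = re.compile(
    r"(generated by|do not edit|auto-generated|autogenerated|@generated)",
    re.IGNORECASE,
)

def cate_autogen(content: str, head_lines: int = 10) -> int:
    """Return 1 if the first few lines look like a code-generator banner."""
    head = "\n".join(content.splitlines()[:head_lines])
    return int(bool(AUTOGEN_MARKERS.search(head)))
```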
**Example 4**

| Field | Value |
|---|---|
| hexsha | 673630355104b52334ed3925b7ad4cb417a33f9e |
| size | 23,595 |
| ext | py |
| lang | Python |
| max_stars_repo_path | `isi_sdk_8_1_0/isi_sdk_8_1_0/api/dedupe_api.py` |
| max_stars_repo_name | mohitjain97/isilon_sdk_python |
| max_stars_repo_head_hexsha | a371f438f542568edb8cda35e929e6b300b1177c |
| max_stars_repo_licenses | ["Unlicense"] |
| max_stars_count | 24 |
| max_stars_repo_stars_event_min_datetime | 2018-06-22T14:13:23.000Z |
| max_stars_repo_stars_event_max_datetime | 2022-03-23T01:21:26.000Z |
| max_issues_repo_path | `isi_sdk_8_1_0/isi_sdk_8_1_0/api/dedupe_api.py` |
| max_issues_repo_name | mohitjain97/isilon_sdk_python |
| max_issues_repo_head_hexsha | a371f438f542568edb8cda35e929e6b300b1177c |
| max_issues_repo_licenses | ["Unlicense"] |
| max_issues_count | 46 |
| max_issues_repo_issues_event_min_datetime | 2018-04-30T13:28:22.000Z |
| max_issues_repo_issues_event_max_datetime | 2022-03-21T21:11:07.000Z |
| max_forks_repo_path | `isi_sdk_8_1_0/isi_sdk_8_1_0/api/dedupe_api.py` |
| max_forks_repo_name | mohitjain97/isilon_sdk_python |
| max_forks_repo_head_hexsha | a371f438f542568edb8cda35e929e6b300b1177c |
| max_forks_repo_licenses | ["Unlicense"] |
| max_forks_count | 29 |
| max_forks_repo_forks_event_min_datetime | 2018-06-19T00:14:04.000Z |
| max_forks_repo_forks_event_max_datetime | 2022-02-08T17:51:19.000Z |

content (truncated):

```python
# coding: utf-8
"""
Isilon SDK
Isilon SDK - Language bindings for the OneFS API # noqa: E501
OpenAPI spec version: 5
Contact: sdk@isilon.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from isi_sdk_8_1_0.api_client import ApiClient
class DedupeApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def get_dedupe_dedupe_summary(self, **kwargs): # noqa: E501
"""get_dedupe_dedupe_summary # noqa: E501
Return summary information about dedupe. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dedupe_dedupe_summary(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: DedupeDedupeSummary
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_dedupe_dedupe_summary_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_dedupe_dedupe_summary_with_http_info(**kwargs) # noqa: E501
return data
def get_dedupe_dedupe_summary_with_http_info(self, **kwargs): # noqa: E501
"""get_dedupe_dedupe_summary # noqa: E501
Return summary information about dedupe. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dedupe_dedupe_summary_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: DedupeDedupeSummary
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
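# `locals()` snapshots the explicit arguments plus the `kwargs` dict; each
# kwarg is then validated against the `all_params` whitelist and merged into
# `params`, so unknown keyword arguments fail fast with a TypeError.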
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_dedupe_dedupe_summary" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/platform/1/dedupe/dedupe-summary', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DedupeDedupeSummary', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_dedupe_report(self, dedupe_report_id, **kwargs): # noqa: E501
"""get_dedupe_report # noqa: E501
Retrieve a report for a single dedupe job. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dedupe_report(dedupe_report_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str dedupe_report_id: Retrieve a report for a single dedupe job. (required)
:param str scope: If specified as \"effective\" or not specified, all fields are returned. If specified as \"user\", only fields with non-default values are shown. If specified as \"default\", the original values are returned.
:return: DedupeReports
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_dedupe_report_with_http_info(dedupe_report_id, **kwargs) # noqa: E501
else:
(data) = self.get_dedupe_report_with_http_info(dedupe_report_id, **kwargs) # noqa: E501
return data
def get_dedupe_report_with_http_info(self, dedupe_report_id, **kwargs): # noqa: E501
"""get_dedupe_report # noqa: E501
Retrieve a report for a single dedupe job. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dedupe_report_with_http_info(dedupe_report_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str dedupe_report_id: Retrieve a report for a single dedupe job. (required)
:param str scope: If specified as \"effective\" or not specified, all fields are returned. If specified as \"user\", only fields with non-default values are shown. If specified as \"default\", the original values are returned.
:return: DedupeReports
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['dedupe_report_id', 'scope'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_dedupe_report" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'dedupe_report_id' is set
if ('dedupe_report_id' not in params or
params['dedupe_report_id'] is None):
raise ValueError("Missing the required parameter `dedupe_report_id` when calling `get_dedupe_report`") # noqa: E501
if ('scope' in params and
len(params['scope']) > 255):
raise ValueError("Invalid value for parameter `scope` when calling `get_dedupe_report`, length must be less than or equal to `255`") # noqa: E501
if ('scope' in params and
len(params['scope']) < 0):
raise ValueError("Invalid value for parameter `scope` when calling `get_dedupe_report`, length must be greater than or equal to `0`") # noqa: E501
collection_formats = {}
path_params = {}
if 'dedupe_report_id' in params:
path_params['DedupeReportId'] = params['dedupe_report_id'] # noqa: E501
query_params = []
if 'scope' in params:
query_params.append(('scope', params['scope'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/platform/1/dedupe/reports/{DedupeReportId}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DedupeReports', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
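# Editorial sketch: `scope` narrows which fields come back, per the docstring
# above; the report id is a placeholder.
#
#   report = api.get_dedupe_report('1234', scope='user')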
def get_dedupe_reports(self, **kwargs): # noqa: E501
"""get_dedupe_reports # noqa: E501
List dedupe reports. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dedupe_reports(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str sort: The field that will be used for sorting.
:param int begin: Restrict the query to reports at or after the given time, in seconds since the Epoch.
:param int end: Restrict the query to reports at or before the given time, in seconds since the Epoch.
:param int job_id: Restrict the query to the given job ID.
:param str resume: Continue returning results from previous call using this token (token should come from the previous call, resume cannot be used with other options).
:param str job_type: Restrict the query to the given job type.
:param int limit: Return no more than this many results at once (see resume).
:param str dir: The direction of the sort.
:return: DedupeReportsExtended
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_dedupe_reports_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_dedupe_reports_with_http_info(**kwargs) # noqa: E501
return data
def get_dedupe_reports_with_http_info(self, **kwargs): # noqa: E501
"""get_dedupe_reports # noqa: E501
List dedupe reports. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dedupe_reports_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str sort: The field that will be used for sorting.
:param int begin: Restrict the query to reports at or after the given time, in seconds since the Epoch.
:param int end: Restrict the query to reports at or before the given time, in seconds since the Epoch.
:param int job_id: Restrict the query to the given job ID.
:param str resume: Continue returning results from previous call using this token (token should come from the previous call, resume cannot be used with other options).
:param str job_type: Restrict the query to the given job type.
:param int limit: Return no more than this many results at once (see resume).
:param str dir: The direction of the sort.
:return: DedupeReportsExtended
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['sort', 'begin', 'end', 'job_id', 'resume', 'job_type', 'limit', 'dir'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_dedupe_reports" % key
)
params[key] = val
del params['kwargs']
if ('resume' in params and
len(params['resume']) > 8192):
raise ValueError("Invalid value for parameter `resume` when calling `get_dedupe_reports`, length must be less than or equal to `8192`") # noqa: E501
if ('resume' in params and
len(params['resume']) < 0):
raise ValueError("Invalid value for parameter `resume` when calling `get_dedupe_reports`, length must be greater than or equal to `0`") # noqa: E501
if 'limit' in params and params['limit'] > 4294967295: # noqa: E501
raise ValueError("Invalid value for parameter `limit` when calling `get_dedupe_reports`, must be a value less than or equal to `4294967295`") # noqa: E501
if 'limit' in params and params['limit'] < 1: # noqa: E501
raise ValueError("Invalid value for parameter `limit` when calling `get_dedupe_reports`, must be a value greater than or equal to `1`") # noqa: E501
if ('dir' in params and
len(params['dir']) < 0):
raise ValueError("Invalid value for parameter `dir` when calling `get_dedupe_reports`, length must be greater than or equal to `0`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'sort' in params:
query_params.append(('sort', params['sort'])) # noqa: E501
if 'begin' in params:
query_params.append(('begin', params['begin'])) # noqa: E501
if 'end' in params:
query_params.append(('end', params['end'])) # noqa: E501
if 'job_id' in params:
query_params.append(('job_id', params['job_id'])) # noqa: E501
if 'resume' in params:
query_params.append(('resume', params['resume'])) # noqa: E501
if 'job_type' in params:
query_params.append(('job_type', params['job_type'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
if 'dir' in params:
query_params.append(('dir', params['dir'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/platform/1/dedupe/reports', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DedupeReportsExtended', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
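# Editorial pagination sketch. The `resume` attribute name on the returned
# DedupeReportsExtended model is an assumption; per the docstring, the token
# must be passed back on its own, without other query options.
#
#   page = api.get_dedupe_reports(limit=50)
#   while getattr(page, 'resume', None):
#       page = api.get_dedupe_reports(resume=page.resume)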
def get_dedupe_settings(self, **kwargs): # noqa: E501
"""get_dedupe_settings # noqa: E501
Retrieve the dedupe settings. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dedupe_settings(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: DedupeSettings
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_dedupe_settings_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_dedupe_settings_with_http_info(**kwargs) # noqa: E501
return data
def get_dedupe_settings_with_http_info(self, **kwargs): # noqa: E501
"""get_dedupe_settings # noqa: E501
Retrieve the dedupe settings. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_dedupe_settings_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: DedupeSettings
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_dedupe_settings" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/platform/1/dedupe/settings', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='DedupeSettings', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_dedupe_settings(self, dedupe_settings, **kwargs): # noqa: E501
"""update_dedupe_settings # noqa: E501
Modify the dedupe settings. All input fields are optional, but one or more must be supplied. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_dedupe_settings(dedupe_settings, async_req=True)
>>> result = thread.get()
:param async_req bool
:param DedupeSettingsExtended dedupe_settings: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_dedupe_settings_with_http_info(dedupe_settings, **kwargs) # noqa: E501
else:
(data) = self.update_dedupe_settings_with_http_info(dedupe_settings, **kwargs) # noqa: E501
return data
def update_dedupe_settings_with_http_info(self, dedupe_settings, **kwargs): # noqa: E501
"""update_dedupe_settings # noqa: E501
Modify the dedupe settings. All input fields are optional, but one or more must be supplied. # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_dedupe_settings_with_http_info(dedupe_settings, async_req=True)
>>> result = thread.get()
:param async_req bool
:param DedupeSettingsExtended dedupe_settings: (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['dedupe_settings'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_dedupe_settings" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'dedupe_settings' is set
if ('dedupe_settings' not in params or
params['dedupe_settings'] is None):
raise ValueError("Missing the required parameter `dedupe_settings` when calling `update_dedupe_settings`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'dedupe_settings' in params:
body_params = params['dedupe_settings']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['basicAuth'] # noqa: E501
return self.api_client.call_api(
'/platform/1/dedupe/settings', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
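# Editorial usage sketch, not part of the generated client. Host and
# credentials are placeholders for a reachable OneFS cluster; Configuration
# and ApiClient follow the usual swagger-codegen package layout.
if __name__ == '__main__':
    import isi_sdk_8_1_0
    configuration = isi_sdk_8_1_0.Configuration()
    configuration.host = 'https://onefs.example.com:8080'  # placeholder
    configuration.username = 'admin'                       # placeholder
    configuration.password = 'secret'                      # placeholder
    configuration.verify_ssl = False
    api = DedupeApi(isi_sdk_8_1_0.ApiClient(configuration))
    # Synchronous call: returns the deserialized response body.
    print(api.get_dedupe_dedupe_summary())
    # Asynchronous variant: returns the request thread; .get() joins it.
    thread = api.get_dedupe_reports(limit=10, async_req=True)
    print(thread.get())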
| 42.058824 | 236 | 0.621869 | 2,819 | 23,595 | 4.974814 | 0.084072 | 0.052481 | 0.019966 | 0.02567 | 0.919566 | 0.899102 | 0.877353 | 0.865873 | 0.85425 | 0.836851 | 0 | 0.019839 | 0.28862 | 23,595 | 560 | 237 | 42.133929 | 0.815669 | 0.347574 | 0 | 0.684385 | 1 | 0.023256 | 0.219493 | 0.048065 | 0 | 0 | 0 | 0 | 0 | 1 | 0.036545 | false | 0 | 0.013289 | 0 | 0.10299 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
674221e3c533445e6fada67b19fe41496e786fe6 | 1,855 | py | Python | modelzoo/migrations/0027_auto_20201020_1434.py | SuperElastix/ElastixModelZooWebsite | 00d7b4aec8eb04c285d3771d53310079a3443fab | ["Apache-2.0"] | 1 | 2021-11-15T07:30:24.000Z | 2021-11-15T07:30:24.000Z | modelzoo/migrations/0027_auto_20201020_1434.py | SuperElastix/ElastixModelZooWebsite | 00d7b4aec8eb04c285d3771d53310079a3443fab | ["Apache-2.0"] | null | null | null | modelzoo/migrations/0027_auto_20201020_1434.py | SuperElastix/ElastixModelZooWebsite | 00d7b4aec8eb04c285d3771d53310079a3443fab | ["Apache-2.0"] | null | null | null |
# Generated by Django 3.0.3 on 2020-10-20 12:34
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('modelzoo', '0026_model_slug'),
]
operations = [
migrations.AddField(
model_name='model',
name='image1',
field=models.FileField(blank=True, default='', upload_to='uploads/'),
),
migrations.AddField(
model_name='model',
name='image2',
field=models.FileField(blank=True, default='', upload_to='uploads/'),
),
migrations.AddField(
model_name='model',
name='image3',
field=models.FileField(blank=True, default='', upload_to='uploads/'),
),
migrations.AddField(
model_name='model',
name='image4',
field=models.FileField(blank=True, default='', upload_to='uploads/'),
),
migrations.AddField(
model_name='model',
name='image5',
field=models.FileField(blank=True, default='', upload_to='uploads/'),
),
migrations.AddField(
model_name='model',
name='image6',
field=models.FileField(blank=True, default='', upload_to='uploads/'),
),
migrations.AddField(
model_name='model',
name='image7',
field=models.FileField(blank=True, default='', upload_to='uploads/'),
),
migrations.AddField(
model_name='model',
name='image8',
field=models.FileField(blank=True, default='', upload_to='uploads/'),
),
migrations.AddField(
model_name='model',
name='image9',
field=models.FileField(blank=True, default='', upload_to='uploads/'),
),
]
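# Editorial note: the nine AddField operations above differ only in the field
# name, so an equivalent hand-written migration could build them in a loop.
# This is an alternative sketch, not the generated code:
#
#   operations = [
#       migrations.AddField(
#           model_name='model',
#           name='image{0}'.format(i),
#           field=models.FileField(blank=True, default='', upload_to='uploads/'),
#       )
#       for i in range(1, 10)
#   ]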
| 31.440678 | 81 | 0.539623 | 172 | 1,855 | 5.703488 | 0.25 | 0.165138 | 0.211009 | 0.247706 | 0.798165 | 0.798165 | 0.761468 | 0.761468 | 0.761468 | 0.70948 | 0 | 0.022047 | 0.315364 | 1,855 | 58 | 82 | 31.982759 | 0.750394 | 0.024259 | 0 | 0.692308 | 1 | 0 | 0.107301 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.019231 | 0 | 0.076923 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 0 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
67773d6ef3d968b5e682f9449457348c2237c5b4 | 10,083 | py | Python | tests/do_sampler/test_pandas_do_api.py | Sid-darthvader/dowhy | 535cdb47d7eed8988573770769ecea3856180b48 | ["MIT"] | 2,904 | 2019-05-07T08:09:33.000Z | 2022-03-31T18:28:41.000Z | tests/do_sampler/test_pandas_do_api.py | Sid-darthvader/dowhy | 535cdb47d7eed8988573770769ecea3856180b48 | ["MIT"] | 238 | 2019-05-11T02:57:22.000Z | 2022-03-31T23:47:18.000Z | tests/do_sampler/test_pandas_do_api.py | Sid-darthvader/dowhy | 535cdb47d7eed8988573770769ecea3856180b48 | ["MIT"] | 527 | 2019-05-08T16:23:45.000Z | 2022-03-30T21:02:41.000Z |
import pytest
import numpy as np
import pandas as pd
import dowhy.datasets
import dowhy.api
from sklearn.linear_model import LinearRegression
@pytest.mark.usefixtures("fixed_seed")
class TestPandasDoAPI(object):
@pytest.mark.parametrize(["N", "error_tolerance"],
[(10000, 0.1),])
def test_pandas_api_discrete_cause_continuous_confounder(self, N, error_tolerance):
data = dowhy.datasets.linear_dataset(beta=10,
num_common_causes=1,
num_instruments=1,
num_samples=N,
treatment_is_binary=False)
X0 = np.random.normal(size=N)
v = (np.random.normal(size=N) + X0).astype(int)
y = data['ate']*v + X0 + np.random.normal()
data['df']['v'] = v
data['df']['X0'] = X0
data['df']['y'] = y
df = data['df'].copy()
variable_types = {'v': 'd', 'X0': 'c', 'y': 'c'}
outcome = 'y'
cause = 'v'
common_causes = 'X0'
method = 'weighting'
causal_df = df.causal.do(x=cause,
variable_types=variable_types,
outcome=outcome,
method=method,
common_causes=common_causes,
proceed_when_unidentifiable=True)
ate = (causal_df[causal_df.v == 1].mean() \
- causal_df[causal_df.v == 0].mean())['y']
error = np.abs(ate - data['ate'])
res = bool(error < data['ate'] * error_tolerance)
print("Error in ATE estimate = {0} with tolerance {1}%. Estimated={2},True={3}".format(
error, error_tolerance * 100, ate, data['ate'])
)
assert res
@pytest.mark.parametrize(["N", "error_tolerance"],
[(10000, 0.1),])
def test_pandas_api_discrete_cause_discrete_confounder(self, N, error_tolerance):
data = dowhy.datasets.linear_dataset(beta=10,
num_common_causes=1,
num_instruments=1,
num_samples=N,
treatment_is_binary=False)
X0 = np.random.normal(size=N).astype(int)
v = (np.random.normal(size=N) + X0).astype(int)
y = data['ate'] * v + X0 + np.random.normal()
data['df']['v'] = v
data['df']['X0'] = X0
data['df']['y'] = y
df = data['df'].copy()
variable_types = {'v': 'd', 'X0': 'd', 'y': 'c'}
outcome = 'y'
cause = 'v'
common_causes = 'X0'
method = 'weighting'
causal_df = df.causal.do(x=cause,
variable_types=variable_types,
outcome=outcome,
method=method,
common_causes=common_causes,
proceed_when_unidentifiable=True)
ate = (causal_df[causal_df.v == 1].mean() \
- causal_df[causal_df.v == 0].mean())['y']
print('ate', ate)
error = np.abs(ate - data['ate'])
res = bool(error < data['ate'] * error_tolerance)
print("Error in ATE estimate = {0} with tolerance {1}%. Estimated={2},True={3}".format(
error, error_tolerance * 100, ate, data['ate'])
)
assert res
@pytest.mark.parametrize(["N", "error_tolerance"],
[(10000, 0.1),])
def test_pandas_api_continuous_cause_discrete_confounder(self, N, error_tolerance):
data = dowhy.datasets.linear_dataset(beta=10,
num_common_causes=1,
num_instruments=1,
num_samples=N,
treatment_is_binary=False)
X0 = np.random.normal(size=N).astype(int)
v = np.random.normal(size=N) + X0
y = data['ate'] * v + X0 + np.random.normal()
data['df']['v'] = v
data['df']['X0'] = X0
data['df']['y'] = y
df = data['df'].copy()
variable_types = {'v': 'c', 'X0': 'd', 'y': 'c'}
outcome = 'y'
cause = 'v'
common_causes = 'X0'
method = 'weighting'
causal_df = df.causal.do(x=cause,
variable_types=variable_types,
outcome=outcome,
method=method,
common_causes=common_causes,
proceed_when_unidentifiable=True)
ate = LinearRegression().fit(causal_df[['v']], causal_df['y']).coef_[0]
print('ate', ate)
error = np.abs(ate - data['ate'])
res = bool(error < data['ate'] * error_tolerance)
print("Error in ATE estimate = {0} with tolerance {1}%. Estimated={2},True={3}".format(
error, error_tolerance * 100, ate, data['ate'])
)
assert res
@pytest.mark.parametrize(["N", "error_tolerance"],
[(10000, 0.1),])
def test_pandas_api_continuous_cause_continuous_confounder(self, N, error_tolerance):
data = dowhy.datasets.linear_dataset(beta=10,
num_common_causes=1,
num_instruments=1,
num_samples=N,
treatment_is_binary=False)
X0 = np.random.normal(size=N)
v = np.random.normal(size=N) + X0
y = data['ate'] * v + X0 + np.random.normal()
data['df']['v'] = v
data['df']['X0'] = X0
data['df']['y'] = y
df = data['df'].copy()
variable_types = {'v': 'c', 'X0': 'c', 'y': 'c'}
outcome = 'y'
cause = 'v'
common_causes = 'X0'
method = 'weighting'
causal_df = df.causal.do(x=cause,
variable_types=variable_types,
outcome=outcome,
method=method,
common_causes=common_causes,
proceed_when_unidentifiable=True)
ate = LinearRegression().fit(causal_df[['v']], causal_df['y']).coef_[0]
print('ate', ate)
error = np.abs(ate - data['ate'])
res = bool(error < data['ate'] * error_tolerance)
print("Error in ATE estimate = {0} with tolerance {1}%. Estimated={2},True={3}".format(
error, error_tolerance * 100, ate, data['ate'])
)
assert res
'''
The following three tests end with `assert True`. This is not a tautology:
the function under test raises an exception when it detects that its own
behavior is wrong, so the test fails before the assert is ever reached.
'''
@pytest.mark.parametrize(["N","variable_types"],
[(10000,{'v0': 'b', 'y': 'c', 'W0': 'c'}),])
def test_pandas_api_with_full_specification_of_type(self, N, variable_types):
data = dowhy.datasets.linear_dataset(beta=5,
num_common_causes=1,
num_instruments = 0,
num_samples=1000,
treatment_is_binary=True)
data['df'].causal.do(x='v0',
variable_types=variable_types,
outcome='y',
proceed_when_unidentifiable=True,
common_causes=['W0']).groupby('v0').mean()
assert True
@pytest.mark.parametrize(["N","variable_types"],
[(10000,{'v0': 'b', 'W0': 'c'}),])
def test_pandas_api_with_partial_specification_of_type(self, N, variable_types):
data = dowhy.datasets.linear_dataset(beta=5,
num_common_causes=1,
num_instruments = 0,
num_samples=1000,
treatment_is_binary=True)
data['df'].causal.do(x='v0',
variable_types=variable_types,
outcome='y',
proceed_when_unidentifiable=True,
common_causes=['W0']).groupby('v0').mean()
assert True
@pytest.mark.parametrize(["N","variable_types"],
[(10000,{}),])
def test_pandas_api_with_no_specification_of_type(self, N, variable_types):
data = dowhy.datasets.linear_dataset(beta=5,
num_common_causes=1,
num_instruments = 0,
num_samples=1000,
treatment_is_binary=True)
data['df'].causal.do(x='v0',
variable_types=variable_types,
outcome='y',
proceed_when_unidentifiable=True,
common_causes=['W0']).groupby('v0').mean()
assert True
@pytest.mark.parametrize(["N","variable_types"],
[(1,{'v0': 'b', 'W0': 'c'}),])
def test_pandas_api_with_dummy_data(self, N, variable_types):
df = pd.DataFrame({'x': [0,0.5,1], 'y': [1,0.5,0], 'a': [0,0.5,0], 'b': [0.25,0,0]})
dd = df.causal.do(x=['x'], outcome='y', common_causes=['a', 'b'],
proceed_when_unidentifiable=True,
variable_types=dict(x='c', y='c', a='c', b='c'))
print(dd)
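# Distilled pattern (editorial) exercised by the tests above: the pandas
# accessor samples from the interventional distribution P(y | do(v)), and the
# ATE is then estimated from the returned frame: difference of means for a
# discrete cause, regression slope for a continuous one.
#
#   interventional_df = df.causal.do(
#       x='v', outcome='y', common_causes=['X0'],
#       variable_types={'v': 'c', 'X0': 'c', 'y': 'c'},
#       method='weighting', proceed_when_unidentifiable=True)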
| 45.013393 | 95 | 0.466329 | 1,060 | 10,083 | 4.25283 | 0.130189 | 0.077862 | 0.037267 | 0.039042 | 0.879547 | 0.875111 | 0.875111 | 0.870009 | 0.870009 | 0.848935 | 0 | 0.029206 | 0.405732 | 10,083 | 223 | 96 | 45.215247 | 0.723131 | 0 | 0 | 0.835897 | 0 | 0.020513 | 0.068059 | 0.008966 | 0 | 0 | 0 | 0 | 0.035897 | 1 | 0.041026 | false | 0 | 0.030769 | 0 | 0.076923 | 0.041026 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
67ad44274dbfdb4457919913b04d7dbf66d01765 | 52,205 | py | Python | nidaqmx/_task_modules/ci_channel_collection.py | hboshnak/nidaqmx-python | b756fbd7f0c0f7deadb468d77ceacb03ed467885 | ["MIT"] | null | null | null | nidaqmx/_task_modules/ci_channel_collection.py | hboshnak/nidaqmx-python | b756fbd7f0c0f7deadb468d77ceacb03ed467885 | ["MIT"] | null | null | null | nidaqmx/_task_modules/ci_channel_collection.py | hboshnak/nidaqmx-python | b756fbd7f0c0f7deadb468d77ceacb03ed467885 | ["MIT"] | null | null | null |
# Do not edit this file; it was automatically generated.
import ctypes
import numpy
from nidaqmx._lib import lib_importer, ctypes_byte_str, c_bool32
from nidaqmx.errors import check_for_error
from nidaqmx._task_modules.channels.ci_channel import CIChannel
from nidaqmx._task_modules.channel_collection import ChannelCollection
from nidaqmx.utils import unflatten_channel_string
from nidaqmx.constants import (
AngleUnits, AngularVelocityUnits, CountDirection, CounterFrequencyMethod,
Edge, EncoderType, EncoderZIndexPhase, FrequencyUnits, GpsSignalType,
LengthUnits, TimeUnits, VelocityUnits)
class CIChannelCollection(ChannelCollection):
"""
Contains the collection of counter input channels for a DAQmx Task.
"""
def __init__(self, task_handle):
super(CIChannelCollection, self).__init__(task_handle)
def _create_chan(self, counter, name_to_assign_to_channel=''):
"""
Creates and returns a CIChannel object.
Args:
counter (str): Specifies the names of the counters to use to
create virtual channels.
name_to_assign_to_channel (Optional[str]): Specifies a name to
assign to the virtual channel this method creates.
Returns:
nidaqmx._task_modules.channels.ci_channel.CIChannel:
Specifies the newly created CIChannel object.
"""
if name_to_assign_to_channel:
num_counters = len(unflatten_channel_string(counter))
if num_counters > 1:
name = '{0}0:{1}'.format(
name_to_assign_to_channel, num_counters-1)
else:
name = name_to_assign_to_channel
else:
name = counter
return CIChannel(self._handle, name)
def add_ci_ang_encoder_chan(
self, counter, name_to_assign_to_channel="",
decoding_type=EncoderType.X_4, zidx_enable=False, zidx_val=0,
zidx_phase=EncoderZIndexPhase.AHIGH_BHIGH,
units=AngleUnits.DEGREES, pulses_per_rev=24, initial_angle=0.0,
custom_scale_name=""):
"""
Creates a channel that uses an angular encoder to measure
angular position. With the exception of devices that support
multi-counter tasks, you can create only one counter input
channel at a time with this function because a task can contain
only one counter input channel. To read from multiple counters
simultaneously, use a separate task for each counter. Connect
the input signals to the default input terminals of the counter
unless you select different input terminals.
Args:
counter (str): Specifies the name of the counter to use to
create the virtual channel. The DAQmx physical channel
constant lists all physical channels, including
counters, for devices installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
decoding_type (Optional[nidaqmx.constants.EncoderType]):
Specifies how to count and interpret the pulses the
encoder generates on signal A and signal B. **X_1**,
**X_2**, and **X_4** are valid for quadrature encoders
only. **TWO_PULSE_COUNTING** is valid only for two-pulse
encoders.
zidx_enable (Optional[bool]): Specifies whether to use Z
indexing for the channel.
zidx_val (Optional[float]): Specifies in **units** the value
to which to reset the measurement when signal Z is high
and signal A and signal B are at the states you specify
with **zidx_phase**.
zidx_phase (Optional[nidaqmx.constants.EncoderZIndexPhase]):
Specifies the states at which signal A and signal B must
be while signal Z is high for NI-DAQmx to reset the
measurement. If signal Z is never high while signal A
and signal B are high, for example, you must choose a
phase other than **A_HIGH_B_HIGH**.
units (Optional[nidaqmx.constants.AngleUnits]): Specifies
the units to use to return angular position measurements
from the channel.
pulses_per_rev (Optional[int]): Is the number of pulses the
encoder generates per revolution. This value is the
number of pulses on either signal A or signal B, not the
total number of pulses on both signal A and signal B.
initial_angle (Optional[float]): Is the starting angle of
the encoder. This value is in the units you specify with
the **units** input.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ci_channel.CIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateCIAngEncoderChan
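# Lazy one-time setup of the C signature: the argtypes check is repeated
# inside the lock (double-checked locking) so concurrent callers cannot race
# on the assignment, while the common path skips taking the lock entirely.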
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_int, c_bool32,
ctypes.c_double, ctypes.c_int, ctypes.c_int,
ctypes.c_uint, ctypes.c_double, ctypes_byte_str]
error_code = cfunc(
self._handle, counter, name_to_assign_to_channel,
decoding_type.value, zidx_enable, zidx_val, zidx_phase.value,
units.value, pulses_per_rev, initial_angle, custom_scale_name)
check_for_error(error_code)
return self._create_chan(counter, name_to_assign_to_channel)
def add_ci_ang_velocity_chan(
self, counter, name_to_assign_to_channel="", min_val=0.0,
max_val=1.0, decoding_type=EncoderType.X_4,
units=AngularVelocityUnits.RPM, pulses_per_rev=24,
custom_scale_name=""):
"""
Creates a channel to measure the angular velocity of a digital
signal. With the exception of devices that support multi-counter
tasks, you can create only one counter input channel at a time
with this function because a task can contain only one counter
input channel. To read from multiple counters simultaneously,
use a separate task for each counter. Connect the input signal
to the default input terminal of the counter unless you select a
different input terminal.
Args:
counter (str): Specifies the name of the counter to use to
create the virtual channel. The DAQmx physical channel
constant lists all physical channels, including
counters, for devices installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
decoding_type (Optional[nidaqmx.constants.EncoderType]):
Specifies how to count and interpret the pulses the
encoder generates on signal A and signal B. **X_1**,
**X_2**, and **X_4** are valid for quadrature encoders
only. **TWO_PULSE_COUNTING** is valid only for two-pulse
encoders.
units (Optional[nidaqmx.constants.AngularVelocityUnits]):
Specifies in which unit to return velocity measurements
from the channel.
pulses_per_rev (Optional[int]): Is the number of pulses the
encoder generates per revolution. This value is the
number of pulses on either signal A or signal B, not the
total number of pulses on both signal A and signal B.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ci_channel.CIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateCIAngVelocityChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_uint,
ctypes_byte_str]
error_code = cfunc(
self._handle, counter, name_to_assign_to_channel, min_val,
max_val, decoding_type.value, units.value, pulses_per_rev,
custom_scale_name)
check_for_error(error_code)
return self._create_chan(counter, name_to_assign_to_channel)
def add_ci_count_edges_chan(
self, counter, name_to_assign_to_channel="", edge=Edge.RISING,
initial_count=0, count_direction=CountDirection.COUNT_UP):
"""
Creates a channel to count the number of rising or falling edges
of a digital signal. With the exception of devices that support
multi-counter tasks, you can create only one counter input
channel at a time with this function because a task can contain
only one counter input channel. To read from multiple counters
simultaneously, use a separate task for each counter. Connect
the input signal to the default input terminal of the counter
unless you select a different input terminal.
Args:
counter (str): Specifies the name of the counter to use to
create the virtual channel. The DAQmx physical channel
constant lists all physical channels, including
counters, for devices installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
edge (Optional[nidaqmx.constants.Edge]): Specifies on which
edges of the input signal to increment or decrement the
count.
initial_count (Optional[int]): Is the value from which to
start counting.
count_direction (Optional[nidaqmx.constants.CountDirection]):
Specifies whether to increment or decrement the counter
on each edge.
Returns:
nidaqmx._task_modules.channels.ci_channel.CIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateCICountEdgesChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_int, ctypes.c_uint,
ctypes.c_int]
error_code = cfunc(
self._handle, counter, name_to_assign_to_channel, edge.value,
initial_count, count_direction.value)
check_for_error(error_code)
return self._create_chan(counter, name_to_assign_to_channel)
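# Editorial usage sketch: applications normally reach this collection through
# nidaqmx.Task rather than constructing it directly; "Dev1/ctr0" is a
# placeholder counter name for an installed device.
#
#   import nidaqmx
#   with nidaqmx.Task() as task:
#       task.ci_channels.add_ci_count_edges_chan("Dev1/ctr0")
#       task.start()
#       print(task.read())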
def add_ci_duty_cycle_chan(
self, counter, name_to_assign_to_channel="", min_freq=2.0,
max_freq=10000.0, edge=Edge.RISING, custom_scale_name=""):
"""
Creates channel(s) to measure the duty cycle of a digital pulse. Connect the
input signal to the default input terminal of the counter unless
you select a different input terminal. With the exception of
devices that support multi-counter tasks, you can create only
one counter input channel at a time with this function because a
task can contain only one counter input channel. To read from
multiple counters simultaneously, use a separate task for each
counter.
Args:
counter (str): Specifies the name of the counter to use to
create the virtual channel. The DAQmx physical channel
constant lists all physical channels, including
counters, for devices installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_freq (Optional[float]): Specifies the minimum frequency
you expect to measure.
max_freq (Optional[float]): Specifies the maximum frequency
you expect to measure.
edge (Optional[nidaqmx.constants.Edge]): Specifies between
which edges to measure the frequency or period of the
signal.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ci_channel.CIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateCIDutyCycleChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes_byte_str]
error_code = cfunc(
self._handle, counter, name_to_assign_to_channel, min_freq,
max_freq, edge.value, custom_scale_name)
check_for_error(error_code)
return self._create_chan(counter, name_to_assign_to_channel)
def add_ci_freq_chan(
self, counter, name_to_assign_to_channel="", min_val=2.0,
max_val=100.0, units=FrequencyUnits.HZ, edge=Edge.RISING,
meas_method=CounterFrequencyMethod.LOW_FREQUENCY_1_COUNTER,
meas_time=0.001, divisor=4, custom_scale_name=""):
"""
Creates a channel to measure the frequency of a digital signal.
With the exception of devices that support multi-counter tasks,
you can create only one counter input channel at a time with
this function because a task can contain only one counter input
channel. To read from multiple counters simultaneously, use a
separate task for each counter. Connect the input signal to the
default input terminal of the counter unless you select a
different input terminal.
Args:
counter (str): Specifies the name of the counter to use to
create the virtual channel. The DAQmx physical channel
constant lists all physical channels, including
counters, for devices installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.FrequencyUnits]):
Specifies the units to use to return frequency
measurements.
edge (Optional[nidaqmx.constants.Edge]): Specifies between
which edges to measure the frequency or period of the
signal.
meas_method (Optional[nidaqmx.constants.CounterFrequencyMethod]):
Specifies the method to use to calculate the period or
frequency of the signal.
meas_time (Optional[float]): Is the length of time in
seconds to measure the frequency or period of the signal
if **meas_method** is **HIGH_FREQUENCYWITH_2_COUNTERS**.
Leave this input unspecified if **meas_method** is not
**HIGH_FREQUENCYWITH_2_COUNTERS**.
divisor (Optional[int]): Is the value by which to divide the
input signal when **meas_method** is
**LARGE_RANGEWITH_2_COUNTERS**. Leave this input
unspecified if **meas_method** is not
**LARGE_RANGEWITH_2_COUNTERS**.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ci_channel.CIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateCIFreqChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_int,
ctypes.c_double, ctypes.c_uint, ctypes_byte_str]
error_code = cfunc(
self._handle, counter, name_to_assign_to_channel, min_val,
max_val, units.value, edge.value, meas_method.value, meas_time,
divisor, custom_scale_name)
check_for_error(error_code)
return self._create_chan(counter, name_to_assign_to_channel)
def add_ci_gps_timestamp_chan(
self, counter, name_to_assign_to_channel="",
units=TimeUnits.SECONDS, sync_method=GpsSignalType.IRIGB,
custom_scale_name=""):
"""
Creates a channel that uses a special purpose counter to take a
timestamp and synchronizes that counter to a GPS receiver. With
the exception of devices that support multi-counter tasks, you
can create only one counter input channel at a time with this
function because a task can contain only one counter input
channel. To read from multiple counters simultaneously, use a
separate task for each counter. Connect the input signals to the
default input terminals of the counter unless you select
different input terminals.
Args:
counter (str): Specifies the name of the counter to use to
create the virtual channel. The DAQmx physical channel
constant lists all physical channels, including
counters, for devices installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
units (Optional[nidaqmx.constants.TimeUnits]): Specifies the
units to use to return the timestamp.
sync_method (Optional[nidaqmx.constants.GpsSignalType]):
Specifies the method to use to synchronize the counter
to a GPS receiver.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ci_channel.CIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateCIGPSTimestampChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_int, ctypes.c_int,
ctypes_byte_str]
error_code = cfunc(
self._handle, counter, name_to_assign_to_channel, units.value,
sync_method.value, custom_scale_name)
check_for_error(error_code)
return self._create_chan(counter, name_to_assign_to_channel)
def add_ci_lin_encoder_chan(
self, counter, name_to_assign_to_channel="",
decoding_type=EncoderType.X_4, zidx_enable=False, zidx_val=0,
zidx_phase=EncoderZIndexPhase.AHIGH_BHIGH,
units=LengthUnits.METERS, dist_per_pulse=0.001, initial_pos=0.0,
custom_scale_name=""):
"""
Creates a channel that uses a linear encoder to measure linear
position. With the exception of devices that support multi-
counter tasks, you can create only one counter input channel at
a time with this function because a task can contain only one
counter input channel. To read from multiple counters
simultaneously, use a separate task for each counter. Connect
the input signals to the default input terminals of the counter
unless you select different input terminals.
Args:
counter (str): Specifies the name of the counter to use to
create the virtual channel. The DAQmx physical channel
constant lists all physical channels, including
counters, for devices installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
decoding_type (Optional[nidaqmx.constants.EncoderType]):
Specifies how to count and interpret the pulses the
encoder generates on signal A and signal B. **X_1**,
**X_2**, and **X_4** are valid for quadrature encoders
only. **TWO_PULSE_COUNTING** is valid only for two-pulse
encoders.
zidx_enable (Optional[bool]): Specifies whether to use Z
indexing for the channel.
zidx_val (Optional[float]): Specifies in **units** the value
to which to reset the measurement when signal Z is high
and signal A and signal B are at the states you specify
with **zidx_phase**.
zidx_phase (Optional[nidaqmx.constants.EncoderZIndexPhase]):
Specifies the states at which signal A and signal B must
be while signal Z is high for NI-DAQmx to reset the
measurement. If signal Z is never high while signal A
and signal B are high, for example, you must choose a
phase other than **A_HIGH_B_HIGH**.
units (Optional[nidaqmx.constants.LengthUnits]): Specifies
the units to use to return linear position measurements
from the channel.
dist_per_pulse (Optional[float]): Is the distance to measure
for each pulse the encoder generates on signal A or
signal B. This value is in the units you specify with
the **units** input.
initial_pos (Optional[float]): Is the position of the
encoder when you begin the measurement. This value is in
the units you specify with the **units** input.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ci_channel.CIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateCILinEncoderChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_int, c_bool32,
ctypes.c_double, ctypes.c_int, ctypes.c_int,
ctypes.c_double, ctypes.c_double, ctypes_byte_str]
error_code = cfunc(
self._handle, counter, name_to_assign_to_channel,
decoding_type.value, zidx_enable, zidx_val, zidx_phase.value,
units.value, dist_per_pulse, initial_pos, custom_scale_name)
check_for_error(error_code)
return self._create_chan(counter, name_to_assign_to_channel)
def add_ci_lin_velocity_chan(
self, counter, name_to_assign_to_channel="", min_val=0.0,
max_val=1.0, decoding_type=EncoderType.X_4,
units=VelocityUnits.METERS_PER_SECOND, dist_per_pulse=0.001,
custom_scale_name=""):
"""
Creates a channel that uses a linear encoder to measure linear
velocity. With the exception of devices that support multi-
counter tasks, you can create only one counter input channel at
a time with this function because a task can contain only one
counter input channel. To read from multiple counters
simultaneously, use a separate task for each counter. Connect
the input signal to the default input terminal of the counter
unless you select a different input terminal.
Args:
counter (str): Specifies the name of the counter to use to
create the virtual channel. The DAQmx physical channel
constant lists all physical channels, including
counters, for devices installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
decoding_type (Optional[nidaqmx.constants.EncoderType]):
Specifies how to count and interpret the pulses the
encoder generates on signal A and signal B. **X_1**,
**X_2**, and **X_4** are valid for quadrature encoders
only. **TWO_PULSE_COUNTING** is valid only for two-pulse
encoders.
units (Optional[nidaqmx.constants.VelocityUnits]): Specifies
in which unit to return velocity measurements from the
channel.
dist_per_pulse (Optional[float]): Is the distance to measure
for each pulse the encoder generates on signal A or
signal B. This value is in the units you specify with
the **units** input.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ci_channel.CIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateCILinVelocityChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_double,
ctypes_byte_str]
error_code = cfunc(
self._handle, counter, name_to_assign_to_channel, min_val,
max_val, decoding_type.value, units.value, dist_per_pulse,
custom_scale_name)
check_for_error(error_code)
return self._create_chan(counter, name_to_assign_to_channel)
def add_ci_period_chan(
self, counter, name_to_assign_to_channel="", min_val=0.000001,
max_val=0.1, units=TimeUnits.SECONDS, edge=Edge.RISING,
meas_method=CounterFrequencyMethod.LOW_FREQUENCY_1_COUNTER,
meas_time=0.001, divisor=4, custom_scale_name=""):
"""
Creates a channel to measure the period of a digital signal.
With the exception of devices that support multi-counter tasks,
you can create only one counter input channel at a time with
this function because a task can contain only one counter input
channel. To read from multiple counters simultaneously, use a
separate task for each counter. Connect the input signal to the
default input terminal of the counter unless you select a
different input terminal.
Args:
counter (str): Specifies the name of the counter to use to
create the virtual channel. The DAQmx physical channel
constant lists all physical channels, including
counters, for devices installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.TimeUnits]): Specifies the
units to use to return time or period measurements.
edge (Optional[nidaqmx.constants.Edge]): Specifies between
which edges to measure the frequency or period of the
signal.
meas_method (Optional[nidaqmx.constants.CounterFrequencyMethod]):
Specifies the method to use to calculate the period or
frequency of the signal.
meas_time (Optional[float]): Is the length of time in
seconds to measure the frequency or period of the signal
if **meas_method** is **HIGH_FREQUENCYWITH_2_COUNTERS**.
Leave this input unspecified if **meas_method** is not
**HIGH_FREQUENCYWITH_2_COUNTERS**.
divisor (Optional[int]): Is the value by which to divide the
input signal when **meas_method** is
**LARGE_RANGEWITH_2_COUNTERS**. Leave this input
unspecified if **meas_method** is not
**LARGE_RANGEWITH_2_COUNTERS**.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ci_channel.CIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateCIPeriodChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_int,
ctypes.c_double, ctypes.c_uint, ctypes_byte_str]
error_code = cfunc(
self._handle, counter, name_to_assign_to_channel, min_val,
max_val, units.value, edge.value, meas_method.value, meas_time,
divisor, custom_scale_name)
check_for_error(error_code)
return self._create_chan(counter, name_to_assign_to_channel)
def add_ci_pulse_chan_freq(
self, counter, name_to_assign_to_channel="", min_val=1000,
max_val=1000000, units=FrequencyUnits.HZ):
"""
Creates a channel to measure pulse specifications, returning the
measurements as pairs of frequency and duty cycle. With the
exception of devices that support multi-counter tasks, you can
create only one counter input channel at a time with this
function because a task can contain only one counter input
channel. To read from multiple counters simultaneously, use a
separate task for each counter. Connect the input signal to the
default input terminal of the counter unless you select a
different input terminal.
Args:
counter (str): Specifies the name of the counter to use to
create the virtual channel. The DAQmx physical channel
constant lists all physical channels, including
counters, for devices installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.FrequencyUnits]):
Specifies the units to use to return pulse
specifications in terms of frequency.
Returns:
nidaqmx._task_modules.channels.ci_channel.CIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateCIPulseChanFreq
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int]
error_code = cfunc(
self._handle, counter, name_to_assign_to_channel, min_val,
max_val, units.value)
check_for_error(error_code)
return self._create_chan(counter, name_to_assign_to_channel)
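# Illustrative usage sketch (assumes counter "Dev1/ctr0" exists; on
# recent nidaqmx versions task.read() on a pulse-frequency channel
# returns a CtrFreq(freq, duty_cycle) value):
#
#     import nidaqmx
#
#     with nidaqmx.Task() as task:
#         task.ci_channels.add_ci_pulse_chan_freq("Dev1/ctr0")
#         sample = task.read()  # frequency/duty-cycle pair
#         print(sample.freq, sample.duty_cycle)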
def add_ci_pulse_chan_ticks(
self, counter, name_to_assign_to_channel="",
source_terminal="OnboardClock", min_val=1000, max_val=1000000):
"""
Creates a channel to measure pulse specifications, returning the
measurements as pairs of high ticks and low ticks. With the
exception of devices that support multi-counter tasks, you can
create only one counter input channel at a time with this
function because a task can contain only one counter input
channel. To read from multiple counters simultaneously, use a
separate task for each counter. Connect the input signal to the
default input terminal of the counter unless you select a
different input terminal.
Args:
counter (str): Specifies the name of the counter to use to
create the virtual channel. The DAQmx physical channel
constant lists all physical channels, including
counters, for devices installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
source_terminal (Optional[str]): Is the terminal to which
you connect a signal to use as the source of ticks. A
DAQmx terminal constant lists all terminals available on
devices installed in the system. You also can specify a
source terminal by specifying a string that contains a
terminal name. If you specify OnboardClock, or do not
specify any terminal, NI-DAQmx selects the fastest
onboard timebase available on the device.
min_val (Optional[float]): Specifies the minimum value, in
ticks, you expect to measure.
max_val (Optional[float]): Specifies the maximum value, in
ticks, you expect to measure.
Returns:
nidaqmx._task_modules.channels.ci_channel.CIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateCIPulseChanTicks
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes_byte_str, ctypes.c_double,
ctypes.c_double]
error_code = cfunc(
self._handle, counter, name_to_assign_to_channel, source_terminal,
min_val, max_val)
check_for_error(error_code)
return self._create_chan(counter, name_to_assign_to_channel)
def add_ci_pulse_chan_time(
self, counter, name_to_assign_to_channel="", min_val=0.000001,
max_val=0.001, units=TimeUnits.SECONDS):
"""
Creates a channel to measure pulse specifications, returning the
measurements as pairs of high time and low time. With the
exception of devices that support multi-counter tasks, you can
create only one counter input channel at a time with this
function because a task can contain only one counter input
channel. To read from multiple counters simultaneously, use a
separate task for each counter. Connect the input signal to the
default input terminal of the counter unless you select a
different input terminal.
Args:
counter (str): Specifies the name of the counter to use to
create the virtual channel. The DAQmx physical channel
constant lists all physical channels, including
counters, for devices installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.TimeUnits]): Specifies the
units to use to return pulse specifications in terms of
high time and low time.
Returns:
nidaqmx._task_modules.channels.ci_channel.CIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateCIPulseChanTime
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int]
error_code = cfunc(
self._handle, counter, name_to_assign_to_channel, min_val,
max_val, units.value)
check_for_error(error_code)
return self._create_chan(counter, name_to_assign_to_channel)
def add_ci_pulse_width_chan(
self, counter, name_to_assign_to_channel="", min_val=0.000001,
max_val=0.1, units=TimeUnits.SECONDS, starting_edge=Edge.RISING,
custom_scale_name=""):
"""
Creates a channel to measure the width of a digital pulse.
**starting_edge** determines whether to measure a high pulse or
low pulse. With the exception of devices that support multi-
counter tasks, you can create only one counter input channel at
a time with this function because a task can contain only one
counter input channel. To read from multiple counters
simultaneously, use a separate task for each counter. Connect
the input signal to the default input terminal of the counter
unless you select a different input terminal.
Args:
counter (str): Specifies the name of the counter to use to
create the virtual channel. The DAQmx physical channel
constant lists all physical channels, including
counters, for devices installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.TimeUnits]): Specifies the
units to use to return time or period measurements.
starting_edge (Optional[nidaqmx.constants.Edge]): Specifies
on which edge to begin measuring pulse width.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ci_channel.CIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateCIPulseWidthChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes_byte_str]
error_code = cfunc(
self._handle, counter, name_to_assign_to_channel, min_val,
max_val, units.value, starting_edge.value, custom_scale_name)
check_for_error(error_code)
return self._create_chan(counter, name_to_assign_to_channel)
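# Illustrative usage sketch: measuring the width of a high pulse
# (device and counter names are placeholders):
#
#     import nidaqmx
#     from nidaqmx.constants import Edge
#
#     with nidaqmx.Task() as task:
#         task.ci_channels.add_ci_pulse_width_chan(
#             "Dev1/ctr0", starting_edge=Edge.RISING)
#         width = task.read()  # high-pulse width, in seconds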
def add_ci_semi_period_chan(
self, counter, name_to_assign_to_channel="", min_val=0.000001,
max_val=0.1, units=TimeUnits.SECONDS, custom_scale_name=""):
"""
Creates a channel to measure the time between state transitions
of a digital signal. With the exception of devices that support
multi-counter tasks, you can create only one counter input
channel at a time with this function because a task can contain
only one counter input channel. To read from multiple counters
simultaneously, use a separate task for each counter. Connect
the input signal to the default input terminal of the counter
unless you select a different input terminal.
Args:
counter (str): Specifies the name of the counter to use to
create the virtual channel. The DAQmx physical channel
constant lists all physical channels, including
counters, for devices installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.TimeUnits]): Specifies the
units to use to return time or period measurements.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ci_channel.CIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateCISemiPeriodChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes_byte_str]
error_code = cfunc(
self._handle, counter, name_to_assign_to_channel, min_val,
max_val, units.value, custom_scale_name)
check_for_error(error_code)
return self._create_chan(counter, name_to_assign_to_channel)
def add_ci_two_edge_sep_chan(
self, counter, name_to_assign_to_channel="", min_val=0.000001,
max_val=1.0, units=TimeUnits.SECONDS, first_edge=Edge.RISING,
second_edge=Edge.FALLING, custom_scale_name=""):
"""
Creates a channel that measures the amount of time between the
rising or falling edge of one digital signal and the rising or
falling edge of another digital signal. With the exception of
devices that support multi-counter tasks, you can create only
one counter input channel at a time with this function because a
task can contain only one counter input channel. To read from
multiple counters simultaneously, use a separate task for each
counter. Connect the input signals to the default input
terminals of the counter unless you select different input
terminals.
Args:
counter (str): Specifies the name of the counter to use to
create the virtual channel. The DAQmx physical channel
constant lists all physical channels, including
counters, for devices installed in the system.
name_to_assign_to_channel (Optional[str]): Specifies a name
to assign to the virtual channel this function creates.
If you do not specify a value for this input, NI-DAQmx
uses the physical channel name as the virtual channel
name.
min_val (Optional[float]): Specifies in **units** the
minimum value you expect to measure.
max_val (Optional[float]): Specifies in **units** the
maximum value you expect to measure.
units (Optional[nidaqmx.constants.TimeUnits]): Specifies the
units to use to return time or period measurements.
first_edge (Optional[nidaqmx.constants.Edge]): Specifies on
which edge of the first signal to start each
measurement.
second_edge (Optional[nidaqmx.constants.Edge]): Specifies on
which edge of the second signal to stop each
measurement.
custom_scale_name (Optional[str]): Specifies the name of a
custom scale for the channel. If you want the channel to
use a custom scale, specify the name of the custom scale
to this input and set **units** to
**FROM_CUSTOM_SCALE**.
Returns:
nidaqmx._task_modules.channels.ci_channel.CIChannel:
Indicates the newly created channel object.
"""
cfunc = lib_importer.windll.DAQmxCreateCITwoEdgeSepChan
if cfunc.argtypes is None:
with cfunc.arglock:
if cfunc.argtypes is None:
cfunc.argtypes = [
lib_importer.task_handle, ctypes_byte_str,
ctypes_byte_str, ctypes.c_double, ctypes.c_double,
ctypes.c_int, ctypes.c_int, ctypes.c_int,
ctypes_byte_str]
error_code = cfunc(
self._handle, counter, name_to_assign_to_channel, min_val,
max_val, units.value, first_edge.value, second_edge.value,
custom_scale_name)
check_for_error(error_code)
return self._create_chan(counter, name_to_assign_to_channel)
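# Illustrative usage sketch: timing from a rising edge on one signal to
# a falling edge on another (counter name is a placeholder):
#
#     import nidaqmx
#     from nidaqmx.constants import Edge
#
#     with nidaqmx.Task() as task:
#         task.ci_channels.add_ci_two_edge_sep_chan(
#             "Dev1/ctr0", first_edge=Edge.RISING,
#             second_edge=Edge.FALLING)
#         separation = task.read()  # edge separation, in seconds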
| 51.636993
| 78
| 0.624978
| 6,478
| 52,205
| 4.878666
| 0.049244
| 0.015378
| 0.030756
| 0.035882
| 0.899633
| 0.888558
| 0.881344
| 0.874067
| 0.865365
| 0.856063
| 0
| 0.004454
| 0.32472
| 52,205
| 1,010
| 79
| 51.688119
| 0.892038
| 0.601973
| 0
| 0.672297
| 1
| 0
| 0.001278
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057432
| false
| 0
| 0.128378
| 0
| 0.243243
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 67b27458821e3254e729ed5e5edd2d2741f19ada
| 169
| py
| Python
| cerebunit/validation_tests/cells/__init__.py
| myHBPwork/cerebunit
| 8d986953652231c178e48a6272b6ba583ae1df96
| ["BSD-3-Clause"] | null | null | null
| cerebunit/validation_tests/cells/__init__.py
| myHBPwork/cerebunit
| 8d986953652231c178e48a6272b6ba583ae1df96
| ["BSD-3-Clause"] | null | null | null
| cerebunit/validation_tests/cells/__init__.py
| myHBPwork/cerebunit
| 8d986953652231c178e48a6272b6ba583ae1df96
| ["BSD-3-Clause"] | 1
| 2021-05-21T03:06:37.000Z
| 2021-05-21T03:06:37.000Z
|
# ~/cerebunit/cerebunit/validation_tests/cells/__init__.py
#from cerebunit.validation_tests.cells import general
#from . import PurkinjeCell
#from . import GranularCell
| 33.8
| 58
| 0.828402
| 20
| 169
| 6.7
| 0.55
| 0.283582
| 0.358209
| 0.432836
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08284
| 169
| 4
| 59
| 42.25
| 0.864516
| 0.946746
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 67cd94f7494fcc9ea863a50152fc2a2e5c519c28
| 132,265
| py
| Python
| py/gen_py/test/userService.py
| AlohaPoster/MyActilife_win
| 7922839b6444f63f87b8f9584ad5d89101fa8432
| ["ISC"] | null | null | null
| py/gen_py/test/userService.py
| AlohaPoster/MyActilife_win
| 7922839b6444f63f87b8f9584ad5d89101fa8432
| ["ISC"] | null | null | null
| py/gen_py/test/userService.py
| AlohaPoster/MyActilife_win
| 7922839b6444f63f87b8f9584ad5d89101fa8432
| ["ISC"] | null | null | null |
#
# Autogenerated by Thrift Compiler (0.13.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TFrozenDict, TException, TApplicationException
from thrift.protocol.TProtocol import TProtocolException
from thrift.TRecursive import fix_spec
import sys
import logging
from .ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
all_structs = []
class Iface(object):
def log_test(self, info):
"""
Parameters:
- info
"""
pass
def registe(self, info):
"""
Parameters:
- info
"""
pass
def getsdreader(self, info):
"""
Parameters:
- info
"""
pass
def getdevice(self, info):
"""
Parameters:
- info
"""
pass
def gettime(self, info):
"""
Parameters:
- info
"""
pass
def retime(self, info):
"""
Parameters:
- info
"""
pass
def getpower(self, info):
"""
Parameters:
- info
"""
pass
def getdataresult(self, info):
"""
Parameters:
- info
"""
pass
def getuser(self, info):
"""
Parameters:
- info
"""
pass
def getroles(self, info):
"""
Parameters:
- info
"""
pass
def getaccessjson(self, info):
"""
Parameters:
- info
"""
pass
def deleteuser(self, info):
"""
Parameters:
- info
"""
pass
def deleterole(self, info):
"""
Parameters:
- info
"""
pass
def userchangerole(self, info):
"""
Parameters:
- info
"""
pass
def rolechangeaccess(self, info):
"""
Parameters:
- info
"""
pass
def downloadresources(self, info):
"""
Parameters:
- info
"""
pass
def downloadinstruction(self, info):
"""
Parameters:
- info
"""
pass
def getmineresult(self, info):
"""
Parameters:
- info
"""
pass
def getconditionresult(self, info):
"""
Parameters:
- info
"""
pass
def newuser(self, info):
"""
Parameters:
- info
"""
pass
def newrole(self, info):
"""
Parameters:
- info
"""
pass
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def log_test(self, info):
"""
Parameters:
- info
"""
self.send_log_test(info)
return self.recv_log_test()
def send_log_test(self, info):
self._oprot.writeMessageBegin('log_test', TMessageType.CALL, self._seqid)
args = log_test_args()
args.info = info
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_log_test(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = log_test_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "log_test failed: unknown result")
def registe(self, info):
"""
Parameters:
- info
"""
self.send_registe(info)
return self.recv_registe()
def send_registe(self, info):
self._oprot.writeMessageBegin('registe', TMessageType.CALL, self._seqid)
args = registe_args()
args.info = info
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_registe(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = registe_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "registe failed: unknown result")
def getsdreader(self, info):
"""
Parameters:
- info
"""
self.send_getsdreader(info)
return self.recv_getsdreader()
def send_getsdreader(self, info):
self._oprot.writeMessageBegin('getsdreader', TMessageType.CALL, self._seqid)
args = getsdreader_args()
args.info = info
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getsdreader(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getsdreader_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "getsdreader failed: unknown result")
def getdevice(self, info):
"""
Parameters:
- info
"""
self.send_getdevice(info)
return self.recv_getdevice()
def send_getdevice(self, info):
self._oprot.writeMessageBegin('getdevice', TMessageType.CALL, self._seqid)
args = getdevice_args()
args.info = info
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getdevice(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getdevice_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "getdevice failed: unknown result")
def gettime(self, info):
"""
Parameters:
- info
"""
self.send_gettime(info)
return self.recv_gettime()
def send_gettime(self, info):
self._oprot.writeMessageBegin('gettime', TMessageType.CALL, self._seqid)
args = gettime_args()
args.info = info
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_gettime(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = gettime_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "gettime failed: unknown result")
def retime(self, info):
"""
Parameters:
- info
"""
self.send_retime(info)
return self.recv_retime()
def send_retime(self, info):
self._oprot.writeMessageBegin('retime', TMessageType.CALL, self._seqid)
args = retime_args()
args.info = info
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_retime(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = retime_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "retime failed: unknown result")
def getpower(self, info):
"""
Parameters:
- info
"""
self.send_getpower(info)
return self.recv_getpower()
def send_getpower(self, info):
self._oprot.writeMessageBegin('getpower', TMessageType.CALL, self._seqid)
args = getpower_args()
args.info = info
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getpower(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getpower_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "getpower failed: unknown result")
def getdataresult(self, info):
"""
Parameters:
- info
"""
self.send_getdataresult(info)
return self.recv_getdataresult()
def send_getdataresult(self, info):
self._oprot.writeMessageBegin('getdataresult', TMessageType.CALL, self._seqid)
args = getdataresult_args()
args.info = info
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getdataresult(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getdataresult_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "getdataresult failed: unknown result")
def getuser(self, info):
"""
Parameters:
- info
"""
self.send_getuser(info)
return self.recv_getuser()
def send_getuser(self, info):
self._oprot.writeMessageBegin('getuser', TMessageType.CALL, self._seqid)
args = getuser_args()
args.info = info
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getuser(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getuser_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "getuser failed: unknown result")
def getroles(self, info):
"""
Parameters:
- info
"""
self.send_getroles(info)
return self.recv_getroles()
def send_getroles(self, info):
self._oprot.writeMessageBegin('getroles', TMessageType.CALL, self._seqid)
args = getroles_args()
args.info = info
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getroles(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getroles_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "getroles failed: unknown result")
def getaccessjson(self, info):
"""
Parameters:
- info
"""
self.send_getaccessjson(info)
return self.recv_getaccessjson()
def send_getaccessjson(self, info):
self._oprot.writeMessageBegin('getaccessjson', TMessageType.CALL, self._seqid)
args = getaccessjson_args()
args.info = info
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getaccessjson(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getaccessjson_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "getaccessjson failed: unknown result")
def deleteuser(self, info):
"""
Parameters:
- info
"""
self.send_deleteuser(info)
return self.recv_deleteuser()
def send_deleteuser(self, info):
self._oprot.writeMessageBegin('deleteuser', TMessageType.CALL, self._seqid)
args = deleteuser_args()
args.info = info
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_deleteuser(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = deleteuser_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "deleteuser failed: unknown result")
def deleterole(self, info):
"""
Parameters:
- info
"""
self.send_deleterole(info)
return self.recv_deleterole()
def send_deleterole(self, info):
self._oprot.writeMessageBegin('deleterole', TMessageType.CALL, self._seqid)
args = deleterole_args()
args.info = info
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_deleterole(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = deleterole_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "deleterole failed: unknown result")
def userchangerole(self, info):
"""
Parameters:
- info
"""
self.send_userchangerole(info)
return self.recv_userchangerole()
def send_userchangerole(self, info):
self._oprot.writeMessageBegin('userchangerole', TMessageType.CALL, self._seqid)
args = userchangerole_args()
args.info = info
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_userchangerole(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = userchangerole_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "userchangerole failed: unknown result")
def rolechangeaccess(self, info):
"""
Parameters:
- info
"""
self.send_rolechangeaccess(info)
return self.recv_rolechangeaccess()
def send_rolechangeaccess(self, info):
self._oprot.writeMessageBegin('rolechangeaccess', TMessageType.CALL, self._seqid)
args = rolechangeaccess_args()
args.info = info
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_rolechangeaccess(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = rolechangeaccess_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "rolechangeaccess failed: unknown result")
def downloadresources(self, info):
"""
Parameters:
- info
"""
self.send_downloadresources(info)
return self.recv_downloadresources()
def send_downloadresources(self, info):
self._oprot.writeMessageBegin('downloadresources', TMessageType.CALL, self._seqid)
args = downloadresources_args()
args.info = info
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_downloadresources(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = downloadresources_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "downloadresources failed: unknown result")
def downloadinstruction(self, info):
"""
Parameters:
- info
"""
self.send_downloadinstruction(info)
return self.recv_downloadinstruction()
def send_downloadinstruction(self, info):
self._oprot.writeMessageBegin('downloadinstruction', TMessageType.CALL, self._seqid)
args = downloadinstruction_args()
args.info = info
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_downloadinstruction(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = downloadinstruction_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "downloadinstruction failed: unknown result")
def getmineresult(self, info):
"""
Parameters:
- info
"""
self.send_getmineresult(info)
return self.recv_getmineresult()
def send_getmineresult(self, info):
self._oprot.writeMessageBegin('getmineresult', TMessageType.CALL, self._seqid)
args = getmineresult_args()
args.info = info
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getmineresult(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getmineresult_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "getmineresult failed: unknown result")
def getconditionresult(self, info):
"""
Parameters:
- info
"""
self.send_getconditionresult(info)
return self.recv_getconditionresult()
def send_getconditionresult(self, info):
self._oprot.writeMessageBegin('getconditionresult', TMessageType.CALL, self._seqid)
args = getconditionresult_args()
args.info = info
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_getconditionresult(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = getconditionresult_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "getconditionresult failed: unknown result")
def newuser(self, info):
"""
Parameters:
- info
"""
self.send_newuser(info)
return self.recv_newuser()
def send_newuser(self, info):
self._oprot.writeMessageBegin('newuser', TMessageType.CALL, self._seqid)
args = newuser_args()
args.info = info
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_newuser(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = newuser_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "newuser failed: unknown result")
def newrole(self, info):
"""
Parameters:
- info
"""
self.send_newrole(info)
return self.recv_newrole()
def send_newrole(self, info):
self._oprot.writeMessageBegin('newrole', TMessageType.CALL, self._seqid)
args = newrole_args()
args.info = info
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_newrole(self):
iprot = self._iprot
(fname, mtype, rseqid) = iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(iprot)
iprot.readMessageEnd()
raise x
result = newrole_result()
result.read(iprot)
iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "newrole failed: unknown result")
class Processor(Iface, TProcessor):
def __init__(self, handler):
self._handler = handler
self._processMap = {}
self._processMap["log_test"] = Processor.process_log_test
self._processMap["registe"] = Processor.process_registe
self._processMap["getsdreader"] = Processor.process_getsdreader
self._processMap["getdevice"] = Processor.process_getdevice
self._processMap["gettime"] = Processor.process_gettime
self._processMap["retime"] = Processor.process_retime
self._processMap["getpower"] = Processor.process_getpower
self._processMap["getdataresult"] = Processor.process_getdataresult
self._processMap["getuser"] = Processor.process_getuser
self._processMap["getroles"] = Processor.process_getroles
self._processMap["getaccessjson"] = Processor.process_getaccessjson
self._processMap["deleteuser"] = Processor.process_deleteuser
self._processMap["deleterole"] = Processor.process_deleterole
self._processMap["userchangerole"] = Processor.process_userchangerole
self._processMap["rolechangeaccess"] = Processor.process_rolechangeaccess
self._processMap["downloadresources"] = Processor.process_downloadresources
self._processMap["downloadinstruction"] = Processor.process_downloadinstruction
self._processMap["getmineresult"] = Processor.process_getmineresult
self._processMap["getconditionresult"] = Processor.process_getconditionresult
self._processMap["newuser"] = Processor.process_newuser
self._processMap["newrole"] = Processor.process_newrole
self._on_message_begin = None
def on_message_begin(self, func):
self._on_message_begin = func
def process(self, iprot, oprot):
(name, type, seqid) = iprot.readMessageBegin()
if self._on_message_begin:
self._on_message_begin(name, type, seqid)
if name not in self._processMap:
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
x.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
return
else:
self._processMap[name](self, seqid, iprot, oprot)
return True
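# Illustrative server hosting sketch for this Processor (handler is any
# object implementing Iface; the port is a placeholder):
#
#     from thrift.transport import TSocket, TTransport
#     from thrift.protocol import TBinaryProtocol
#     from thrift.server import TServer
#
#     server = TServer.TSimpleServer(
#         Processor(handler),
#         TSocket.TServerSocket(port=9090),
#         TTransport.TBufferedTransportFactory(),
#         TBinaryProtocol.TBinaryProtocolFactory())
#     server.serve()  # blocks; dispatches each RPC through process() above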
def process_log_test(self, seqid, iprot, oprot):
args = log_test_args()
args.read(iprot)
iprot.readMessageEnd()
result = log_test_result()
try:
result.success = self._handler.log_test(args.info)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("log_test", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_registe(self, seqid, iprot, oprot):
args = registe_args()
args.read(iprot)
iprot.readMessageEnd()
result = registe_result()
try:
result.success = self._handler.registe(args.info)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("registe", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getsdreader(self, seqid, iprot, oprot):
args = getsdreader_args()
args.read(iprot)
iprot.readMessageEnd()
result = getsdreader_result()
try:
result.success = self._handler.getsdreader(args.info)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getsdreader", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getdevice(self, seqid, iprot, oprot):
args = getdevice_args()
args.read(iprot)
iprot.readMessageEnd()
result = getdevice_result()
try:
result.success = self._handler.getdevice(args.info)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getdevice", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_gettime(self, seqid, iprot, oprot):
args = gettime_args()
args.read(iprot)
iprot.readMessageEnd()
result = gettime_result()
try:
result.success = self._handler.gettime(args.info)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("gettime", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_retime(self, seqid, iprot, oprot):
args = retime_args()
args.read(iprot)
iprot.readMessageEnd()
result = retime_result()
try:
result.success = self._handler.retime(args.info)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("retime", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getpower(self, seqid, iprot, oprot):
args = getpower_args()
args.read(iprot)
iprot.readMessageEnd()
result = getpower_result()
try:
result.success = self._handler.getpower(args.info)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getpower", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getdataresult(self, seqid, iprot, oprot):
args = getdataresult_args()
args.read(iprot)
iprot.readMessageEnd()
result = getdataresult_result()
try:
result.success = self._handler.getdataresult(args.info)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getdataresult", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getuser(self, seqid, iprot, oprot):
args = getuser_args()
args.read(iprot)
iprot.readMessageEnd()
result = getuser_result()
try:
result.success = self._handler.getuser(args.info)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getuser", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getroles(self, seqid, iprot, oprot):
args = getroles_args()
args.read(iprot)
iprot.readMessageEnd()
result = getroles_result()
try:
result.success = self._handler.getroles(args.info)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getroles", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getaccessjson(self, seqid, iprot, oprot):
args = getaccessjson_args()
args.read(iprot)
iprot.readMessageEnd()
result = getaccessjson_result()
try:
result.success = self._handler.getaccessjson(args.info)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getaccessjson", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_deleteuser(self, seqid, iprot, oprot):
args = deleteuser_args()
args.read(iprot)
iprot.readMessageEnd()
result = deleteuser_result()
try:
result.success = self._handler.deleteuser(args.info)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("deleteuser", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_deleterole(self, seqid, iprot, oprot):
args = deleterole_args()
args.read(iprot)
iprot.readMessageEnd()
result = deleterole_result()
try:
result.success = self._handler.deleterole(args.info)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("deleterole", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_userchangerole(self, seqid, iprot, oprot):
args = userchangerole_args()
args.read(iprot)
iprot.readMessageEnd()
result = userchangerole_result()
try:
result.success = self._handler.userchangerole(args.info)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("userchangerole", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_rolechangeaccess(self, seqid, iprot, oprot):
args = rolechangeaccess_args()
args.read(iprot)
iprot.readMessageEnd()
result = rolechangeaccess_result()
try:
result.success = self._handler.rolechangeaccess(args.info)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("rolechangeaccess", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_downloadresources(self, seqid, iprot, oprot):
args = downloadresources_args()
args.read(iprot)
iprot.readMessageEnd()
result = downloadresources_result()
try:
result.success = self._handler.downloadresources(args.info)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("downloadresources", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_downloadinstruction(self, seqid, iprot, oprot):
args = downloadinstruction_args()
args.read(iprot)
iprot.readMessageEnd()
result = downloadinstruction_result()
try:
result.success = self._handler.downloadinstruction(args.info)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("downloadinstruction", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getmineresult(self, seqid, iprot, oprot):
args = getmineresult_args()
args.read(iprot)
iprot.readMessageEnd()
result = getmineresult_result()
try:
result.success = self._handler.getmineresult(args.info)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getmineresult", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_getconditionresult(self, seqid, iprot, oprot):
args = getconditionresult_args()
args.read(iprot)
iprot.readMessageEnd()
result = getconditionresult_result()
try:
result.success = self._handler.getconditionresult(args.info)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("getconditionresult", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_newuser(self, seqid, iprot, oprot):
args = newuser_args()
args.read(iprot)
iprot.readMessageEnd()
result = newuser_result()
try:
result.success = self._handler.newuser(args.info)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("newuser", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_newrole(self, seqid, iprot, oprot):
args = newrole_args()
args.read(iprot)
iprot.readMessageEnd()
result = newrole_result()
try:
result.success = self._handler.newrole(args.info)
msg_type = TMessageType.REPLY
except TTransport.TTransportException:
raise
except TApplicationException as ex:
logging.exception('TApplication exception in handler')
msg_type = TMessageType.EXCEPTION
result = ex
except Exception:
logging.exception('Unexpected exception in handler')
msg_type = TMessageType.EXCEPTION
result = TApplicationException(TApplicationException.INTERNAL_ERROR, 'Internal error')
oprot.writeMessageBegin("newrole", msg_type, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class log_test_args(object):
"""
Attributes:
- info
"""
def __init__(self, info=None,):
self.info = info
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.info = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('log_test_args')
if self.info is not None:
oprot.writeFieldBegin('info', TType.STRING, 1)
oprot.writeString(self.info.encode('utf-8') if sys.version_info[0] == 2 else self.info)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(log_test_args)
log_test_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'info', 'UTF8', None, ), # 1
)
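# thrift_spec layout, shared by every generated struct below: each entry
# is (field id, wire type, field name, type arguments, default value);
# index 0 is None in *_args specs because argument field ids start at 1,
# while *_result specs use index 0 for the return value.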
class log_test_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('log_test_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(log_test_result)
log_test_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
class registe_args(object):
"""
Attributes:
- info
"""
def __init__(self, info=None,):
self.info = info
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.info = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('registe_args')
if self.info is not None:
oprot.writeFieldBegin('info', TType.STRING, 1)
oprot.writeString(self.info.encode('utf-8') if sys.version_info[0] == 2 else self.info)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(registe_args)
registe_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'info', 'UTF8', None, ), # 1
)
class registe_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('registe_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(registe_result)
registe_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
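# Editorial note: the `_fast_encode is not None` / `_fast_decode is not None`
# guard at the top of every write()/read() selects the accelerated C path.
# It only fires when the protocol was built with the compiled extension,
# e.g. (standard runtime class, shown as a sketch):
#
#     from thrift.protocol.TBinaryProtocol import TBinaryProtocolAccelerated
#     proto = TBinaryProtocolAccelerated(transport)
#
# With a plain TBinaryProtocol those attributes are None and the pure-Python
# field-by-field loop below each guard runs instead.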
class getsdreader_args(object):
"""
Attributes:
- info
"""
def __init__(self, info=None,):
self.info = info
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.info = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getsdreader_args')
if self.info is not None:
oprot.writeFieldBegin('info', TType.STRING, 1)
oprot.writeString(self.info.encode('utf-8') if sys.version_info[0] == 2 else self.info)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getsdreader_args)
getsdreader_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'info', 'UTF8', None, ), # 1
)
class getsdreader_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getsdreader_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getsdreader_result)
getsdreader_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
class getdevice_args(object):
"""
Attributes:
- info
"""
def __init__(self, info=None,):
self.info = info
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.info = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getdevice_args')
if self.info is not None:
oprot.writeFieldBegin('info', TType.STRING, 1)
oprot.writeString(self.info.encode('utf-8') if sys.version_info[0] == 2 else self.info)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getdevice_args)
getdevice_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'info', 'UTF8', None, ), # 1
)
class getdevice_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getdevice_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getdevice_result)
getdevice_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
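# Editorial note: __eq__/__ne__ compare the whole attribute dict, so these
# structs behave as simple value objects, and __repr__ prints the same
# attributes.  For example:
#
#     getdevice_result(success='ok') == getdevice_result(success='ok')  # True
#     getdevice_result(success='ok') != getdevice_result()              # True
#     repr(getdevice_result(success='ok'))
#     # "getdevice_result(success='ok')"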
class gettime_args(object):
"""
Attributes:
- info
"""
def __init__(self, info=None,):
self.info = info
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.info = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('gettime_args')
if self.info is not None:
oprot.writeFieldBegin('info', TType.STRING, 1)
oprot.writeString(self.info.encode('utf-8') if sys.version_info[0] == 2 else self.info)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(gettime_args)
gettime_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'info', 'UTF8', None, ), # 1
)
class gettime_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('gettime_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(gettime_result)
gettime_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
class retime_args(object):
"""
Attributes:
- info
"""
def __init__(self, info=None,):
self.info = info
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.info = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('retime_args')
if self.info is not None:
oprot.writeFieldBegin('info', TType.STRING, 1)
oprot.writeString(self.info.encode('utf-8') if sys.version_info[0] == 2 else self.info)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(retime_args)
retime_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'info', 'UTF8', None, ), # 1
)
class retime_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('retime_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(retime_result)
retime_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
class getpower_args(object):
"""
Attributes:
- info
"""
def __init__(self, info=None,):
self.info = info
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.info = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getpower_args')
if self.info is not None:
oprot.writeFieldBegin('info', TType.STRING, 1)
oprot.writeString(self.info.encode('utf-8') if sys.version_info[0] == 2 else self.info)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getpower_args)
getpower_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'info', 'UTF8', None, ), # 1
)
class getpower_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getpower_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getpower_result)
getpower_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
class getdataresult_args(object):
"""
Attributes:
- info
"""
def __init__(self, info=None,):
self.info = info
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.info = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getdataresult_args')
if self.info is not None:
oprot.writeFieldBegin('info', TType.STRING, 1)
oprot.writeString(self.info.encode('utf-8') if sys.version_info[0] == 2 else self.info)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getdataresult_args)
getdataresult_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'info', 'UTF8', None, ), # 1
)
class getdataresult_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getdataresult_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getdataresult_result)
getdataresult_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
class getuser_args(object):
"""
Attributes:
- info
"""
def __init__(self, info=None,):
self.info = info
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.info = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getuser_args')
if self.info is not None:
oprot.writeFieldBegin('info', TType.STRING, 1)
oprot.writeString(self.info.encode('utf-8') if sys.version_info[0] == 2 else self.info)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getuser_args)
getuser_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'info', 'UTF8', None, ), # 1
)
class getuser_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getuser_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getuser_result)
getuser_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
class getroles_args(object):
"""
Attributes:
- info
"""
def __init__(self, info=None,):
self.info = info
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.info = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getroles_args')
if self.info is not None:
oprot.writeFieldBegin('info', TType.STRING, 1)
oprot.writeString(self.info.encode('utf-8') if sys.version_info[0] == 2 else self.info)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getroles_args)
getroles_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'info', 'UTF8', None, ), # 1
)
class getroles_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getroles_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getroles_result)
getroles_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
class getaccessjson_args(object):
"""
Attributes:
- info
"""
def __init__(self, info=None,):
self.info = info
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.info = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getaccessjson_args')
if self.info is not None:
oprot.writeFieldBegin('info', TType.STRING, 1)
oprot.writeString(self.info.encode('utf-8') if sys.version_info[0] == 2 else self.info)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getaccessjson_args)
getaccessjson_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'info', 'UTF8', None, ), # 1
)
class getaccessjson_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getaccessjson_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getaccessjson_result)
getaccessjson_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
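# Editorial note: every method in this service takes one opaque string
# ('info') and returns one string ('success').  Names such as getaccessjson
# suggest the payloads are JSON documents; the exact shape is not defined in
# this file, so the example below is hypothetical:
#
#     import json
#     args = getaccessjson_args(info=json.dumps({'user': 'alice'}))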
class deleteuser_args(object):
"""
Attributes:
- info
"""
def __init__(self, info=None,):
self.info = info
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.info = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('deleteuser_args')
if self.info is not None:
oprot.writeFieldBegin('info', TType.STRING, 1)
oprot.writeString(self.info.encode('utf-8') if sys.version_info[0] == 2 else self.info)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(deleteuser_args)
deleteuser_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'info', 'UTF8', None, ), # 1
)
class deleteuser_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('deleteuser_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(deleteuser_result)
deleteuser_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
class deleterole_args(object):
"""
Attributes:
- info
"""
def __init__(self, info=None,):
self.info = info
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.info = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('deleterole_args')
if self.info is not None:
oprot.writeFieldBegin('info', TType.STRING, 1)
oprot.writeString(self.info.encode('utf-8') if sys.version_info[0] == 2 else self.info)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(deleterole_args)
deleterole_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'info', 'UTF8', None, ), # 1
)
class deleterole_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('deleterole_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(deleterole_result)
deleterole_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
class userchangerole_args(object):
"""
Attributes:
- info
"""
def __init__(self, info=None,):
self.info = info
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.info = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('userchangerole_args')
if self.info is not None:
oprot.writeFieldBegin('info', TType.STRING, 1)
oprot.writeString(self.info.encode('utf-8') if sys.version_info[0] == 2 else self.info)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(userchangerole_args)
userchangerole_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'info', 'UTF8', None, ), # 1
)
class userchangerole_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('userchangerole_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(userchangerole_result)
userchangerole_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
class rolechangeaccess_args(object):
"""
Attributes:
- info
"""
def __init__(self, info=None,):
self.info = info
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.info = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('rolechangeaccess_args')
if self.info is not None:
oprot.writeFieldBegin('info', TType.STRING, 1)
oprot.writeString(self.info.encode('utf-8') if sys.version_info[0] == 2 else self.info)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(rolechangeaccess_args)
rolechangeaccess_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'info', 'UTF8', None, ), # 1
)
class rolechangeaccess_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('rolechangeaccess_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(rolechangeaccess_result)
rolechangeaccess_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
class downloadresources_args(object):
"""
Attributes:
- info
"""
def __init__(self, info=None,):
self.info = info
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.info = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('downloadresources_args')
if self.info is not None:
oprot.writeFieldBegin('info', TType.STRING, 1)
oprot.writeString(self.info.encode('utf-8') if sys.version_info[0] == 2 else self.info)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(downloadresources_args)
downloadresources_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'info', 'UTF8', None, ), # 1
)
class downloadresources_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('downloadresources_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(downloadresources_result)
downloadresources_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
class downloadinstruction_args(object):
"""
Attributes:
- info
"""
def __init__(self, info=None,):
self.info = info
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.info = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('downloadinstruction_args')
if self.info is not None:
oprot.writeFieldBegin('info', TType.STRING, 1)
oprot.writeString(self.info.encode('utf-8') if sys.version_info[0] == 2 else self.info)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(downloadinstruction_args)
downloadinstruction_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'info', 'UTF8', None, ), # 1
)
class downloadinstruction_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('downloadinstruction_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(downloadinstruction_result)
downloadinstruction_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
class getmineresult_args(object):
"""
Attributes:
- info
"""
def __init__(self, info=None,):
self.info = info
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.info = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getmineresult_args')
if self.info is not None:
oprot.writeFieldBegin('info', TType.STRING, 1)
oprot.writeString(self.info.encode('utf-8') if sys.version_info[0] == 2 else self.info)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getmineresult_args)
getmineresult_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'info', 'UTF8', None, ), # 1
)
class getmineresult_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getmineresult_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getmineresult_result)
getmineresult_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
class getconditionresult_args(object):
"""
Attributes:
- info
"""
def __init__(self, info=None,):
self.info = info
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.info = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getconditionresult_args')
if self.info is not None:
oprot.writeFieldBegin('info', TType.STRING, 1)
oprot.writeString(self.info.encode('utf-8') if sys.version_info[0] == 2 else self.info)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getconditionresult_args)
getconditionresult_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'info', 'UTF8', None, ), # 1
)
class getconditionresult_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('getconditionresult_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(getconditionresult_result)
getconditionresult_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
class newuser_args(object):
"""
Attributes:
- info
"""
def __init__(self, info=None,):
self.info = info
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.info = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('newuser_args')
if self.info is not None:
oprot.writeFieldBegin('info', TType.STRING, 1)
oprot.writeString(self.info.encode('utf-8') if sys.version_info[0] == 2 else self.info)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(newuser_args)
newuser_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'info', 'UTF8', None, ), # 1
)
class newuser_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('newuser_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(newuser_result)
newuser_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
class newrole_args(object):
"""
Attributes:
- info
"""
def __init__(self, info=None,):
self.info = info
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.STRING:
self.info = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('newrole_args')
if self.info is not None:
oprot.writeFieldBegin('info', TType.STRING, 1)
oprot.writeString(self.info.encode('utf-8') if sys.version_info[0] == 2 else self.info)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(newrole_args)
newrole_args.thrift_spec = (
None, # 0
(1, TType.STRING, 'info', 'UTF8', None, ), # 1
)
class newrole_result(object):
"""
Attributes:
- success
"""
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot._fast_decode is not None and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None:
iprot._fast_decode(self, iprot, [self.__class__, self.thrift_spec])
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRING:
self.success = iprot.readString().decode('utf-8') if sys.version_info[0] == 2 else iprot.readString()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot._fast_encode is not None and self.thrift_spec is not None:
oprot.trans.write(oprot._fast_encode(self, [self.__class__, self.thrift_spec]))
return
oprot.writeStructBegin('newrole_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRING, 0)
oprot.writeString(self.success.encode('utf-8') if sys.version_info[0] == 2 else self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.items()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
all_structs.append(newrole_result)
newrole_result.thrift_spec = (
(0, TType.STRING, 'success', 'UTF8', None, ), # 0
)
fix_spec(all_structs)
del all_structs
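# A minimal round-trip sketch for the generated structs above (illustrative
# only; assumes the standard `thrift` pip package and is not part of the
# generated output):
if __name__ == "__main__":
    from thrift.transport import TTransport
    from thrift.protocol import TBinaryProtocol

    # serialize a request struct into an in-memory buffer
    wbuf = TTransport.TMemoryBuffer()
    newuser_args(info="alice").write(TBinaryProtocol.TBinaryProtocol(wbuf))
    payload = wbuf.getvalue()

    # deserialize the bytes back into a fresh struct
    rbuf = TTransport.TMemoryBuffer(payload)
    decoded = newuser_args()
    decoded.read(TBinaryProtocol.TBinaryProtocol(rbuf))
    assert decoded.info == "alice"
    print(decoded)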
---- record boundary (per-file quality-signal columns omitted) ----
next file: tests/components/ozw/test_light.py
hexsha: 67eebdfdea75722d22f2ce44e801a6d109e7e537 | size: 15,572 bytes | ext: py | lang: Python
max-stars repo: chewbh/core @ 650d61e4f3007d1f7d456713d43fbc30b7396ce6, licenses ["Apache-2.0"], 2 stars (2020-09-10 .. 2021-02-26)
max-issues repo: frnktrgr/core, 37 issue events (2020-07-17 .. 2022-03-31)
max-forks repo: glance-/home-assistant @ 43a3a8bd740c926698c12f7a5b25f6ab99bb7e4b
"""Test Z-Wave Lights."""
from homeassistant.components.ozw.light import byte_to_zwave_brightness
from .common import setup_ozw
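# `byte_to_zwave_brightness` maps Home Assistant's 0-255 brightness scale onto
# the 0-99 Z-Wave dimmer range. A hypothetical reference sketch of the presumed
# scaling (the real implementation lives in homeassistant.components.ozw.light):
def _presumed_byte_to_zwave(value: int) -> int:
    """Hypothetical reference: scale 0-255 down to 0-99 with rounding."""
    return round((value / 255) * 99)

# Worked example: 44 -> round(44 / 255 * 99) = 17, and scaling 17 back up gives
# round(17 / 99 * 255) = 44, which is why the brightness asserts below round-trip.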
async def test_light(hass, light_data, light_msg, light_rgb_msg, sent_messages):
"""Test setting up config entry."""
receive_message = await setup_ozw(hass, fixture=light_data)
# Test loaded
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "off"
# Test turning on
# Beware that due to rounding, a roundtrip conversion does not always work
new_brightness = 44
new_transition = 0
await hass.services.async_call(
"light",
"turn_on",
{
"entity_id": "light.led_bulb_6_multi_colour_level",
"brightness": new_brightness,
"transition": new_transition,
},
blocking=True,
)
assert len(sent_messages) == 2
msg = sent_messages[0]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 0, "ValueIDKey": 1407375551070225}
msg = sent_messages[1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {
"Value": byte_to_zwave_brightness(new_brightness),
"ValueIDKey": 659128337,
}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = byte_to_zwave_brightness(new_brightness)
light_msg.encode()
receive_message(light_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "on"
assert state.attributes["brightness"] == new_brightness
# Test turning off
new_transition = 6553
await hass.services.async_call(
"light",
"turn_off",
{
"entity_id": "light.led_bulb_6_multi_colour_level",
"transition": new_transition,
},
blocking=True,
)
assert len(sent_messages) == 4
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 237, "ValueIDKey": 1407375551070225}
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 0, "ValueIDKey": 659128337}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = 0
light_msg.encode()
receive_message(light_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "off"
# Test turn on without brightness
new_transition = 127
await hass.services.async_call(
"light",
"turn_on",
{
"entity_id": "light.led_bulb_6_multi_colour_level",
"transition": new_transition,
},
blocking=True,
)
assert len(sent_messages) == 6
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 127, "ValueIDKey": 1407375551070225}
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {
"Value": 255,
"ValueIDKey": 659128337,
}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = byte_to_zwave_brightness(new_brightness)
light_msg.encode()
receive_message(light_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "on"
assert state.attributes["brightness"] == new_brightness
# Test set brightness to 0
new_brightness = 0
await hass.services.async_call(
"light",
"turn_on",
{
"entity_id": "light.led_bulb_6_multi_colour_level",
"brightness": new_brightness,
},
blocking=True,
)
assert len(sent_messages) == 7
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {
"Value": byte_to_zwave_brightness(new_brightness),
"ValueIDKey": 659128337,
}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = byte_to_zwave_brightness(new_brightness)
light_msg.encode()
receive_message(light_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "off"
# Test setting color_name
new_color = "blue"
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.led_bulb_6_multi_colour_level", "color_name": new_color},
blocking=True,
)
assert len(sent_messages) == 9
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": "#0000ff0000", "ValueIDKey": 659341335}
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 255, "ValueIDKey": 659128337}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = byte_to_zwave_brightness(255)
light_msg.encode()
light_rgb_msg.decode()
light_rgb_msg.payload["Value"] = "#0000ff0000"
light_rgb_msg.encode()
receive_message(light_msg)
receive_message(light_rgb_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "on"
assert state.attributes["rgb_color"] == (0, 0, 255)
# Test setting hs_color
new_color = [300, 70]
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.led_bulb_6_multi_colour_level", "hs_color": new_color},
blocking=True,
)
assert len(sent_messages) == 11
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 255, "ValueIDKey": 659128337}
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": "#ff4cff0000", "ValueIDKey": 659341335}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = byte_to_zwave_brightness(255)
light_msg.encode()
light_rgb_msg.decode()
light_rgb_msg.payload["Value"] = "#ff4cff0000"
light_rgb_msg.encode()
receive_message(light_msg)
receive_message(light_rgb_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "on"
assert state.attributes["hs_color"] == (300.0, 70.196)
# Test setting rgb_color
new_color = [255, 154, 0]
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.led_bulb_6_multi_colour_level", "rgb_color": new_color},
blocking=True,
)
assert len(sent_messages) == 13
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 255, "ValueIDKey": 659128337}
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": "#ff99000000", "ValueIDKey": 659341335}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = byte_to_zwave_brightness(255)
light_msg.encode()
light_rgb_msg.decode()
light_rgb_msg.payload["Value"] = "#ff99000000"
light_rgb_msg.encode()
receive_message(light_msg)
receive_message(light_rgb_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "on"
assert state.attributes["rgb_color"] == (255, 153, 0)
# Test setting xy_color
new_color = [0.52, 0.43]
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.led_bulb_6_multi_colour_level", "xy_color": new_color},
blocking=True,
)
assert len(sent_messages) == 15
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 255, "ValueIDKey": 659128337}
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": "#ffbb370000", "ValueIDKey": 659341335}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = byte_to_zwave_brightness(255)
light_msg.encode()
light_rgb_msg.decode()
light_rgb_msg.payload["Value"] = "#ffbb370000"
light_rgb_msg.encode()
receive_message(light_msg)
receive_message(light_rgb_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "on"
assert state.attributes["xy_color"] == (0.519, 0.429)
# Test setting color temp
new_color = 465
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.led_bulb_6_multi_colour_level", "color_temp": new_color},
blocking=True,
)
assert len(sent_messages) == 17
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": 255, "ValueIDKey": 659128337}
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": "#000000e51a", "ValueIDKey": 659341335}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = byte_to_zwave_brightness(255)
light_msg.encode()
light_rgb_msg.decode()
light_rgb_msg.payload["Value"] = "#000000e51a"
light_rgb_msg.encode()
receive_message(light_msg)
receive_message(light_rgb_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "on"
assert state.attributes["color_temp"] == 465
async def test_no_rgb_light(hass, light_no_rgb_data, light_no_rgb_msg, sent_messages):
"""Test setting up config entry."""
receive_message = await setup_ozw(hass, fixture=light_no_rgb_data)
# Test loaded no RGBW support (dimmer only)
state = hass.states.get("light.master_bedroom_l_level")
assert state is not None
assert state.state == "off"
# Turn on the light
new_brightness = 44
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.master_bedroom_l_level", "brightness": new_brightness},
blocking=True,
)
assert len(sent_messages) == 1
msg = sent_messages[-1]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {
"Value": byte_to_zwave_brightness(new_brightness),
"ValueIDKey": 38371345,
}
# Feedback on state
light_no_rgb_msg.decode()
light_no_rgb_msg.payload["Value"] = byte_to_zwave_brightness(new_brightness)
light_no_rgb_msg.encode()
receive_message(light_no_rgb_msg)
await hass.async_block_till_done()
state = hass.states.get("light.master_bedroom_l_level")
assert state is not None
assert state.state == "on"
assert state.attributes["brightness"] == new_brightness
async def test_no_ww_light(
hass, light_no_ww_data, light_msg, light_rgb_msg, sent_messages
):
"""Test setting up config entry."""
receive_message = await setup_ozw(hass, fixture=light_no_ww_data)
# Test loaded no ww support
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "off"
# Turn on the light
white_color = 190
await hass.services.async_call(
"light",
"turn_on",
{
"entity_id": "light.led_bulb_6_multi_colour_level",
"white_value": white_color,
},
blocking=True,
)
assert len(sent_messages) == 2
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": "#00000000be", "ValueIDKey": 659341335}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = byte_to_zwave_brightness(255)
light_msg.encode()
light_rgb_msg.decode()
light_rgb_msg.payload["Value"] = "#00000000be"
light_rgb_msg.encode()
receive_message(light_msg)
receive_message(light_rgb_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "on"
assert state.attributes["white_value"] == 190
async def test_no_cw_light(
hass, light_no_cw_data, light_msg, light_rgb_msg, sent_messages
):
"""Test setting up config entry."""
receive_message = await setup_ozw(hass, fixture=light_no_cw_data)
# Test loaded no cw support
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "off"
# Turn on the light
white_color = 190
await hass.services.async_call(
"light",
"turn_on",
{
"entity_id": "light.led_bulb_6_multi_colour_level",
"white_value": white_color,
},
blocking=True,
)
assert len(sent_messages) == 2
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": "#000000be00", "ValueIDKey": 659341335}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = byte_to_zwave_brightness(255)
light_msg.encode()
light_rgb_msg.decode()
light_rgb_msg.payload["Value"] = "#000000be00"
light_rgb_msg.encode()
receive_message(light_msg)
receive_message(light_rgb_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "on"
assert state.attributes["white_value"] == 190
async def test_wc_light(hass, light_wc_data, light_msg, light_rgb_msg, sent_messages):
"""Test setting up config entry."""
receive_message = await setup_ozw(hass, fixture=light_wc_data)
# Test loaded only white LED support
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "off"
# Turn on the light
new_color = 190
await hass.services.async_call(
"light",
"turn_on",
{"entity_id": "light.led_bulb_6_multi_colour_level", "color_temp": new_color},
blocking=True,
)
assert len(sent_messages) == 2
msg = sent_messages[-2]
assert msg["topic"] == "OpenZWave/1/command/setvalue/"
assert msg["payload"] == {"Value": "#0000001be4", "ValueIDKey": 659341335}
# Feedback on state
light_msg.decode()
light_msg.payload["Value"] = byte_to_zwave_brightness(255)
light_msg.encode()
light_rgb_msg.decode()
light_rgb_msg.payload["Value"] = "#0000001be4"
light_rgb_msg.encode()
receive_message(light_msg)
receive_message(light_rgb_msg)
await hass.async_block_till_done()
state = hass.states.get("light.led_bulb_6_multi_colour_level")
assert state is not None
assert state.state == "on"
assert state.attributes["color_temp"] == 191
---- record boundary (per-file quality-signal columns omitted) ----
next file: training/util/ImageSampling.py
hexsha: 67ff44744ae6a7b746ffe4a12aef459d79a587ba | size: 11,028 bytes | ext: py | lang: Python
repo: jpjuvo/deepfake-video-detector @ 7c5ea5f36277ff5405d8466e48e68d00a085fa7e, licenses ["MIT"], 1 star (2021-08-17)
import os
import random
import pandas as pd
import numpy as np
from pathlib import Path
from tqdm.notebook import tqdm as tqdm
def getBalancedImageDataFrame(base_dir,df,n_frames=17, n_frames_start=0, avoid_sampling_classes=[], downsample_majority=True):
# Start a list of paths, labels, and isVal splits
paths_reals, labels_reals, isVals_reals = [],[],[]
paths_fakes, labels_fakes, isVals_fakes = [],[],[]
def _getImagePath(video_id,frame_index,person_index=0):
name = video_id.replace('.mp4','_{0}_{1}.png'.format(person_index,frame_index))
return str(os.path.join(base_dir,name))
def _getFrameLabel(df, df_index, frame_index, person_index=0):
# get array of frame labels from the dataframe
frame_lbl_col = 'first_person_frame_labels' if person_index==0 else 'second_person_frame_labels'
frame_labels = df.iloc[df_index][frame_lbl_col]
# check for nan
        if pd.isna(frame_labels):  # note: `frame_labels == np.nan` is always False
            return np.nan
lbls_int = [int(lbl) for lbl in str(frame_labels).replace('[','').replace(']','').replace(' ','').split(',')]
return lbls_int[frame_index]
def _combineTwoListsAlternating(list1,list2):
combined = [None]*(len(list1)+len(list2))
combined[::2] = list1
combined[1::2] = list2
return combined
# Get all real videos and go through one by one
df_reals = df[df['label']=='REAL']
df_fakes = df[df['label']=='FAKE']
# shuffle reals
real_indices = list(df_reals.index.values)
random.shuffle(real_indices)
for real_ind in tqdm(real_indices):
real_id = df_reals.loc[real_ind,'index']
isVal = df_reals.loc[real_ind,'isValFold']
# Get all fake replicates for the real
fake_replicates = df_fakes[df_fakes['original']==real_id]
if len(fake_replicates)==0:
continue
# Get all frames for the real and check that they exist.
for frame_index in range(n_frames_start,n_frames):
real_frame = _getImagePath(real_id,frame_index)
if not os.path.isfile(real_frame):
continue
            # For each real frame, sample a fake frame at random and check that
            # a) the file exists and b) its label is not the unaltered (0) class.
            # If the pick fails, or it hits the downsampled majority class, resample.
fake_index = 0
fake_label = 0
fake_path = ""
loop_counter = len(fake_replicates)
while loop_counter > 0:
loop_counter -= 1
                # randomly pick one of the fake replicates
fake_index = random.randint(0,len(fake_replicates)-1)
fake_label = _getFrameLabel(fake_replicates, fake_index, frame_index)
# check that the fake image exists
fake_path = _getImagePath(fake_replicates.iloc[fake_index]['index'], frame_index)
if not os.path.isfile(fake_path):
continue
# skip the majority class randomly half of the time
skip_majority_class = random.randint(0,1)==0 and downsample_majority
# is the sampling condition satisfied? Avoid sampling unaltered fakes or too many majority types.
# avoid_sampling_classes avoids classes except for validation fold
                if (fake_label != 0 and (fake_label not in avoid_sampling_classes or isVal) and (not skip_majority_class or fake_label != 2)):
break
# Check that fake sample got selected
if not os.path.isfile(fake_path):
continue
# Check that we didn't end up with 0 labeled fake
if fake_label == 0:
continue
if (fake_label in avoid_sampling_classes and not isVal):
continue
# Append lists
paths_reals.append(real_frame)
labels_reals.append(int(0))
isVals_reals.append(isVal)
paths_fakes.append(fake_path)
labels_fakes.append(int(fake_label))
isVals_fakes.append(fake_replicates.iloc[fake_index]['isValFold'])
# shuffle lists but maintain the same order between lists
zipped_list = list(zip(paths_reals,labels_reals,isVals_reals,paths_fakes,labels_fakes,isVals_fakes))
random.shuffle(zipped_list)
paths_reals,labels_reals,isVals_reals,paths_fakes,labels_fakes,isVals_fakes = zip(*zipped_list)
# combine real and fake lists in an alternating fashion so that the order is real1,fake1,real2,fake2,...
paths = _combineTwoListsAlternating(paths_reals,paths_fakes)
labels = _combineTwoListsAlternating(labels_reals,labels_fakes)
isVals = _combineTwoListsAlternating(isVals_reals,isVals_fakes)
return pd.DataFrame({'path':paths,'label':labels, 'isValFold':isVals})
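# Standalone check of the alternating merge used above: reals and fakes
# interleave as real1, fake1, real2, fake2 (hypothetical toy values):
if __name__ == "__main__":
    _demo = [None] * 4
    _demo[::2] = ['real1', 'real2']
    _demo[1::2] = ['fake1', 'fake2']
    print(_demo)  # -> ['real1', 'fake1', 'real2', 'fake2']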
def getBalancedVideoDataFrame(df):
# Start a list of paths, labels, and isVal splits
paths_reals, labels_reals, isVals_reals = [],[],[]
paths_fakes, labels_fakes, isVals_fakes = [],[],[]
def _getVideoLabel(df, df_index, person_index=0):
# get array of frame labels from the dataframe
frame_lbl_col = 'first_person_label' if person_index==0 else 'second_person_label'
lbl = df.iloc[df_index][frame_lbl_col]
# check for nan
        if pd.isna(lbl):  # note: `lbl == np.nan` is always False
            return np.nan
return int(lbl)
def _combineTwoListsAlternating(list1,list2):
combined = [None]*(len(list1)+len(list2))
combined[::2] = list1
combined[1::2] = list2
return combined
# Get all real videos and go through one by one
df_reals = df[df['label']=='REAL']
df_fakes = df[df['label']=='FAKE']
# shuffle reals
real_indices = list(df_reals.index.values)
random.shuffle(real_indices)
for real_ind in tqdm(real_indices):
real_id = df_reals.loc[real_ind,'index']
# Get all fake replicates for the real
fake_replicates = df_fakes[df_fakes['original']==real_id]
if len(fake_replicates)==0:
continue
        # sample one fake replicate at random and require a non-zero label;
        # half the time, also reject the majority class (2) and resample.
fake_index = -1
fake_label = 0
loop_counter = len(fake_replicates)
while loop_counter > 0:
loop_counter -= 1
            # randomly pick one of the fake replicates
fake_index = random.randint(0,len(fake_replicates)-1)
fake_label = _getVideoLabel(fake_replicates, fake_index)
# skip the majority class randomly half of the time
skip_majority_class = random.randint(0,1)==0
# is the sampling condition satisfied? Avoid sampling unaltered fakes or too many majority types.
            if (fake_label != 0 and (not skip_majority_class or fake_label != 2)):
break
if fake_index == -1:
continue
# Append lists
paths_reals.append(real_id)
labels_reals.append(int(0))
isVals_reals.append(df_reals.loc[real_ind,'isValFold'])
paths_fakes.append(fake_replicates.iloc[fake_index]['index'])
labels_fakes.append(int(fake_label))
isVals_fakes.append(fake_replicates.iloc[fake_index]['isValFold'])
# shuffle lists but maintain the same order between lists
zipped_list = list(zip(paths_reals,labels_reals,isVals_reals,paths_fakes,labels_fakes,isVals_fakes))
random.shuffle(zipped_list)
paths_reals,labels_reals,isVals_reals,paths_fakes,labels_fakes,isVals_fakes = zip(*zipped_list)
# combine real and fake lists in an alternating fashion so that the order is real1,fake1,real2,fake2,...
paths = _combineTwoListsAlternating(paths_reals,paths_fakes)
labels = _combineTwoListsAlternating(labels_reals,labels_fakes)
isVals = _combineTwoListsAlternating(isVals_reals,isVals_fakes)
return pd.DataFrame({'path':paths,'label':labels, 'isValFold':isVals})
def getAllImagesDataFrame(base_dir,df,n_frames=17):
# Start a list of paths, labels, and isVal splits
paths, labels, isVals = [],[],[]
def _getImagePath(video_id,frame_index,person_index=0):
name = video_id.replace('.mp4','_{0}_{1}.png'.format(person_index,frame_index))
return str(os.path.join(base_dir,name))
def _getFrameLabel(df, df_index, frame_index, person_index=0):
# get array of frame labels from the dataframe
frame_lbl_col = 'first_person_frame_labels' if person_index==0 else 'second_person_frame_labels'
frame_labels = df.iloc[df_index][frame_lbl_col]
# check for nan
        if pd.isna(frame_labels):  # note: `frame_labels == np.nan` is always False
            return np.nan
lbls_int = [int(lbl) for lbl in str(frame_labels).replace('[','').replace(']','').replace(' ','').split(',')]
return lbls_int[frame_index]
# Get all real videos and go through one by one
df_reals = df[df['label']=='REAL']
df_fakes = df[df['label']=='FAKE']
# shuffle reals
real_indices = list(df_reals.index.values)
random.shuffle(real_indices)
for real_ind in tqdm(real_indices):
real_id = df_reals.loc[real_ind,'index']
isVal = df_reals.loc[real_ind,'isValFold']
# Get all fake replicates for the real
fake_replicates = df_fakes[df_fakes['original']==real_id]
if len(fake_replicates)==0:
continue
# Get all frames for the real and check that they exist.
for frame_index in range(n_frames):
real_frame = _getImagePath(real_id,frame_index)
if not os.path.isfile(real_frame):
continue
# Append lists
paths.append(real_frame)
labels.append(int(0))
isVals.append(isVal)
fake_index = 0
fake_label = 0
fake_path = ""
loop_counter = len(fake_replicates)
while loop_counter > 0:
loop_counter -= 1
                # step through the fake replicates from last to first (not random)
                fake_index = loop_counter
fake_label = _getFrameLabel(fake_replicates, fake_index, frame_index)
# check that the fake image exists
fake_path = _getImagePath(fake_replicates.iloc[fake_index]['index'], frame_index)
if not os.path.isfile(fake_path):
continue
# Append lists
paths.append(fake_path)
labels.append(fake_label)
isVals.append(fake_replicates.iloc[fake_index]['isValFold'])
return pd.DataFrame({'path':paths,'label':labels, 'isValFold':isVals})
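# Illustrative input schema inferred from the column accesses above; rows are
# only sampled when the matching frame PNGs exist under base_dir, so this
# hypothetical pair would still need its frames extracted first:
if __name__ == "__main__":
    demo_df = pd.DataFrame({
        'index': ['real1.mp4', 'fake1.mp4'],      # video file names
        'label': ['REAL', 'FAKE'],
        'original': [None, 'real1.mp4'],          # fakes reference their source real
        'isValFold': [False, False],
        'first_person_label': [None, 1],
        'first_person_frame_labels': [None, '[1, 1, 1]'],
    })
    print(demo_df)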
---- record boundary (per-file quality-signal columns omitted) ----
next file: seapy/roms/ncgen.py
hexsha: db1a916dc0ff34ae4a1f461be15421224eeea1ed | size: 40,392 bytes | ext: py | lang: Python
repo: ivicajan/seapy @ 168a3490061d570236c1982006c3ce81c7a958ce, licenses ["MIT"], 24 stars (2015-08-17 .. 2022-03-01), 43 issue events (2016-03-29 .. 2021-10-29), 21 fork events (2016-03-29 .. 2021-08-31)
#!/usr/bin/env python
"""
Functions to generate ROMS netcdf files
Written by Brian Powell on 04/26/13
Copyright (c)2010--2021 University of Hawaii under the MIT-License.
"""
import os
import re
import netCDF4
import numpy as np
from datetime import datetime
from seapy.lib import default_epoch
from seapy.cdl_parser import cdl_parser
from seapy.roms import lib
from warnings import warn
"""
Module variables
"""
_cdl_dir = os.path.dirname(lib.__file__)
_cdl_dir = "/".join((('.' if not _cdl_dir else _cdl_dir), "cdl/"))
_format = "NETCDF4_CLASSIC"
def __number_or_string(val):
"""
convert a string to a number if the string represents a number;
otherwise, return the string.
"""
try:
val = float(val.strip())
except ValueError:
pass
return val
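# e.g. __number_or_string(" 3.5 ") -> 3.5, while __number_or_string("degC")
# falls through the ValueError and returns the string unchanged.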
def ncgen(filename, dims=None, vars=None, attr=None, title=None,
clobber=False, format=_format):
"""
Create a new netcdf file with the given definitions. Need to define
the dimensions, the variables, and the attributes.
Parameters
----------
filename : string
name and path of file to create
dims : dict
dictionary of dimensions with dimension name as keys, and the value
as the length of the dimension. NOTE: 0 value means UNLIMITED.
vars: list of dictionaries
each variable to define is a dictionary that contains three keys:
name: string name of variable
type: string type (float, double, etc.)
dims: comma separated string of dimensions ("ocean_time, eta_rho")
attr: dictionary of variable attributes where the key is
the attribute name and the value is the attribute string
attr: dict, optional
optional dictionary of global attributes for the netcdf file:
key is the attribute name and the value is the attribute string
title: string, optional
netcdf attribute title
clobber: bool, optional
If True, destroy existing file
format: string, optional
NetCDF format to use. Default is NETCDF4_CLASSIC
Returns
-------
nc, netCDF4 object
Examples
--------
>>> dims = {"ocean_time":0, "eta_rho":120, "xi_rho":100}
>>> vars = [ {"name":"eta_slice", "type":"double",
"dims":"ocean_time, eta_rho",
"attr":{"units":"degrees Celcius"}},
{"name":"xi_slice", "type":"double",
"dims":"ocean_time, xi_rho",
"attr":{"units":"degrees Celcius"}} ]
>>> seapy.roms.ncgen("test.nc", dims=dims, vars=vars, title="Test")
"""
vars = np.atleast_1d(vars)
if dims is None:
dims = {}
if attr is None:
attr = {}
# Create the file
if not os.path.isfile(filename) or clobber:
_nc = netCDF4.Dataset(filename, "w", format=format)
# Loop over the dimensions and add them
for dim in dims:
_nc.createDimension(dim, dims[dim])
# Loop over the variables and add them
for var in vars:
add_variable(_nc, var)
# Add global attributes
for a in attr:
_nc.setncattr(a, attr[a])
try:
_nc.author = os.getenv('USER') or \
os.getenv('LOGNAME') or \
os.getenv('USERNAME') or \
os.getlogin() or \
'nobody'
        except (AttributeError, OSError):
            _nc.author = 'nobody'
_nc.history = datetime.now().strftime(
"Created on %a, %B %d, %Y at %H:%M")
if title is not None:
_nc.title = title
_nc.close()
else:
warn(filename + " already exists. Using existing definition")
return netCDF4.Dataset(filename, "a")
def _set_grid_dimensions(dims, eta_rho, xi_rho, s_rho):
"""
internal method: Set grid dimensions
"""
if "xi_rho" in dims.keys():
dims["xi_rho"] = xi_rho
if "xi_u" in dims.keys():
dims["xi_u"] = xi_rho - 1
if "xi_v" in dims.keys():
dims["xi_v"] = xi_rho
if "xi_psi" in dims.keys():
dims["xi_psi"] = xi_rho - 1
if "eta_rho" in dims.keys():
dims["eta_rho"] = eta_rho
if "eta_u" in dims.keys():
dims["eta_u"] = eta_rho
if "eta_v" in dims.keys():
dims["eta_v"] = eta_rho - 1
if "eta_psi" in dims.keys():
dims["eta_psi"] = eta_rho - 1
if "s_rho" in dims.keys():
dims["s_rho"] = s_rho
if "s_w" in dims.keys():
dims["s_w"] = s_rho + 1
return dims
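# Quick sketch of the staggered-grid bookkeeping above: u points drop one
# column and v points drop one row relative to rho points (hypothetical sizes):
if __name__ == "__main__":
    _demo_dims = {"xi_rho": 0, "xi_u": 0, "eta_rho": 0, "eta_v": 0,
                  "s_rho": 0, "s_w": 0}
    print(_set_grid_dimensions(_demo_dims, eta_rho=120, xi_rho=100, s_rho=20))
    # -> xi_rho=100, xi_u=99, eta_rho=120, eta_v=119, s_rho=20, s_w=21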
def _set_time_ref(vars, timevar, reftime, cycle=None):
"""
internal method: Set time reference
"""
if isinstance(timevar, str):
timevar = [timevar]
for tvar in timevar:
for nvar in vars:
if nvar["name"] == tvar:
if "units" in nvar["attr"]:
                    t = re.findall(r'(\w+) since .*', nvar["attr"]["units"])
nvar["attr"]["units"] = \
"{:s} since {:s}".format(t[0], str(reftime))
else:
nvar["attr"]["units"] = \
"days since {:s}".format(str(reftime))
if cycle is not None:
nvar["attr"]["cycle_length"] = cycle
return vars
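# Minimal sketch of the units rewrite performed by _set_time_ref, using a
# hypothetical variable definition:
if __name__ == "__main__":
    _demo_vars = [{"name": "ocean_time",
                   "attr": {"units": "hours since 2000-01-01"}}]
    print(_set_time_ref(_demo_vars, "ocean_time", datetime(2005, 1, 1)))
    # -> units become "hours since 2005-01-01 00:00:00"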
def add_variable(nc, var):
"""
    Add a new variable with metadata to an existing netcdf file
Parameters
----------
    nc : string or netCDF4 object
        name of the file, or the open dataset, to add the variable to
    var: dictionary
name: string name of variable
type: string type (float, double, etc.)
dims: comma separated string of dimensions ("ocean_time, eta_rho")
attr: dictionary of variable attributes where the key is
the attribute name and the value is the attribute string
Returns
-------
nc, netCDF4 object
Examples
--------
>>> var = {"name":"eta_slice", "type":"double",
"dims":"ocean_time, eta_rho",
"attr":{"units":"degrees Celcius"}}
>>> nc = seapy.roms.ncgen.add_variable("test.nc", var)
"""
if nc is None:
raise AttributeError("No file was specified")
if isinstance(nc, netCDF4._netCDF4.Dataset):
pass
else:
nc = netCDF4.Dataset(nc, "a")
# Handle the dimensions by enforcing a tuple list rather
# than a list of strings, then add whatever we have
try:
dims = var['dims'].replace(" ", "").split(',')
    except AttributeError:  # var['dims'] may already be a sequence
dims = var['dims']
try:
nvar = nc.createVariable(var["name"], var["type"], dims)
    except Exception:  # fall back to a dimensionless variable
nvar = nc.createVariable(var["name"], var["type"])
try:
for key in var["attr"]:
# Check if it is a number and convert
astr = __number_or_string(var["attr"][key])
setattr(nvar, key, astr)
except KeyError:
pass
return nc
def _create_generic_file(filename, cdl, eta_rho, xi_rho, s_rho,
reftime=None, clobber=False, title="ROMS"):
"""
internal method: Generic file creator that uses ocean_time
"""
# Generate the Structure
dims, vars, attr = cdl_parser(cdl)
# Fill in the appropriate dimension values
dims = _set_grid_dimensions(dims, eta_rho, xi_rho, s_rho)
if reftime is not None:
vars = _set_time_ref(vars, "ocean_time", reftime)
# Create the file
_nc = ncgen(filename, dims=dims, vars=vars, attr=attr, clobber=clobber,
title=title)
# Return the new file
return _nc
def create_psource(filename, nriver=1, s_rho=5,
reftime=default_epoch, clobber=False, cdl=None, title="My River"):
"""
Create a new, blank point source file
Parameters
----------
filename : string
name and path of file to create
nriver : int, optional
number of rivers to put in file
s_rho: int, optional
number of s-levels
reftime: datetime, optional
date of epoch for time origin in netcdf
clobber: bool, optional
If True, clobber any existing files and recreate. If False, use
the existing file definition
cdl: string, optional,
Use the specified CDL file as the definition for the new
netCDF file.
title: string, optional
netcdf attribute title
Returns
-------
nc, netCDF4 object
"""
# Generate the Structure
dims, vars, attr = cdl_parser(
_cdl_dir + "frc_rivers.cdl" if cdl is None else cdl)
# Fill in the appropriate river values
dims["river"] = nriver
dims["s_rho"] = s_rho
vars = _set_time_ref(vars, "river_time", reftime)
# Create the river file
_nc = ncgen(filename, dims=dims, vars=vars, attr=attr, clobber=clobber,
title=title)
# Return the new file
return _nc
def create_grid(filename, eta_rho=10, xi_rho=10, s_rho=1, clobber=False,
cdl=None, title="My Grid"):
"""
Create a new, blank grid file
Parameters
----------
filename : string
name and path of file to create
eta_rho: int, optional
number of rows in the eta direction
xi_rho: int, optional
number of columns in the xi direction
s_rho: int, optional
number of s-levels
clobber: bool, optional
If True, clobber any existing files and recreate. If False, use
the existing file definition
cdl: string, optional,
Use the specified CDL file as the definition for the new
netCDF file.
title: string, optional
netcdf attribute title
Returns
-------
nc, netCDF4 object
"""
# Generate the Structure
dims, vars, attr = cdl_parser(
_cdl_dir + "roms_grid.cdl" if cdl is None else cdl)
# Fill in the appropriate dimension values
dims = _set_grid_dimensions(dims, eta_rho, xi_rho, s_rho)
# Create the grid file
_nc = ncgen(filename, dims=dims, vars=vars, attr=attr, clobber=clobber,
title=title)
# Return the new file
return _nc
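# Hedged usage sketch: with seapy and netCDF4 installed, this writes a blank
# grid file from the bundled roms_grid.cdl definition (output name is arbitrary):
if __name__ == "__main__":
    _demo_nc = create_grid("demo_grid.nc", eta_rho=20, xi_rho=30, s_rho=5,
                           clobber=True, title="Demo Grid")
    print(list(_demo_nc.dimensions))
    _demo_nc.close()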
def create_adsen(filename, eta_rho=10, xi_rho=10, s_rho=1,
reftime=default_epoch, clobber=False, cdl=None, title="My Adsen"):
"""
Create a new adjoint sensitivity file
Parameters
----------
filename : string
name and path of file to create
eta_rho: int, optional
number of rows in the eta direction
xi_rho: int, optional
number of columns in the xi direction
s_rho: int, optional
number of s-levels
reftime: datetime, optional
date of epoch for time origin in netcdf
clobber: bool, optional
If True, clobber any existing files and recreate. If False, use
the existing file definition
title: string, optional
netcdf attribute title
Returns
-------
nc, netCDF4 object
"""
# Create the general file
return _create_generic_file(filename, _cdl_dir + "adsen.cdl" if cdl is None else cdl,
eta_rho, xi_rho, s_rho, reftime, clobber, title)
def create_bry(filename, eta_rho=10, xi_rho=10, s_rho=1,
reftime=default_epoch, clobber=False, cdl=None, title="My BRY"):
"""
Create a bry forcing file
Parameters
----------
filename : string
name and path of file to create
eta_rho: int, optional
number of rows in the eta direction
xi_rho: int, optional
number of columns in the xi direction
s_rho: int, optional
number of s-levels
reftime: datetime, optional
date of epoch for time origin in netcdf
clobber: bool, optional
If True, clobber any existing files and recreate. If False, use
the existing file definition
cdl: string, optional,
Use the specified CDL file as the definition for the new
netCDF file.
title: string, optional
netcdf attribute title
Returns
-------
nc, netCDF4 object
"""
# Generate the Structure
dims, vars, attr = cdl_parser(
_cdl_dir + "bry_unlimit.cdl" if cdl is None else cdl)
# Fill in the appropriate dimension values
dims = _set_grid_dimensions(dims, eta_rho, xi_rho, s_rho)
vars = _set_time_ref(vars, "bry_time", reftime)
# Create the file
_nc = ncgen(filename, dims=dims, vars=vars, attr=attr, clobber=clobber,
title=title)
# Return the new file
return _nc
def create_clim(filename, eta_rho=10, xi_rho=10, s_rho=1,
reftime=default_epoch, clobber=False, cdl=None, title="My CLIM"):
"""
Create a climatology forcing file
Parameters
----------
filename : string
name and path of file to create
eta_rho: int, optional
number of rows in the eta direction
xi_rho: int, optional
number of columns in the xi direction
s_rho: int, optional
number of s-levels
reftime: datetime, optional
date of epoch for time origin in netcdf
clobber: bool, optional
If True, clobber any existing files and recreate. If False, use
the existing file definition
cdl: string, optional,
Use the specified CDL file as the definition for the new
netCDF file.
title: string, optional
netcdf attribute title
Returns
-------
nc, netCDF4 object
"""
# Generate the Structure
dims, vars, attr = cdl_parser(
_cdl_dir + "clm_ts.cdl" if cdl is None else cdl)
# Fill in the appropriate dimension values
dims = _set_grid_dimensions(dims, eta_rho, xi_rho, s_rho)
vars = _set_time_ref(vars, "clim_time", reftime)
# Create the file
_nc = ncgen(filename, dims=dims, vars=vars, attr=attr, clobber=clobber,
title=title)
# Return the new file
return _nc
def create_frc_bulk(filename, lat=10, lon=10,
reftime=default_epoch, clobber=False, cdl=None,
title="My Forcing"):
"""
Create a bulk flux forcing file
Parameters
----------
filename : string
name and path of file to create
    lat: int, optional
        number of rows (latitude points) in the forcing grid
    lon: int, optional
        number of columns (longitude points) in the forcing grid
reftime: datetime, optional
date of epoch for time origin in netcdf
clobber: bool, optional
If True, clobber any existing files and recreate. If False, use
the existing file definition
cdl: string, optional,
Use the specified CDL file as the definition for the new
netCDF file.
title: string, optional
netcdf attribute title
Returns
-------
nc, netCDF4 object
"""
# Generate the Structure
dims, vars, attr = cdl_parser(
_cdl_dir + "frc_bulk.cdl" if cdl is None else cdl)
# Fill in the appropriate dimension values
dims["lat"] = lat
dims["lon"] = lon
vars = _set_time_ref(vars, "frc_time", reftime)
# Create the file
_nc = ncgen(filename, dims=dims, vars=vars, attr=attr, clobber=clobber,
title=title)
# Return the new file
return _nc
def create_frc_direct(filename, eta_rho=10, xi_rho=10,
reftime=default_epoch, clobber=False, cdl=None,
title="My Forcing"):
"""
Create a direct surface forcing file
Parameters
----------
filename : string
name and path of file to create
eta_rho: int, optional
number of rows in the eta direction
xi_rho: int, optional
number of columns in the xi direction
reftime: datetime, optional
date of epoch for time origin in netcdf
clobber: bool, optional
If True, clobber any existing files and recreate. If False, use
the existing file definition
cdl: string, optional,
Use the specified CDL file as the definition for the new
netCDF file.
title: string, optional
netcdf attribute title
Returns
-------
nc, netCDF4 object
"""
# Generate the Structure
dims, vars, attr = cdl_parser(
_cdl_dir + "frc_direct.cdl" if cdl is None else cdl)
# Fill in the appropriate dimension values
dims = {'y_rho': eta_rho,
'y_u': eta_rho,
'y_v': eta_rho - 1,
'x_rho': xi_rho,
'x_u': xi_rho - 1,
'x_v': xi_rho,
'frc_time': 0}
vars = _set_time_ref(vars, 'frc_time', reftime)
# Create the file
_nc = ncgen(filename, dims=dims, vars=vars, attr=attr, clobber=clobber,
title=title)
# Return the new file
return _nc
def create_frc_flux(filename, eta_rho=10, xi_rho=10, ntimes=1,
cycle=None, reftime=default_epoch, clobber=False,
cdl=None, title="My Flux"):
"""
Create a surface flux forcing file
Parameters
----------
filename : string
name and path of file to create
eta_rho: int, optional
number of rows in the eta direction
xi_rho: int, optional
number of columns in the xi direction
s_rho: int, optional
number of s-levels
ntimes: int, optional
number of time records (climatology files do not have unlimited
dimension)
cycle: int or None, optional
The number of days before cycling the forcing records
reftime: datetime, optional
date of epoch for time origin in netcdf
clobber: bool, optional
If True, clobber any existing files and recreate. If False, use
the existing file definition
cdl: string, optional,
Use the specified CDL file as the definition for the new
netCDF file.
title: string, optional
netcdf attribute title
Returns
-------
nc, netCDF4 object
"""
# Generate the Structure
dims, vars, attr = cdl_parser(
_cdl_dir + "frc_fluxclm.cdl" if cdl is None else cdl)
# Fill in the appropriate dimension values
dims = _set_grid_dimensions(dims, eta_rho, xi_rho, 1)
times = ("srf_time", "shf_time", "swf_time", "sss_time")
for n in times:
dims[n] = ntimes
vars = _set_time_ref(vars, times, reftime)
# Create the file
_nc = ncgen(filename, dims=dims, vars=vars, attr=attr, clobber=clobber,
title=title)
# Return the new file
return _nc
def create_frc_srelax(filename, eta_rho=10, xi_rho=10, s_rho=1, cycle=None,
reftime=default_epoch, clobber=False, cdl=None,
title="My Srelaxation"):
"""
Create a Salt Relaxation forcing file
Parameters
----------
filename : string
name and path of file to create
eta_rho: int, optional
number of rows in the eta direction
xi_rho: int, optional
number of columns in the xi direction
s_rho: int, optional
number of s-levels
cycle: int or None, optional
The number of days before cycling the forcing records
reftime: datetime, optional
date of epoch for time origin in netcdf
clobber: bool, optional
If True, clobber any existing files and recreate. If False, use
the existing file definition
cdl: string, optional,
Use the specified CDL file as the definition for the new
netCDF file.
title: string, optional
netcdf attribute title
Returns
-------
nc, netCDF4 object
"""
# Generate the Structure
dims, vars, attr = cdl_parser(
_cdl_dir + "frc_srelax.cdl" if cdl is None else cdl)
# Fill in the appropriate dimension values
dims = _set_grid_dimensions(dims, eta_rho, xi_rho, s_rho)
vars = _set_time_ref(vars, "sss_time", reftime, cycle)
# Create the file
_nc = ncgen(filename, dims=dims, vars=vars, attr=attr, clobber=clobber,
title=title)
# Return the new file
return _nc
def create_frc_qcorr(filename, eta_rho=10, xi_rho=10, s_rho=1, cycle=None,
reftime=default_epoch, clobber=False, cdl=None,
title="My Qcorrection"):
"""
Create a Q Correction forcing file
Parameters
----------
filename : string
name and path of file to create
eta_rho: int, optional
number of rows in the eta direction
xi_rho: int, optional
number of columns in the xi direction
s_rho: int, optional
number of s-levels
cycle: int or None, optional
The number of days before cycling the forcing records
reftime: datetime, optional
date of epoch for time origin in netcdf
clobber: bool, optional
If True, clobber any existing files and recreate. If False, use
the existing file definition
cdl: string, optional,
Use the specified CDL file as the definition for the new
netCDF file.
title: string, optional
netcdf attribute title
Returns
-------
nc, netCDF4 object
"""
# Generate the Structure
dims, vars, attr = cdl_parser(
_cdl_dir + "frc_qcorr.cdl" if cdl is None else cdl)
# Fill in the appropriate dimension values
dims = _set_grid_dimensions(dims, eta_rho, xi_rho, s_rho)
vars = _set_time_ref(vars, "sst_time", reftime, cycle)
# Create the file
_nc = ncgen(filename, dims=dims, vars=vars, attr=attr, clobber=clobber,
title=title)
# Return the new file
return _nc
def create_frc_wind(filename, eta_rho=10, xi_rho=10, s_rho=1, cycle=None,
reftime=default_epoch, clobber=False, cdl=None,
title="My Winds"):
"""
Create a surface wind stress forcing file
Parameters
----------
filename : string
name and path of file to create
eta_rho: int, optional
number of rows in the eta direction
xi_rho: int, optional
number of columns in the xi direction
s_rho: int, optional
number of s-levels
cycle: int or None, optional
The number of days before cycling the forcing records
reftime: datetime, optional
date of epoch for time origin in netcdf
clobber: bool, optional
If True, clobber any existing files and recreate. If False, use
the existing file definition
cdl: string, optional,
Use the specified CDL file as the definition for the new
netCDF file.
title: string, optional
netcdf attribute title
Returns
-------
nc, netCDF4 object
"""
# Generate the Structure
dims, vars, attr = cdl_parser(
_cdl_dir + "frc_windstress.cdl" if cdl is None else cdl)
# Fill in the appropriate dimension values
dims = _set_grid_dimensions(dims, eta_rho, xi_rho, s_rho)
vars = _set_time_ref(vars, "sms_time", reftime, cycle)
# Create the file
_nc = ncgen(filename, dims=dims, vars=vars, attr=attr, clobber=clobber,
title=title)
# Return the new file
return _nc
def create_frc_wave(filename, eta_rho=10, xi_rho=10, reftime=default_epoch,
clobber=False, cdl=None, title="My Waves"):
"""
Create a surface wave forcing file
Parameters
----------
filename : string
name and path of file to create
eta_rho: int, optional
number of rows in the eta direction
xi_rho: int, optional
number of columns in the xi direction
reftime: datetime, optional
date of epoch for time origin in netcdf
clobber: bool, optional
If True, clobber any existing files and recreate. If False, use
the existing file definition
cdl: string, optional,
Use the specified CDL file as the definition for the new
netCDF file.
title: string, optional
netcdf attribute title
Returns
-------
nc, netCDF4 object
"""
# Generate the Structure
dims, vars, attr = cdl_parser(
_cdl_dir + "frc_wave.cdl" if cdl is None else cdl)
# Fill in the appropriate dimension values
dims = _set_grid_dimensions(dims, eta_rho, xi_rho, s_rho=1)
vars = _set_time_ref(vars, "wave_time", reftime)
# Create the file
_nc = ncgen(filename, dims=dims, vars=vars, attr=attr, clobber=clobber,
title=title)
# Return the new file
return _nc
def create_tide(filename, eta_rho=10, xi_rho=10, s_rho=1, ntides=1,
reftime=default_epoch, clobber=False,
title="My Tides"):
"""
Create a barotropic tide forcing file
Parameters
----------
filename : string
name and path of file to create
eta_rho: int, optional
number of rows in the eta direction
xi_rho: int, optional
number of columns in the xi direction
s_rho: int, optional
number of s-levels
ntides: int, optional
number of tidal frequencies to force with
reftime: datetime, optional
date of epoch for time origin in netcdf
clobber: bool, optional
If True, clobber any existing files and recreate. If False, use
the existing file definition
title: string, optional
netcdf attribute title
Returns
-------
nc, netCDF4 object
"""
# Generate the Structure
dims, vars, attr = cdl_parser(_cdl_dir + "frc_tides.cdl")
# Fill in the appropriate dimension values
dims = _set_grid_dimensions(dims, eta_rho, xi_rho, s_rho)
dims["tide_period"] = ntides
# Create the file
_nc = ncgen(filename, dims=dims, vars=vars, attr=attr, clobber=clobber,
title=title)
# Return the new file
return _nc
def create_ini(filename, eta_rho=10, xi_rho=10, s_rho=1,
reftime=default_epoch, clobber=False, cdl=None, title="My Ini"):
"""
Create an initial condition file
Parameters
----------
filename : string
name and path of file to create
eta_rho: int, optional
number of rows in the eta direction
xi_rho: int, optional
number of columns in the xi direction
s_rho: int, optional
number of s-levels
reftime: datetime, optional
date of epoch for time origin in netcdf
clobber: bool, optional
If True, clobber any existing files and recreate. If False, use
the existing file definition
cdl: string, optional,
Use the specified CDL file as the definition for the new
netCDF file.
title: string, optional
netcdf attribute title
Returns
-------
nc, netCDF4 object
"""
# Generate the Structure
dims, vars, attr = cdl_parser(
_cdl_dir + "ini_hydro.cdl" if cdl is None else cdl)
# Fill in the appropriate dimension values
dims = _set_grid_dimensions(dims, eta_rho, xi_rho, s_rho)
vars = _set_time_ref(vars, "ocean_time", reftime)
# Create the file
_nc = ncgen(filename, dims=dims, vars=vars, attr=attr, clobber=clobber,
title=title)
# Return the new file
return _nc
def create_nudge_coef(filename, eta_rho=10, xi_rho=10, s_rho=1, clobber=False,
cdl=None, title="My Nudging"):
"""
Create a nudging coefficients file
Parameters
----------
filename : string
name and path of file to create
eta_rho: int, optional
number of rows in the eta direction
xi_rho: int, optional
number of columns in the xi direction
s_rho: int, optional
number of s-levels
clobber: bool, optional
If True, clobber any existing files and recreate. If False, use
the existing file definition
cdl: string, optional,
Use the specified CDL file as the definition for the new
netCDF file.
title: string, optional
netcdf attribute title
Returns
-------
nc, netCDF4 object
"""
# Generate the Structure
dims, vars, attr = cdl_parser(
_cdl_dir + "nudge_coef.cdl" if cdl is None else cdl)
# Fill in the appropriate dimension values
dims = _set_grid_dimensions(dims, eta_rho, xi_rho, s_rho)
# Create the file
_nc = ncgen(filename, dims=dims, vars=vars, attr=attr, clobber=clobber,
title=title)
# Return the new file
return _nc
def create_da_obs(filename, state_variable=20, survey=1, provenance=None,
clobber=False, cdl=None, title="My Observations"):
"""
Create an assimilation observations file
Parameters
----------
filename : string
name and path of file to create
survey: int, optional
number of surveys in the file
state_variable: int, optional
number of state variables in the observations
provenance: string, optional
Description of the provenance values
clobber: bool, optional
If True, clobber any existing files and recreate. If False, use
the existing file definition
cdl: string, optional,
Use the specified CDL file as the definition for the new
netCDF file.
title: string, optional
netcdf attribute title
Returns
-------
nc, netCDF4 object
"""
# Generate the Structure
dims, vars, attr = cdl_parser(
_cdl_dir + "s4dvar_obs.cdl" if cdl is None else cdl)
# Fill in the appropriate dimension values
dims["survey"] = survey
dims["state_variable"] = state_variable
# Set the provenance values in the global attributes
if provenance is not None:
attr["obs_provenance"] = str(provenance)
# Create the file
_nc = ncgen(filename, dims=dims, vars=vars, attr=attr, clobber=clobber,
title=title, format="NETCDF3_64BIT")
# Return the new file
return _nc
def create_da_ray_obs(filename, ray_datum=1, provenance="None",
reftime=default_epoch, clobber=False,
cdl=None, title="My Observations"):
"""
Create an acoustic ray assimilation observations file
Parameters
----------
filename : string
name and path of file to create
ray_datum: int, optional
Number of rays to assimilate
provenance: string, optional
Description of the provenance values
reftime: datetime, optional
date of epoch for time origin in netcdf
clobber: bool, optional
If True, clobber any existing files and recreate. If False, use
the existing file definition
cdl: string, optional,
Use the specified CDL file as the definition for the new
netCDF file.
title: string, optional
netcdf attribute title
Returns
-------
nc, netCDF4 object
"""
# Generate the Structure
dims, vars, attr = cdl_parser(
_cdl_dir + "s4dvar_obs_ray.cdl" if cdl is None else cdl)
# Fill in the appropriate dimension values
dims["ray_datum"] = ray_datum
vars = _set_time_ref(vars, "obs_time", reftime)
# Set the provenance values in the global attributes
attr["obs_provenance"] = provenance
# Create the file
_nc = ncgen(filename, dims=dims, vars=vars, attr=attr, clobber=clobber,
title=title)
# Return the new file
return _nc
def create_da_bry_std(filename, eta_rho=10, xi_rho=10, s_rho=1, bry=4,
reftime=default_epoch, clobber=False, cdl=None,
title="My BRY STD"):
"""
Create a boundaries standard deviation file
Parameters
----------
filename : string
name and path of file to create
eta_rho: int, optional
number of rows in the eta direction
xi_rho: int, optional
number of columns in the xi direction
s_rho: int, optional
number of s-levels
bry: int, optional
number of open boundaries to specify
reftime: datetime, optional
date of epoch for time origin in netcdf
clobber: bool, optional
If True, clobber any existing files and recreate. If False, use
the existing file definition
cdl: string, optional,
Use the specified CDL file as the definition for the new
netCDF file.
title: string, optional
netcdf attribute title
Returns
-------
nc, netCDF4 object
"""
# Generate the Structure
dims, vars, attr = cdl_parser(
_cdl_dir + "s4dvar_std_b.cdl" if cdl is None else cdl)
# Fill in the appropriate dimension values
dims = _set_grid_dimensions(dims, eta_rho, xi_rho, s_rho)
dims["IorJ"] = max(eta_rho, xi_rho)
dims["boundary"] = bry
vars = _set_time_ref(vars, "ocean_time", reftime)
# Create the file
_nc = ncgen(filename, dims=dims, vars=vars, attr=attr, clobber=clobber,
title=title)
# Return the new file
return _nc
def create_da_frc_std(filename, eta_rho=10, xi_rho=10, s_rho=1,
reftime=default_epoch, clobber=False,
cdl=None, title="My FRC STD"):
"""
Create a forcing standard deviation file
Parameters
----------
filename : string
name and path of file to create
eta_rho: int, optional
number of rows in the eta direction
xi_rho: int, optional
number of columns in the xi direction
s_rho: int, optional
number of s-levels
reftime: datetime, optional
date of epoch for time origin in netcdf
clobber: bool, optional
If True, clobber any existing files and recreate. If False, use
the existing file definition
    cdl: string, optional
Use the specified CDL file as the definition for the new
netCDF file.
title: string, optional
netcdf attribute title
Returns
-------
nc, netCDF4 object
"""
# Generate the Structure
dims, vars, attr = cdl_parser(
_cdl_dir + "s4dvar_std_f.cdl" if cdl is None else cdl)
# Fill in the appropriate dimension values
dims = _set_grid_dimensions(dims, eta_rho, xi_rho, s_rho)
vars = _set_time_ref(vars, "ocean_time", reftime)
# Create the file
_nc = ncgen(filename, dims=dims, vars=vars, attr=attr, clobber=clobber,
title=title)
# Return the new file
return _nc
def create_da_ini_std(filename, eta_rho=10, xi_rho=10, s_rho=1,
reftime=default_epoch, clobber=False,
cdl=None, title="My INI STD"):
"""
Create an initialization standard deviation file
Parameters
----------
filename : string
name and path of file to create
eta_rho: int, optional
number of rows in the eta direction
xi_rho: int, optional
number of columns in the xi direction
s_rho: int, optional
number of s-levels
reftime: datetime, optional
date of epoch for time origin in netcdf
clobber: bool, optional
If True, clobber any existing files and recreate. If False, use
the existing file definition
    cdl: string, optional
Use the specified CDL file as the definition for the new
netCDF file.
title: string, optional
netcdf attribute title
Returns
-------
nc, netCDF4 object
"""
# Generate the Structure
dims, vars, attr = cdl_parser(
_cdl_dir + "s4dvar_std_i.cdl" if cdl is None else cdl)
# Fill in the appropriate dimension values
dims = _set_grid_dimensions(dims, eta_rho, xi_rho, s_rho)
vars = _set_time_ref(vars, "ocean_time", reftime)
# Create the file
_nc = ncgen(filename, dims=dims, vars=vars, attr=attr, clobber=clobber,
title=title)
# Return the new file
return _nc
def create_da_model_std(filename, eta_rho=10, xi_rho=10, s_rho=1,
reftime=default_epoch, clobber=False,
cdl=None, title="My Model STD"):
"""
    Create a time-varying model standard deviation file
Parameters
----------
filename : string
name and path of file to create
eta_rho: int, optional
number of rows in the eta direction
xi_rho: int, optional
number of columns in the xi direction
s_rho: int, optional
number of s-levels
reftime: datetime, optional
date of epoch for time origin in netcdf
clobber: bool, optional
If True, clobber any existing files and recreate. If False, use
the existing file definition
    cdl: string, optional
Use the specified CDL file as the definition for the new
netCDF file.
title: string, optional
netcdf attribute title
Returns
-------
nc, netCDF4 object
"""
# Generate the Structure
dims, vars, attr = cdl_parser(
_cdl_dir + "s4dvar_std_m.cdl" if cdl is None else cdl)
# Fill in the appropriate dimension values
dims = _set_grid_dimensions(dims, eta_rho, xi_rho, s_rho)
vars = _set_time_ref(vars, "ocean_time", reftime)
# Create the file
_nc = ncgen(filename, dims=dims, vars=vars, attr=attr, clobber=clobber,
title=title)
# Return the new file
return _nc
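# The four std creators above (boundary, forcing, initial, model) share one
# pattern: parse a CDL template, size the grid dimensions, anchor
# "ocean_time" to the reference epoch, and write the file. A minimal sketch
# (file names are hypothetical):
#
#   for fname, creator in [("bry_std.nc", create_da_bry_std),
#                          ("ini_std.nc", create_da_ini_std)]:
#       nc = creator(fname, eta_rho=64, xi_rho=128, s_rho=20, clobber=True)
#       nc.close()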
def create_zlevel_grid(filename, lat=10, lon=10, depth=1,
clobber=False, cdl=None,
title="Zlevel Grid", dims=2):
"""
Create z-level grid file
Parameters
----------
filename : string
name and path of file to create
lat: int, optional
number of latitudinal rows
lon: int, optional
number of longitudinal columns
depth: int, optional
number of z-levels
clobber: bool, optional
If True, clobber any existing files and recreate. If False, use
the existing file definition
    cdl: string, optional
Use the specified CDL file as the definition for the new
netCDF file.
title: string, optional
netcdf attribute title
dims: int, optional
number of dimensions to use for lat/lon
Returns
-------
nc, netCDF4 object
"""
    if cdl is None:
if dims == 1:
cdlfile = _cdl_dir + "zlevel_1d_grid.cdl"
else:
cdlfile = _cdl_dir + "zlevel_2d_grid.cdl"
else:
cdlfile = cdl
# Generate the Structure
dims, vars, attr = cdl_parser(cdlfile)
# Fill in the appropriate dimension values
dims["lat"] = lat
dims["lon"] = lon
dims["depth"] = depth
# Create the file
_nc = ncgen(filename, dims=dims, vars=vars, attr=attr, clobber=clobber,
title=title)
# Return the new file
return _nc
def create_zlevel(filename, lat=10, lon=10, depth=1,
reftime=default_epoch,
clobber=False, cdl=None,
title="Zlevel Model Data", dims=2):
"""
    Create a z-level model data file
Parameters
----------
filename : string
name and path of file to create
lat: int, optional
number of latitudinal rows
lon: int, optional
number of longitudinal columns
depth: int, optional
number of z-levels
reftime: datetime, optional
date of epoch for time origin in netcdf
clobber: bool, optional
If True, clobber any existing files and recreate. If False, use
the existing file definition
    cdl: string, optional
Use the specified CDL file as the definition for the new
netCDF file.
title: string, optional
netcdf attribute title
dims: int, optional
number of dimensions to use for lat/lon
Returns
-------
nc, netCDF4 object
"""
    if cdl is None:
if dims == 1:
cdlfile = _cdl_dir + "zlevel_1d.cdl"
else:
cdlfile = _cdl_dir + "zlevel_2d.cdl"
else:
cdlfile = cdl
# Generate the Structure
dims, vars, attr = cdl_parser(cdlfile)
# Fill in the appropriate dimension values
dims["lat"] = lat
dims["lon"] = lon
dims["depth"] = depth
vars = _set_time_ref(vars, "time", reftime)
# Create the file
_nc = ncgen(filename, dims=dims, vars=vars, attr=attr, clobber=clobber,
title=title)
# Return the new file
return _nc
if __name__ == "__main__":
grid = create_zlevel("test.nc")
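    # A 1-D lat/lon grid can be built the same way (sketch; the file name
    # "test_grid.nc" is hypothetical):
    zgrid = create_zlevel_grid("test_grid.nc", lat=90, lon=180, depth=10,
                               dims=1, clobber=True)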
e1d7253fcff3f12f0d7ea0ff1161327ff93301fe | 13,759 | py | Python | Color_Reaching/src/measure_width.py | Igloogloo/Robot_Learning_Enviornments | ad82e35404afaf6f23bb5275e897526266972949 | ["BSD-3-Clause"] | 2 | 2021-08-18T17:07:46.000Z | 2021-09-03T22:05:06.000Z
#! /usr/bin/env python
import cv2
from scipy.spatial import distance as dist
from imutils import perspective
from imutils import contours
import numpy as np
import imutils
def imcrop(img, x1, x2, y1, y2):
    # pad the frame if the box extends past its edges, then crop
    if x1 < 0 or y1 < 0 or x2 > img.shape[1] or y2 > img.shape[0]:
        img, x1, x2, y1, y2 = pad_img_to_fit_bbox(img, x1, x2, y1, y2)
    return img[y1:y2, x1:x2, :]
def pad_img_to_fit_bbox(img, x1, x2, y1, y2):
    # replicate the border so the requested box fits inside the image
    img = cv2.copyMakeBorder(img, -min(0, y1), max(y2 - img.shape[0], 0),
                             -min(0, x1), max(x2 - img.shape[1], 0),
                             cv2.BORDER_REPLICATE)
    # shift the box into the padded image's coordinates and return it
    # along with the padded image (the caller unpacks all five values)
    y2 += -min(0, y1)
    y1 += -min(0, y1)
    x2 += -min(0, x1)
    x1 += -min(0, x1)
    return img, x1, x2, y1, y2
def midpoint(ptA, ptB):
return ((ptA[0] + ptB[0]) * 0.5, (ptA[1] + ptB[1]) * 0.5)
# define a video capture object
vid = cv2.VideoCapture(3)
pixelsPerMetric = 100
while True:
# Capture the video frame
# by frame
ret, frame = vid.read()
# Display the resulting frame
#cv2.imshow('frame', frame)
image = frame
hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
lower_blue = np.array([60,110,100])
upper_blue = np.array([130,255,255])
    # define the range of blue color in HSV; this creates a mask of
    # blue-colored objects found in the frame
mask = cv2.inRange(hsv, lower_blue, upper_blue)
#cv2.imshow('mask',mask)
res = cv2.bitwise_and(frame,frame, mask= mask)
gray = cv2.cvtColor(res, cv2.COLOR_BGR2GRAY)
gray = cv2.GaussianBlur(gray, (7, 7), 0)
#res = cv2.bitwise_and(frame,frame, mask= mask)
#gray = res
#cv2.imshow('frame',frame)
cv2.imshow('mask',mask)
cv2.imshow('res',res)
# perform edge detection, then perform a dilation + erosion to
# close gaps in between object edges
edged = cv2.Canny(gray, 50, 100)
edged = cv2.dilate(edged, None, iterations=1)
edged = cv2.erode(edged, None, iterations=1)
# find contours in the edge map
cnts = cv2.findContours(edged.copy(), cv2.RETR_EXTERNAL,
cv2.CHAIN_APPROX_SIMPLE)
cnts = imutils.grab_contours(cnts)
    # sort the contours from left-to-right and initialize the
    # 'pixels per metric' calibration variable
    # if no contours were found, keep grabbing frames until one appears
    while len(cnts) == 0:
ret, frame = vid.read()
# Display the resulting frame
#cv2.imshow('frame', frame)
image = frame
hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
lower_blue = np.array([60,110,100])
upper_blue = np.array([130,255,255])
        # define the range of blue color in HSV; this creates a mask of
        # blue-colored objects found in the frame
mask = cv2.inRange(hsv, lower_blue, upper_blue)
#cv2.imshow('mask',mask)
res = cv2.bitwise_and(frame,frame, mask= mask)
gray = cv2.cvtColor(res, cv2.COLOR_BGR2GRAY)
gray = cv2.GaussianBlur(gray, (7, 7), 0)
#res = cv2.bitwise_and(frame,frame, mask= mask)
#gray = res
#cv2.imshow('frame',frame)
#cv2.imshow('mask',mask)
#cv2.imshow('res',res)
# perform edge detection, then perform a dilation + erosion to
# close gaps in between object edges
edged = cv2.Canny(gray, 50, 100)
edged = cv2.dilate(edged, None, iterations=1)
edged = cv2.erode(edged, None, iterations=1)
# find contours in the edge map
cnts = cv2.findContours(edged.copy(), cv2.RETR_EXTERNAL,
cv2.CHAIN_APPROX_SIMPLE)
cnts = imutils.grab_contours(cnts)
    (cnts, _) = contours.sort_contours(cnts)
    cropped = image  # fallback so the imshow call below always has a frame
    # loop over the contours individually
    for c in cnts:
# if the contour is not sufficiently large, ignore it
if cv2.contourArea(c) < 1650:
continue
# compute the rotated bounding box of the contour
orig = image.copy()
box = cv2.minAreaRect(c)
box = cv2.cv.BoxPoints(box) if imutils.is_cv2() else cv2.boxPoints(box)
box = np.array(box, dtype="int")
cropped = imcrop(orig, box.min(axis=0)[0], box.max(axis=0)[0], box.min(axis=0)[1], box.max(axis=0)[1])
#cropped = imcrop(img=orig, bbox=box)
# order the points in the contour such that they appear
# in top-left, top-right, bottom-right, and bottom-left
# order, then draw the outline of the rotated bounding
# box
box = perspective.order_points(box)
cv2.drawContours(orig, [box.astype("int")], -1, (0, 255, 0), 2)
# loop over the original points and draw them
for (x, y) in box:
cv2.circle(orig, (int(x), int(y)), 5, (0, 0, 255), -1)
# unpack the ordered bounding box, then compute the midpoint
# between the top-left and top-right coordinates, followed by
# the midpoint between bottom-left and bottom-right coordinates
(tl, tr, br, bl) = box
(tltrX, tltrY) = midpoint(tl, tr)
(blbrX, blbrY) = midpoint(bl, br)
        # compute the midpoint between the top-left and bottom-left points,
        # followed by the midpoint between the top-right and bottom-right
(tlblX, tlblY) = midpoint(tl, bl)
(trbrX, trbrY) = midpoint(tr, br)
# draw the midpoints on the image
cv2.circle(orig, (int(tltrX), int(tltrY)), 5, (255, 0, 0), -1)
cv2.circle(orig, (int(blbrX), int(blbrY)), 5, (255, 0, 0), -1)
cv2.circle(orig, (int(tlblX), int(tlblY)), 5, (255, 0, 0), -1)
cv2.circle(orig, (int(trbrX), int(trbrY)), 5, (255, 0, 0), -1)
# draw lines between the midpoints
cv2.line(orig, (int(tltrX), int(tltrY)), (int(blbrX), int(blbrY)),
(255, 0, 255), 2)
cv2.line(orig, (int(tlblX), int(tlblY)), (int(trbrX), int(trbrY)),
(255, 0, 255), 2)
        # compute the Euclidean distance between the midpoints
dA = dist.euclidean((tltrX, tltrY), (blbrX, blbrY))
dB = dist.euclidean((tlblX, tlblY), (trbrX, trbrY))
#print(dA, dB)
# if the pixels per metric has not been initialized, then
# compute it as the ratio of pixels to supplied metric
# (in this case, inches)
#if pixelsPerMetric is None:
#pixelsPerMetric = dB / args["width"]
# compute the size of the object
dimA = dA / pixelsPerMetric
dimB = dB / pixelsPerMetric
# draw the object sizes on the image
cv2.putText(orig, "{:.1f}in".format(dimA),
(int(tltrX - 15), int(tltrY - 10)), cv2.FONT_HERSHEY_SIMPLEX,
0.65, (255, 255, 255), 2)
cv2.putText(orig, "{:.1f}in".format(dimB),
(int(trbrX + 10), int(trbrY)), cv2.FONT_HERSHEY_SIMPLEX,
0.65, (255, 255, 255), 2)
# show the output image
cv2.imshow("Image", cropped)
#cv2.waitKey(0)
#print("I am running")
    # press the 'q' key to quit the preview loop
    if cv2.waitKey(1) & 0xFF == ord('q'):
break
# After the loop release the cap object
vid.release()
# Destroy all the windows
cv2.destroyAllWindows()
def get_width_image(frame, pixelsPerMetric=100):
# Display the resulting frame
#cv2.imshow('frame', frame)
image = frame
hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
lower_blue = np.array([60,110,100])
upper_blue = np.array([130,255,255])
    # define the range of blue color in HSV; this creates a mask of
    # blue-colored objects found in the frame
mask = cv2.inRange(hsv, lower_blue, upper_blue)
#cv2.imshow('mask',mask)
res = cv2.bitwise_and(frame,frame, mask= mask)
gray = cv2.cvtColor(res, cv2.COLOR_BGR2GRAY)
gray = cv2.GaussianBlur(gray, (7, 7), 0)
# perform edge detection, then perform a dilation + erosion to
# close gaps in between object edges
edged = cv2.Canny(gray, 50, 100)
edged = cv2.dilate(edged, None, iterations=1)
edged = cv2.erode(edged, None, iterations=1)
# find contours in the edge map
cnts = cv2.findContours(edged.copy(), cv2.RETR_EXTERNAL,
cv2.CHAIN_APPROX_SIMPLE)
cnts = imutils.grab_contours(cnts)
(cnts, _) = contours.sort_contours(cnts)
for c in cnts:
# if the contour is not sufficiently large, ignore it
if cv2.contourArea(c) < 1200:
continue
# compute the rotated bounding box of the contour
orig = image.copy()
box = cv2.minAreaRect(c)
box = cv2.cv.BoxPoints(box) if imutils.is_cv2() else cv2.boxPoints(box)
box = np.array(box, dtype="int")
# order the points in the contour such that they appear
# in top-left, top-right, bottom-right, and bottom-left
# order, then draw the outline of the rotated bounding
# box
box = perspective.order_points(box)
cv2.drawContours(orig, [box.astype("int")], -1, (0, 255, 0), 2)
# loop over the original points and draw them
for (x, y) in box:
cv2.circle(orig, (int(x), int(y)), 5, (0, 0, 255), -1)
# unpack the ordered bounding box, then compute the midpoint
# between the top-left and top-right coordinates, followed by
# the midpoint between bottom-left and bottom-right coordinates
(tl, tr, br, bl) = box
(tltrX, tltrY) = midpoint(tl, tr)
(blbrX, blbrY) = midpoint(bl, br)
        # compute the midpoint between the top-left and bottom-left points,
        # followed by the midpoint between the top-right and bottom-right
(tlblX, tlblY) = midpoint(tl, bl)
(trbrX, trbrY) = midpoint(tr, br)
# draw the midpoints on the image
cv2.circle(orig, (int(tltrX), int(tltrY)), 5, (255, 0, 0), -1)
cv2.circle(orig, (int(blbrX), int(blbrY)), 5, (255, 0, 0), -1)
cv2.circle(orig, (int(tlblX), int(tlblY)), 5, (255, 0, 0), -1)
cv2.circle(orig, (int(trbrX), int(trbrY)), 5, (255, 0, 0), -1)
# draw lines between the midpoints
cv2.line(orig, (int(tltrX), int(tltrY)), (int(blbrX), int(blbrY)),
(255, 0, 255), 2)
cv2.line(orig, (int(tlblX), int(tlblY)), (int(trbrX), int(trbrY)),
(255, 0, 255), 2)
        # compute the Euclidean distance between the midpoints
dA = dist.euclidean((tltrX, tltrY), (blbrX, blbrY))
dB = dist.euclidean((tlblX, tlblY), (trbrX, trbrY))
#print(dA, dB)
# if the pixels per metric has not been initialized, then
# compute it as the ratio of pixels to supplied metric
# (in this case, inches)
#if pixelsPerMetric is None:
#pixelsPerMetric = dB / args["width"]
# compute the size of the object
dimA = dA / pixelsPerMetric
dimB = dB / pixelsPerMetric
# draw the object sizes on the image
cv2.putText(orig, "{:.1f}in".format(dimA),
(int(tltrX - 15), int(tltrY - 10)), cv2.FONT_HERSHEY_SIMPLEX,
0.65, (255, 255, 255), 2)
cv2.putText(orig, "{:.1f}in".format(dimB),
(int(trbrX + 10), int(trbrY)), cv2.FONT_HERSHEY_SIMPLEX,
0.65, (255, 255, 255), 2)
# show the output image
#cv2.imshow("Image", orig)
        return orig
    # no contour passed the area filter; return the unannotated frame
    return frame
def get_width(frame, pixelsPerMetric, max_width=4):
"""
    Returns the width of a blue object.
    max_width is returned when no object is fully detectable and
    therefore no contours are found.
"""
image = frame
hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
lower_blue = np.array([60,110,100])
upper_blue = np.array([130,255,255])
mask = cv2.inRange(hsv, lower_blue, upper_blue)
res = cv2.bitwise_and(frame,frame, mask= mask)
gray = cv2.cvtColor(res, cv2.COLOR_BGR2GRAY)
gray = cv2.GaussianBlur(gray, (7, 7), 0)
edged = cv2.Canny(gray, 50, 100)
edged = cv2.dilate(edged, None, iterations=1)
edged = cv2.erode(edged, None, iterations=1)
cnts = cv2.findContours(edged.copy(), cv2.RETR_EXTERNAL,
cv2.CHAIN_APPROX_SIMPLE)
cnts = imutils.grab_contours(cnts)
if len(cnts) == 0:
return (max_width, max_width)
(cnts, _) = contours.sort_contours(cnts)
for c in cnts:
if cv2.contourArea(c) < 1200:
continue
orig = image.copy()
box = cv2.minAreaRect(c)
box = cv2.cv.BoxPoints(box) if imutils.is_cv2() else cv2.boxPoints(box)
box = np.array(box, dtype="int")
box = perspective.order_points(box)
cv2.drawContours(orig, [box.astype("int")], -1, (0, 255, 0), 2)
for (x, y) in box:
cv2.circle(orig, (int(x), int(y)), 5, (0, 0, 255), -1)
(tl, tr, br, bl) = box
(tltrX, tltrY) = midpoint(tl, tr)
(blbrX, blbrY) = midpoint(bl, br)
(tlblX, tlblY) = midpoint(tl, bl)
(trbrX, trbrY) = midpoint(tr, br)
cv2.circle(orig, (int(tltrX), int(tltrY)), 5, (255, 0, 0), -1)
cv2.circle(orig, (int(blbrX), int(blbrY)), 5, (255, 0, 0), -1)
cv2.circle(orig, (int(tlblX), int(tlblY)), 5, (255, 0, 0), -1)
cv2.circle(orig, (int(trbrX), int(trbrY)), 5, (255, 0, 0), -1)
cv2.line(orig, (int(tltrX), int(tltrY)), (int(blbrX), int(blbrY)),
(255, 0, 255), 2)
cv2.line(orig, (int(tlblX), int(tlblY)), (int(trbrX), int(trbrY)),
(255, 0, 255), 2)
dA = dist.euclidean((tltrX, tltrY), (blbrX, blbrY))
dB = dist.euclidean((tlblX, tlblY), (trbrX, trbrY))
dimA = dA / pixelsPerMetric
dimB = dB / pixelsPerMetric
        return (dimA, dimB)
    # no contour passed the area filter
    return (max_width, max_width)
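# Example (sketch): measure the blue object's width from one camera frame.
# The camera index and calibration constant below are hypothetical.
#
#   cap = cv2.VideoCapture(0)
#   ok, frame = cap.read()
#   if ok:
#       width_a, width_b = get_width(frame, pixelsPerMetric=100)
#       print(width_a, width_b)
#   cap.release()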
c038f7704e44a8e682f1b2679be91d71acc0abc7 | 6,467 | py | Python | campaign/migrations/0001_initial.py | devenney/reverie | 43b2948678ff1e76a1b7a9cab812c0dfbf3633a0 | ["MIT"] | 47 | 2017-08-30T00:33:20.000Z | 2018-03-09T07:38:39.000Z
# Generated by Django 3.0.5 on 2020-04-04 11:39
import campaign.helpers
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import image_cropping.fields
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Campaign',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('slug', models.SlugField(unique=True)),
('name', models.CharField(max_length=50, unique=True)),
('tagline', models.CharField(max_length=50)),
('image', models.ImageField(blank=True, upload_to=campaign.helpers.RandomFileName('images/campaign'))),
('cropping', image_cropping.fields.ImageRatioField('image', '1500x500', adapt_rotation=False, allow_fullsize=False, free_crop=False, help_text=None, hide_image_field=False, size_warning=False, verbose_name='cropping')),
('description', models.TextField(max_length=500)),
('public', models.BooleanField(default=False)),
('game_master', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, related_name='gm', to=settings.AUTH_USER_MODEL)),
                ('players', models.ManyToManyField(blank=True, null=True, related_name='player', to=settings.AUTH_USER_MODEL)),  # null has no effect on ManyToManyField (Django fields.W340)
],
),
migrations.CreateModel(
name='Log',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('slug', models.SlugField(unique=True)),
('name', models.CharField(max_length=50, unique=True)),
('image', models.ImageField(blank=True, upload_to='uploaded_images')),
('cropping', image_cropping.fields.ImageRatioField('image', '1500x500', adapt_rotation=False, allow_fullsize=False, free_crop=False, help_text=None, hide_image_field=False, size_warning=False, verbose_name='cropping')),
('description', models.TextField(max_length=5000)),
('date', models.DateField()),
('campaign', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='campaign.Campaign')),
],
options={
'ordering': ['date'],
},
),
migrations.CreateModel(
name='Location',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('slug', models.SlugField(unique=True)),
('name', models.CharField(max_length=50, unique=True)),
('tagline', models.CharField(max_length=50, unique=True)),
('image', models.ImageField(blank=True, upload_to='uploaded_images')),
('cropping', image_cropping.fields.ImageRatioField('image', '500x500', adapt_rotation=False, allow_fullsize=False, free_crop=False, help_text=None, hide_image_field=False, size_warning=False, verbose_name='cropping')),
('description', models.TextField(max_length=500)),
('campaign', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='campaign.Campaign')),
],
),
migrations.CreateModel(
name='Item',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('slug', models.SlugField(unique=True)),
('name', models.CharField(max_length=50, unique=True)),
('tagline', models.CharField(max_length=50, unique=True)),
('image', models.ImageField(blank=True, upload_to='uploaded_images')),
('cropping', image_cropping.fields.ImageRatioField('image', '500x500', adapt_rotation=False, allow_fullsize=False, free_crop=False, help_text=None, hide_image_field=False, size_warning=False, verbose_name='cropping')),
('description', models.TextField(max_length=500)),
('campaign', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='campaign.Campaign')),
],
),
migrations.CreateModel(
name='Faction',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('slug', models.SlugField(unique=True)),
('name', models.CharField(max_length=50, unique=True)),
('tagline', models.CharField(max_length=50)),
('image', models.ImageField(blank=True, upload_to='uploaded_images')),
('cropping', image_cropping.fields.ImageRatioField('image', '500x500', adapt_rotation=False, allow_fullsize=False, free_crop=False, help_text=None, hide_image_field=False, size_warning=False, verbose_name='cropping')),
('description', models.TextField(max_length=500)),
('campaign', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='campaign.Campaign')),
],
),
migrations.CreateModel(
name='Character',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('slug', models.SlugField(blank=True, unique=True)),
('name', models.CharField(max_length=50, unique=True)),
('is_pc', models.BooleanField(default=False)),
('tagline', models.CharField(max_length=50)),
('image', models.ImageField(blank=True, upload_to='uploaded_images')),
('cropping', image_cropping.fields.ImageRatioField('image', '500x500', adapt_rotation=False, allow_fullsize=False, free_crop=False, help_text=None, hide_image_field=False, size_warning=False, verbose_name='cropping')),
('description', models.TextField(max_length=500)),
('campaign', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='campaign.Campaign')),
('player', models.ForeignKey(blank=True, null=True, on_delete=django.db.models.deletion.PROTECT, to=settings.AUTH_USER_MODEL)),
],
),
]
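# To apply this initial migration, use the standard Django workflow
# (assumes the "campaign" app is listed in INSTALLED_APPS):
#
#   python manage.py migrate campaign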
fbe47a5dc0bfcf96ec8c1693d002a063949acc25 | 4,300 | py | Python | dentexchange/apps/matches/tests/test_delete_job_posting_match_view.py | hellhound/dentexchange | 58ae303e842404fc9e1860f294ec8044a332bef3 | ["BSD-3-Clause"] | 1 | 2017-11-09T23:09:51.000Z | 2017-11-09T23:09:51.000Z
# -*- coding:utf-8 -*-
import unittest
import mock  # standalone mock package; unittest.mock is the Python 3 equivalent
from django.contrib.auth.models import User
from ..views import DeleteJobPostingMatchView
from ..models import Match
from employer.models import JobPosting
class DeleteJobPostingMatchViewTestCase(unittest.TestCase):
@mock.patch('matches.views.views.JSONResponseMixin.render_json_response')
@mock.patch('matches.views.JobPosting.objects.get')
def test_get_ajax_should_create_match_with_job_posting_and_return_status_ok(
self, get, render_json_response):
# setup
view = DeleteJobPostingMatchView()
user = User()
request = mock.Mock()
pk = 1
request.GET = dict(pk=pk)
request.user = user
view.request = request
context = dict(status='ok', total=0)
matching_object = get.return_value
match = matching_object.matches.get.return_value
# action
returned_value = view.get_ajax(request)
# assert
self.assertDictEqual(dict(pk=pk), get.call_args[1])
self.assertDictEqual(dict(user=user),
matching_object.matches.get.call_args[1])
self.assertEqual(1, match.delete.call_count)
self.assertTupleEqual((context,), render_json_response.call_args[0])
self.assertEqual(id(render_json_response.return_value),
id(returned_value))
@mock.patch('matches.views.views.JSONResponseMixin.render_json_response')
@mock.patch('matches.views.JobPosting.objects.get')
def test_get_ajax_should_return_bad_request_response_when_pk_doesnt_exist(
self, get, render_json_response):
# setup
view = DeleteJobPostingMatchView()
user = User()
request = mock.Mock()
pk = 1
request.GET = dict(pk=pk)
request.user = user
view.request = request
context = dict(status='error')
get.side_effect = JobPosting.DoesNotExist
# action
returned_value = view.get_ajax(request)
# assert
self.assertDictEqual(dict(pk=pk), get.call_args[1])
self.assertTupleEqual(((context,), dict(status=400),),
render_json_response.call_args)
self.assertEqual(id(render_json_response.return_value),
id(returned_value))
@mock.patch('matches.views.views.JSONResponseMixin.render_json_response')
@mock.patch('matches.views.JobPosting.objects.get')
def test_get_ajax_should_return_bad_request_response_when_multiple_pk(
self, get, render_json_response):
# setup
view = DeleteJobPostingMatchView()
user = User()
request = mock.Mock()
pk = 1
request.GET = dict(pk=pk)
request.user = user
view.request = request
context = dict(status='error')
get.side_effect = JobPosting.MultipleObjectsReturned
# action
returned_value = view.get_ajax(request)
# assert
self.assertDictEqual(dict(pk=pk), get.call_args[1])
self.assertTupleEqual(((context,), dict(status=400),),
render_json_response.call_args)
self.assertEqual(id(render_json_response.return_value),
id(returned_value))
@mock.patch('matches.views.views.JSONResponseMixin.render_json_response')
@mock.patch('matches.views.JobPosting.objects.get')
def test_get_ajax_should_return_bad_request_response_when_theres_no_match_for_matching_object(
self, get, render_json_response):
# setup
view = DeleteJobPostingMatchView()
user = User()
request = mock.Mock()
pk = 1
request.GET = dict(pk=pk)
request.user = user
view.request = request
context = dict(status='error')
matching_object = get.return_value
matching_object.matches.get.side_effect = Match.DoesNotExist
# action
returned_value = view.get_ajax(request)
# assert
self.assertDictEqual(dict(pk=pk), get.call_args[1])
self.assertDictEqual(dict(user=user),
matching_object.matches.get.call_args[1])
self.assertTupleEqual(((context,), dict(status=400),),
render_json_response.call_args)
self.assertEqual(id(render_json_response.return_value),
id(returned_value))
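# These tests patch out the ORM and JSON mixin, so they run without a
# database. A sketch of invoking them directly (module path assumes the
# matches app layout shown above):
#
#   python -m unittest matches.tests.test_delete_job_posting_match_view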
97d6f02818dac0896b0df96e22a665185a82c4e5 | 34,923 | py | Python | sdk/python/pulumi_openstack/loadbalancer/pool_v1.py | pulumi/pulumi-openstack | 945eed22a82784e9f0b3aa56168b2397c2f503e8 | ["ECL-2.0", "Apache-2.0"] | 34 | 2018-09-12T12:37:51.000Z | 2022-02-04T19:32:13.000Z
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['PoolV1Args', 'PoolV1']
@pulumi.input_type
class PoolV1Args:
def __init__(__self__, *,
lb_method: pulumi.Input[str],
protocol: pulumi.Input[str],
subnet_id: pulumi.Input[str],
lb_provider: Optional[pulumi.Input[str]] = None,
members: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
monitor_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
tenant_id: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a PoolV1 resource.
:param pulumi.Input[str] lb_method: The algorithm used to distribute load between the
members of the pool. The current specification supports 'ROUND_ROBIN' and
'LEAST_CONNECTIONS' as valid values for this attribute.
:param pulumi.Input[str] protocol: The protocol used by the pool members, you can use
               either 'TCP', 'HTTP', or 'HTTPS'. Changing this creates a new pool.
:param pulumi.Input[str] subnet_id: The network on which the members of the pool will be
located. Only members that are on this network can be added to the pool.
Changing this creates a new pool.
:param pulumi.Input[str] lb_provider: The backend load balancing provider. For example:
`haproxy`, `F5`, etc.
:param pulumi.Input[Sequence[pulumi.Input[str]]] members: An existing node to add to the pool. Changing this
updates the members of the pool. The member object structure is documented
below. Please note that the `member` block is deprecated in favor of the
`loadbalancer.MemberV1` resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] monitor_ids: A list of IDs of monitors to associate with the
pool.
:param pulumi.Input[str] name: The name of the pool. Changing this updates the name of
the existing pool.
:param pulumi.Input[str] region: The region in which to obtain the V2 Networking client.
A Networking client is needed to create an LB pool. If omitted, the
`region` argument of the provider is used. Changing this creates a new
LB pool.
:param pulumi.Input[str] tenant_id: The owner of the member. Required if admin wants to
create a pool member for another tenant. Changing this creates a new member.
"""
pulumi.set(__self__, "lb_method", lb_method)
pulumi.set(__self__, "protocol", protocol)
pulumi.set(__self__, "subnet_id", subnet_id)
if lb_provider is not None:
pulumi.set(__self__, "lb_provider", lb_provider)
if members is not None:
warnings.warn("""Use openstack_lb_member_v1 instead""", DeprecationWarning)
pulumi.log.warn("""members is deprecated: Use openstack_lb_member_v1 instead""")
if members is not None:
pulumi.set(__self__, "members", members)
if monitor_ids is not None:
pulumi.set(__self__, "monitor_ids", monitor_ids)
if name is not None:
pulumi.set(__self__, "name", name)
if region is not None:
pulumi.set(__self__, "region", region)
if tenant_id is not None:
pulumi.set(__self__, "tenant_id", tenant_id)
@property
@pulumi.getter(name="lbMethod")
def lb_method(self) -> pulumi.Input[str]:
"""
The algorithm used to distribute load between the
members of the pool. The current specification supports 'ROUND_ROBIN' and
'LEAST_CONNECTIONS' as valid values for this attribute.
"""
return pulumi.get(self, "lb_method")
@lb_method.setter
def lb_method(self, value: pulumi.Input[str]):
pulumi.set(self, "lb_method", value)
@property
@pulumi.getter
def protocol(self) -> pulumi.Input[str]:
"""
The protocol used by the pool members, you can use
        either 'TCP', 'HTTP', or 'HTTPS'. Changing this creates a new pool.
"""
return pulumi.get(self, "protocol")
@protocol.setter
def protocol(self, value: pulumi.Input[str]):
pulumi.set(self, "protocol", value)
@property
@pulumi.getter(name="subnetId")
def subnet_id(self) -> pulumi.Input[str]:
"""
The network on which the members of the pool will be
located. Only members that are on this network can be added to the pool.
Changing this creates a new pool.
"""
return pulumi.get(self, "subnet_id")
@subnet_id.setter
def subnet_id(self, value: pulumi.Input[str]):
pulumi.set(self, "subnet_id", value)
@property
@pulumi.getter(name="lbProvider")
def lb_provider(self) -> Optional[pulumi.Input[str]]:
"""
The backend load balancing provider. For example:
`haproxy`, `F5`, etc.
"""
return pulumi.get(self, "lb_provider")
@lb_provider.setter
def lb_provider(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "lb_provider", value)
@property
@pulumi.getter
def members(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
An existing node to add to the pool. Changing this
updates the members of the pool. The member object structure is documented
below. Please note that the `member` block is deprecated in favor of the
`loadbalancer.MemberV1` resource.
"""
return pulumi.get(self, "members")
@members.setter
def members(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "members", value)
@property
@pulumi.getter(name="monitorIds")
def monitor_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of IDs of monitors to associate with the
pool.
"""
return pulumi.get(self, "monitor_ids")
@monitor_ids.setter
def monitor_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "monitor_ids", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the pool. Changing this updates the name of
the existing pool.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def region(self) -> Optional[pulumi.Input[str]]:
"""
The region in which to obtain the V2 Networking client.
A Networking client is needed to create an LB pool. If omitted, the
`region` argument of the provider is used. Changing this creates a new
LB pool.
"""
return pulumi.get(self, "region")
@region.setter
def region(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "region", value)
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> Optional[pulumi.Input[str]]:
"""
The owner of the member. Required if admin wants to
create a pool member for another tenant. Changing this creates a new member.
"""
return pulumi.get(self, "tenant_id")
@tenant_id.setter
def tenant_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "tenant_id", value)
@pulumi.input_type
class _PoolV1State:
def __init__(__self__, *,
lb_method: Optional[pulumi.Input[str]] = None,
lb_provider: Optional[pulumi.Input[str]] = None,
members: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
monitor_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
protocol: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
subnet_id: Optional[pulumi.Input[str]] = None,
tenant_id: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering PoolV1 resources.
:param pulumi.Input[str] lb_method: The algorithm used to distribute load between the
members of the pool. The current specification supports 'ROUND_ROBIN' and
'LEAST_CONNECTIONS' as valid values for this attribute.
:param pulumi.Input[str] lb_provider: The backend load balancing provider. For example:
`haproxy`, `F5`, etc.
:param pulumi.Input[Sequence[pulumi.Input[str]]] members: An existing node to add to the pool. Changing this
updates the members of the pool. The member object structure is documented
below. Please note that the `member` block is deprecated in favor of the
`loadbalancer.MemberV1` resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] monitor_ids: A list of IDs of monitors to associate with the
pool.
:param pulumi.Input[str] name: The name of the pool. Changing this updates the name of
the existing pool.
:param pulumi.Input[str] protocol: The protocol used by the pool members, you can use
               either 'TCP', 'HTTP', or 'HTTPS'. Changing this creates a new pool.
:param pulumi.Input[str] region: The region in which to obtain the V2 Networking client.
A Networking client is needed to create an LB pool. If omitted, the
`region` argument of the provider is used. Changing this creates a new
LB pool.
:param pulumi.Input[str] subnet_id: The network on which the members of the pool will be
located. Only members that are on this network can be added to the pool.
Changing this creates a new pool.
:param pulumi.Input[str] tenant_id: The owner of the member. Required if admin wants to
create a pool member for another tenant. Changing this creates a new member.
"""
if lb_method is not None:
pulumi.set(__self__, "lb_method", lb_method)
if lb_provider is not None:
pulumi.set(__self__, "lb_provider", lb_provider)
if members is not None:
warnings.warn("""Use openstack_lb_member_v1 instead""", DeprecationWarning)
pulumi.log.warn("""members is deprecated: Use openstack_lb_member_v1 instead""")
if members is not None:
pulumi.set(__self__, "members", members)
if monitor_ids is not None:
pulumi.set(__self__, "monitor_ids", monitor_ids)
if name is not None:
pulumi.set(__self__, "name", name)
if protocol is not None:
pulumi.set(__self__, "protocol", protocol)
if region is not None:
pulumi.set(__self__, "region", region)
if subnet_id is not None:
pulumi.set(__self__, "subnet_id", subnet_id)
if tenant_id is not None:
pulumi.set(__self__, "tenant_id", tenant_id)
@property
@pulumi.getter(name="lbMethod")
def lb_method(self) -> Optional[pulumi.Input[str]]:
"""
The algorithm used to distribute load between the
members of the pool. The current specification supports 'ROUND_ROBIN' and
'LEAST_CONNECTIONS' as valid values for this attribute.
"""
return pulumi.get(self, "lb_method")
@lb_method.setter
def lb_method(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "lb_method", value)
@property
@pulumi.getter(name="lbProvider")
def lb_provider(self) -> Optional[pulumi.Input[str]]:
"""
The backend load balancing provider. For example:
`haproxy`, `F5`, etc.
"""
return pulumi.get(self, "lb_provider")
@lb_provider.setter
def lb_provider(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "lb_provider", value)
@property
@pulumi.getter
def members(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
An existing node to add to the pool. Changing this
updates the members of the pool. The member object structure is documented
below. Please note that the `member` block is deprecated in favor of the
`loadbalancer.MemberV1` resource.
"""
return pulumi.get(self, "members")
@members.setter
def members(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "members", value)
@property
@pulumi.getter(name="monitorIds")
def monitor_ids(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
A list of IDs of monitors to associate with the
pool.
"""
return pulumi.get(self, "monitor_ids")
@monitor_ids.setter
def monitor_ids(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "monitor_ids", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the pool. Changing this updates the name of
the existing pool.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def protocol(self) -> Optional[pulumi.Input[str]]:
"""
The protocol used by the pool members, you can use
        either 'TCP', 'HTTP', or 'HTTPS'. Changing this creates a new pool.
"""
return pulumi.get(self, "protocol")
@protocol.setter
def protocol(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "protocol", value)
@property
@pulumi.getter
def region(self) -> Optional[pulumi.Input[str]]:
"""
The region in which to obtain the V2 Networking client.
A Networking client is needed to create an LB pool. If omitted, the
`region` argument of the provider is used. Changing this creates a new
LB pool.
"""
return pulumi.get(self, "region")
@region.setter
def region(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "region", value)
@property
@pulumi.getter(name="subnetId")
def subnet_id(self) -> Optional[pulumi.Input[str]]:
"""
The network on which the members of the pool will be
located. Only members that are on this network can be added to the pool.
Changing this creates a new pool.
"""
return pulumi.get(self, "subnet_id")
@subnet_id.setter
def subnet_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "subnet_id", value)
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> Optional[pulumi.Input[str]]:
"""
The owner of the member. Required if admin wants to
create a pool member for another tenant. Changing this creates a new member.
"""
return pulumi.get(self, "tenant_id")
@tenant_id.setter
def tenant_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "tenant_id", value)
class PoolV1(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
lb_method: Optional[pulumi.Input[str]] = None,
lb_provider: Optional[pulumi.Input[str]] = None,
members: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
monitor_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
protocol: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
subnet_id: Optional[pulumi.Input[str]] = None,
tenant_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Manages a V1 load balancer pool resource within OpenStack.
## Example Usage
```python
import pulumi
import pulumi_openstack as openstack
pool1 = openstack.loadbalancer.PoolV1("pool1",
lb_method="ROUND_ROBIN",
lb_provider="haproxy",
monitor_ids=["67890"],
protocol="HTTP",
subnet_id="12345")
```
## Complete Load Balancing Stack Example
```python
import pulumi
import pulumi_openstack as openstack
network1 = openstack.networking.Network("network1", admin_state_up=True)
subnet1 = openstack.networking.Subnet("subnet1",
cidr="192.168.199.0/24",
ip_version=4,
network_id=network1.id)
secgroup1 = openstack.compute.SecGroup("secgroup1",
description="Rules for secgroup_1",
rules=[
openstack.compute.SecGroupRuleArgs(
cidr="0.0.0.0/0",
from_port=-1,
ip_protocol="icmp",
to_port=-1,
),
openstack.compute.SecGroupRuleArgs(
cidr="0.0.0.0/0",
from_port=80,
ip_protocol="tcp",
to_port=80,
),
])
instance1 = openstack.compute.Instance("instance1",
networks=[openstack.compute.InstanceNetworkArgs(
uuid=network1.id,
)],
security_groups=[
"default",
secgroup1.name,
])
instance2 = openstack.compute.Instance("instance2",
networks=[openstack.compute.InstanceNetworkArgs(
uuid=network1.id,
)],
security_groups=[
"default",
secgroup1.name,
])
monitor1 = openstack.loadbalancer.MonitorV1("monitor1",
admin_state_up="true",
delay=30,
max_retries=3,
timeout=5,
type="TCP")
pool1 = openstack.loadbalancer.PoolV1("pool1",
lb_method="ROUND_ROBIN",
monitor_ids=[monitor1.id],
protocol="TCP",
subnet_id=subnet1.id)
member1 = openstack.loadbalancer.MemberV1("member1",
address=instance1.access_ip_v4,
pool_id=pool1.id,
port=80)
member2 = openstack.loadbalancer.MemberV1("member2",
address=instance2.access_ip_v4,
pool_id=pool1.id,
port=80)
vip1 = openstack.loadbalancer.Vip("vip1",
pool_id=pool1.id,
port=80,
protocol="TCP",
subnet_id=subnet1.id)
```
## Notes
The `member` block is deprecated in favor of the `loadbalancer.MemberV1` resource.
## Import
Load Balancer Pools can be imported using the `id`, e.g.
```sh
$ pulumi import openstack:loadbalancer/poolV1:PoolV1 pool_1 b255e6ba-02ad-43e6-8951-3428ca26b713
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] lb_method: The algorithm used to distribute load between the
members of the pool. The current specification supports 'ROUND_ROBIN' and
'LEAST_CONNECTIONS' as valid values for this attribute.
:param pulumi.Input[str] lb_provider: The backend load balancing provider. For example:
`haproxy`, `F5`, etc.
:param pulumi.Input[Sequence[pulumi.Input[str]]] members: An existing node to add to the pool. Changing this
updates the members of the pool. The member object structure is documented
below. Please note that the `member` block is deprecated in favor of the
`loadbalancer.MemberV1` resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] monitor_ids: A list of IDs of monitors to associate with the
pool.
:param pulumi.Input[str] name: The name of the pool. Changing this updates the name of
the existing pool.
:param pulumi.Input[str] protocol: The protocol used by the pool members, you can use
               either 'TCP', 'HTTP', or 'HTTPS'. Changing this creates a new pool.
:param pulumi.Input[str] region: The region in which to obtain the V2 Networking client.
A Networking client is needed to create an LB pool. If omitted, the
`region` argument of the provider is used. Changing this creates a new
LB pool.
:param pulumi.Input[str] subnet_id: The network on which the members of the pool will be
located. Only members that are on this network can be added to the pool.
Changing this creates a new pool.
:param pulumi.Input[str] tenant_id: The owner of the member. Required if admin wants to
create a pool member for another tenant. Changing this creates a new member.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: PoolV1Args,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages a V1 load balancer pool resource within OpenStack.
## Example Usage
```python
import pulumi
import pulumi_openstack as openstack
pool1 = openstack.loadbalancer.PoolV1("pool1",
lb_method="ROUND_ROBIN",
lb_provider="haproxy",
monitor_ids=["67890"],
protocol="HTTP",
subnet_id="12345")
```
## Complete Load Balancing Stack Example
```python
import pulumi
import pulumi_openstack as openstack
network1 = openstack.networking.Network("network1", admin_state_up=True)
subnet1 = openstack.networking.Subnet("subnet1",
cidr="192.168.199.0/24",
ip_version=4,
network_id=network1.id)
secgroup1 = openstack.compute.SecGroup("secgroup1",
description="Rules for secgroup_1",
rules=[
openstack.compute.SecGroupRuleArgs(
cidr="0.0.0.0/0",
from_port=-1,
ip_protocol="icmp",
to_port=-1,
),
openstack.compute.SecGroupRuleArgs(
cidr="0.0.0.0/0",
from_port=80,
ip_protocol="tcp",
to_port=80,
),
])
instance1 = openstack.compute.Instance("instance1",
networks=[openstack.compute.InstanceNetworkArgs(
uuid=network1.id,
)],
security_groups=[
"default",
secgroup1.name,
])
instance2 = openstack.compute.Instance("instance2",
networks=[openstack.compute.InstanceNetworkArgs(
uuid=network1.id,
)],
security_groups=[
"default",
secgroup1.name,
])
monitor1 = openstack.loadbalancer.MonitorV1("monitor1",
admin_state_up="true",
delay=30,
max_retries=3,
timeout=5,
type="TCP")
pool1 = openstack.loadbalancer.PoolV1("pool1",
lb_method="ROUND_ROBIN",
monitor_ids=[monitor1.id],
protocol="TCP",
subnet_id=subnet1.id)
member1 = openstack.loadbalancer.MemberV1("member1",
address=instance1.access_ip_v4,
pool_id=pool1.id,
port=80)
member2 = openstack.loadbalancer.MemberV1("member2",
address=instance2.access_ip_v4,
pool_id=pool1.id,
port=80)
vip1 = openstack.loadbalancer.Vip("vip1",
pool_id=pool1.id,
port=80,
protocol="TCP",
subnet_id=subnet1.id)
```
## Notes
The `member` block is deprecated in favor of the `loadbalancer.MemberV1` resource.
## Import
Load Balancer Pools can be imported using the `id`, e.g.
```sh
$ pulumi import openstack:loadbalancer/poolV1:PoolV1 pool_1 b255e6ba-02ad-43e6-8951-3428ca26b713
```
:param str resource_name: The name of the resource.
:param PoolV1Args args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(PoolV1Args, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
lb_method: Optional[pulumi.Input[str]] = None,
lb_provider: Optional[pulumi.Input[str]] = None,
members: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
monitor_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
protocol: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
subnet_id: Optional[pulumi.Input[str]] = None,
tenant_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = PoolV1Args.__new__(PoolV1Args)
if lb_method is None and not opts.urn:
raise TypeError("Missing required property 'lb_method'")
__props__.__dict__["lb_method"] = lb_method
__props__.__dict__["lb_provider"] = lb_provider
if members is not None and not opts.urn:
warnings.warn("""Use openstack_lb_member_v1 instead""", DeprecationWarning)
pulumi.log.warn("""members is deprecated: Use openstack_lb_member_v1 instead""")
__props__.__dict__["members"] = members
__props__.__dict__["monitor_ids"] = monitor_ids
__props__.__dict__["name"] = name
if protocol is None and not opts.urn:
raise TypeError("Missing required property 'protocol'")
__props__.__dict__["protocol"] = protocol
__props__.__dict__["region"] = region
if subnet_id is None and not opts.urn:
raise TypeError("Missing required property 'subnet_id'")
__props__.__dict__["subnet_id"] = subnet_id
__props__.__dict__["tenant_id"] = tenant_id
super(PoolV1, __self__).__init__(
'openstack:loadbalancer/poolV1:PoolV1',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
lb_method: Optional[pulumi.Input[str]] = None,
lb_provider: Optional[pulumi.Input[str]] = None,
members: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
monitor_ids: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
protocol: Optional[pulumi.Input[str]] = None,
region: Optional[pulumi.Input[str]] = None,
subnet_id: Optional[pulumi.Input[str]] = None,
tenant_id: Optional[pulumi.Input[str]] = None) -> 'PoolV1':
"""
Get an existing PoolV1 resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] lb_method: The algorithm used to distribute load between the
members of the pool. The current specification supports 'ROUND_ROBIN' and
'LEAST_CONNECTIONS' as valid values for this attribute.
:param pulumi.Input[str] lb_provider: The backend load balancing provider. For example:
`haproxy`, `F5`, etc.
:param pulumi.Input[Sequence[pulumi.Input[str]]] members: An existing node to add to the pool. Changing this
updates the members of the pool. The member object structure is documented
below. Please note that the `member` block is deprecated in favor of the
`loadbalancer.MemberV1` resource.
:param pulumi.Input[Sequence[pulumi.Input[str]]] monitor_ids: A list of IDs of monitors to associate with the
pool.
:param pulumi.Input[str] name: The name of the pool. Changing this updates the name of
the existing pool.
:param pulumi.Input[str] protocol: The protocol used by the pool members, you can use
               either 'TCP', 'HTTP', or 'HTTPS'. Changing this creates a new pool.
:param pulumi.Input[str] region: The region in which to obtain the V2 Networking client.
A Networking client is needed to create an LB pool. If omitted, the
`region` argument of the provider is used. Changing this creates a new
LB pool.
:param pulumi.Input[str] subnet_id: The network on which the members of the pool will be
located. Only members that are on this network can be added to the pool.
Changing this creates a new pool.
:param pulumi.Input[str] tenant_id: The owner of the member. Required if admin wants to
create a pool member for another tenant. Changing this creates a new member.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _PoolV1State.__new__(_PoolV1State)
__props__.__dict__["lb_method"] = lb_method
__props__.__dict__["lb_provider"] = lb_provider
__props__.__dict__["members"] = members
__props__.__dict__["monitor_ids"] = monitor_ids
__props__.__dict__["name"] = name
__props__.__dict__["protocol"] = protocol
__props__.__dict__["region"] = region
__props__.__dict__["subnet_id"] = subnet_id
__props__.__dict__["tenant_id"] = tenant_id
return PoolV1(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="lbMethod")
def lb_method(self) -> pulumi.Output[str]:
"""
The algorithm used to distribute load between the
members of the pool. The current specification supports 'ROUND_ROBIN' and
'LEAST_CONNECTIONS' as valid values for this attribute.
"""
return pulumi.get(self, "lb_method")
@property
@pulumi.getter(name="lbProvider")
def lb_provider(self) -> pulumi.Output[str]:
"""
The backend load balancing provider. For example:
`haproxy`, `F5`, etc.
"""
return pulumi.get(self, "lb_provider")
@property
@pulumi.getter
def members(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
An existing node to add to the pool. Changing this
updates the members of the pool. The member object structure is documented
below. Please note that the `member` block is deprecated in favor of the
`loadbalancer.MemberV1` resource.
"""
return pulumi.get(self, "members")
@property
@pulumi.getter(name="monitorIds")
def monitor_ids(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
A list of IDs of monitors to associate with the
pool.
"""
return pulumi.get(self, "monitor_ids")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the pool. Changing this updates the name of
the existing pool.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def protocol(self) -> pulumi.Output[str]:
"""
The protocol used by the pool members; valid values are
'TCP', 'HTTP', and 'HTTPS'. Changing this creates a new pool.
"""
return pulumi.get(self, "protocol")
@property
@pulumi.getter
def region(self) -> pulumi.Output[str]:
"""
The region in which to obtain the V2 Networking client.
A Networking client is needed to create an LB pool. If omitted, the
`region` argument of the provider is used. Changing this creates a new
LB pool.
"""
return pulumi.get(self, "region")
@property
@pulumi.getter(name="subnetId")
def subnet_id(self) -> pulumi.Output[str]:
"""
The network on which the members of the pool will be
located. Only members that are on this network can be added to the pool.
Changing this creates a new pool.
"""
return pulumi.get(self, "subnet_id")
@property
@pulumi.getter(name="tenantId")
def tenant_id(self) -> pulumi.Output[str]:
"""
The owner of the member. Required if admin wants to
create a pool member for another tenant. Changing this creates a new member.
"""
return pulumi.get(self, "tenant_id")
| 42.126659
| 134
| 0.609713
| 4,182
| 34,923
| 4.94285
| 0.065758
| 0.078226
| 0.080596
| 0.057472
| 0.927096
| 0.910793
| 0.89986
| 0.888588
| 0.878864
| 0.870398
| 0
| 0.01161
| 0.29462
| 34,923
| 828
| 135
| 42.177536
| 0.827515
| 0.465939
| 0
| 0.775758
| 1
| 0
| 0.090844
| 0.010933
| 0
| 0
| 0
| 0
| 0
| 1
| 0.157576
| false
| 0.00303
| 0.015152
| 0
| 0.266667
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
97e697c360d175a4efbb79b71a88204f4e406a30
| 44
|
py
|
Python
|
src/UofG_PP/__init__.py
|
UofG-CSP3/morion
|
4d013b80ab0edc1154bae18dc00c8a8d3d6c7950
|
[
"MIT"
] | null | null | null |
src/UofG_PP/__init__.py
|
UofG-CSP3/morion
|
4d013b80ab0edc1154bae18dc00c8a8d3d6c7950
|
[
"MIT"
] | null | null | null |
src/UofG_PP/__init__.py
|
UofG-CSP3/morion
|
4d013b80ab0edc1154bae18dc00c8a8d3d6c7950
|
[
"MIT"
] | null | null | null |
from . import readers
from . import writers
| 14.666667
| 21
| 0.772727
| 6
| 44
| 5.666667
| 0.666667
| 0.588235
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.181818
| 44
| 2
| 22
| 22
| 0.944444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
3f337d96dd7b62b83842390e093f34a720ce734c
| 3,287
|
py
|
Python
|
src/tests/skill_test.py
|
Kidron-Hou/category_division
|
e8163f16964d103c6fb932ca9caa30c04da89992
|
[
"BSD-3-Clause"
] | null | null | null |
src/tests/skill_test.py
|
Kidron-Hou/category_division
|
e8163f16964d103c6fb932ca9caa30c04da89992
|
[
"BSD-3-Clause"
] | null | null | null |
src/tests/skill_test.py
|
Kidron-Hou/category_division
|
e8163f16964d103c6fb932ca9caa30c04da89992
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# @File : skill_test.py
# @Description:
# @Time : 2020-6-1 10:20 AM
# @Author : Hou
import re
pattern_after = re.compile('预中标公告 |评审公示 |需求评审 |预中标公示 |预审结果公示 |预审结果公告 |候选人公告 |候选人公示 |候选人结果公示 |候选人结果公告 |'
'成交公告 |成交通告 |成交公示 |合同公示 |中选公告 |中选公示 |合同公告 |信息公示表 |结果公告 |结果公示 |结果通知书 |终止公告 |'
'终止公示 |失败公告 |失败的公告 |作废公告 |废标公告 |流标公示 |流标公告 |流标通知|无效公告 |'
'中标候选人 |中标候选人:|中标候选人:|结果公示表 |中标人:|中标公示 |中标单位:|'
'中标单位:|中标单位 |中标人 |中 标 人 |中标人为|中标人公示 |中标候选单位:|中标供应商 |供应商:|供应商名称:|供应商名称 |中标人名称:|中标单位名称:|中标单位名称 |评标结果:|供应商信息:|'
'成交供应商 |成交公告:|中标金额:|中标候选人单位名称 |中标人\(乙方\):|供应商(乙方):|评标报告:|中选人名称:|'
'成交人:|中标通知书 |成交单位:|中标内容:|中标企业|中标项目名称:|'
'预审公告 |邀请招标 |谈判公告 |竞争性谈价 |竞争性谈判 |询价公告 |来源公告 |单一来源采购 |采购公告 |'
'磋商公告 |竞争性磋商 |竞价公告 |遴选公告 |议价公告 |议价信息 |网络竞价 |出让公告 |土地出让 |挂牌出让 |拍卖公告 |公开招标 |'
'招标公告 |项目公告 |采购公示 |采购计划 |采购预告 |采购需求 |采购申前公示 |采购前公示 |需求公告 |招标预告 |征求意见 |预公告 |'
'需求信息 |需求计划 |选择计划 |预公示 |启动公示 |招标公示 |标前公示 |需求公示 |销售预告 |项目申前公示 |变更公告 |更正公告 |'
'变更说明 |推迟 |暂停公告 |变更计划 |变更文件 |调整通知 |变更公示 |更改为|更正为|延期 |变更通知 |补充说明 |补充通知 |补充文件 |补充招标公告 |'
'招标控制价 |补充公告 |项目答疑 |变更答疑 |补疑 |答疑明细 |补遗 |澄清 |答疑澄更 |评标 |流标 |废标 |资格要求 |资格要求:|资格条件:|采购通知 |供应商基本条件:|竞价须知 |'
'投标人资格 |投标人资格条件 |询价通知书 |采购说明:|报价须知:|报名公告 |招标文件 |货物采购 |服务采购 |工程分包 |错字更正 |'
'关于暂停|更改通知|澄清公告|抽签公告|更正公示 |资格审核 |答疑变更|项目要求:|单轮竞价 |抽签时间及地点 ')
key_words = ['预中标公告 ','评审公示 ','需求评审 ','预中标公示 ','预审结果公示 ','预审结果公告 ','候选人公告 ','候选人公示 ','候选人结果公示 ','候选人结果公告 ','成交公告 ','成交通告 ','成交公示 ','合同公示 ','中选公告 ','中选公示 ','合同公告 ','信息公示表 ','结果公告 ','结果公示 ','结果通知书 ','终止公告 ','终止公示 ','失败公告 ','失败的公告 ','作废公告 ','废标公告 ','流标公示 ','流标公告 ','流标通知','无效公告 ','中标候选人 ','中标候选人:','结果公示表 ','中标人:','中标公示 ','中标单位:','中标单位:','中标单位 ','中标人 ','中 标 人 ','中标人为','中标人公示 ','中标候选单位:','中标供应商 ','供应商:','供应商名称:','供应商名称 ','中标人名称:','中标单位名称:','中标单位名称 ','评标结果:','供应商信息:','成交供应商 ','成交公告:','中标金额:','中标候选人单位名称 ','中标人\(乙方\):','供应商(乙方):','评标报告:','中选人名称:','成交人:','中标通知书 ','成交单位:','中标内容:','中标企业','中标项目名称:预审公告 ','邀请招标 ','谈判公告 ','竞争性谈价 ','竞争性谈判 ','询价公告 ','来源公告 ','单一来源采购 ','采购公告 ','磋商公告 ','竞争性磋商 ','竞价公告 ','遴选公告 ','议价公告 ','议价信息 ','网络竞价 ','出让公告 ','土地出让 ','挂牌出让 ','拍卖公告 ','公开招标 ','招标公告 ','项目公告 ','采购公示 ','采购计划 ','采购预告 ','采购需求 ','采购申前公示 ','采购前公示 ','需求公告 ','招标预告 ','征求意见 ','预公告 ','需求信息 ','需求计划 ','选择计划 ','预公示 ','启动公示 ','招标公示 ','标前公示 ','需求公示 ','销售预告 ','项目申前公示 ','变更公告 ','更正公告 ','变更说明 ','推迟 ','暂停公告 ','变更计划 ','变更文件 ','调整通知 ','变更公示 ','更改为','更正为','延期 ','变更通知 ','补充说明 ','补充通知 ','补充文件 ','补充招标公告 ','招标控制价 ','补充公告 ','项目答疑 ','变更答疑 ','补疑 ','答疑明细 ','补遗 ','澄清 ','答疑澄更 ','评标 ','流标 ','废标 ','资格要求 ','资格要求:','资格条件:','采购通知 ','供应商基本条件:','竞价须知 ','投标人资格 ','投标人资格条件 ','询价通知书 ','采购说明:','报价须知:','报名公告 ','招标文件 ','货物采购 ','服务采购 ','工程分包 ','错字更正 ','关于暂停','更改通知','澄清公告','抽签公告','更正公示 ','资格审核 ','答疑变更','项目要求:','单轮竞价 ','抽签时间及地点 ']
str_content = """
"contentText": " 酒泉元隆工程咨询有限公司受瓜州县县乡道路管理站的委托,对G312线七瓜公路项目电力线路迁改工程(施工一标段)进行招标, 并于2020-05-08 09:00:00开标、评标,开评标会结束后根据有关法律、法规要求,现对中标候选人进行公示。 特此公示 标段(包)编号:gzjt20200416000200101 标段(包)名称:G312线七瓜公路项目电力线路迁改工程(施工一标段) 招标代理机构:酒泉元隆工程咨询有限公司 招标人:瓜州县县乡道路管理站 经评标委员会评审,确定中标候选人为: 第一中标候选人:酒泉市兴达建筑安装工程有限责任公司 第二中标候选人:甘肃宝光电力安装工程有限公司 第三中标候选人:广东联网电力有限公司",
"""
print(pattern_after.findall(str_content))
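# A hypothetical follow-on helper (not in the original file) showing how the
# compiled pattern could drive a coarse, first-hit classification of an
# announcement text:
def classify(text):
    """Return the first announcement keyword matched in `text`, else None."""
    m = pattern_after.search(text)
    return m.group(0).strip() if m else None

# For the sample above this yields one of the '中标候选人' keyword variants.
print(classify(str_content))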
| 102.71875
| 1,379
| 0.567082
| 415
| 3,287
| 4.477108
| 0.539759
| 0.017223
| 0.013994
| 0.019376
| 0.763724
| 0.763724
| 0.763724
| 0.763724
| 0.763724
| 0.763724
| 0
| 0.017758
| 0.17767
| 3,287
| 32
| 1,380
| 102.71875
| 0.669626
| 0.040767
| 0
| 0
| 0
| 0.428571
| 0.712834
| 0.108323
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.047619
| 0
| 0.047619
| 0.047619
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
3f447e8da33d7282f25d11d8d67e376544ace695
| 4,452
|
py
|
Python
|
tests/test_haproxy_util.py
|
monetate/check-haproxy-stats
|
b47d947eea994e4f728b2d004acae13f52c63cb5
|
[
"MIT"
] | null | null | null |
tests/test_haproxy_util.py
|
monetate/check-haproxy-stats
|
b47d947eea994e4f728b2d004acae13f52c63cb5
|
[
"MIT"
] | 1
|
2019-04-16T21:52:25.000Z
|
2019-04-16T21:52:25.000Z
|
tests/test_haproxy_util.py
|
monetate/check-haproxy-stats
|
b47d947eea994e4f728b2d004acae13f52c63cb5
|
[
"MIT"
] | 2
|
2019-02-21T18:54:09.000Z
|
2019-07-17T22:44:43.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Tests for `haproxy_util` module."""
import unittest
from mock import Mock, PropertyMock, patch
import check_haproxy_stats.haproxy_util
class TestHaProxy_Util(unittest.TestCase):
"""Test cases for haproxy_util."""
def test_get_request_stats_no_traffic(self):
"""Test scenario where we receive no traffic at all."""
with patch("haproxystats.HAProxyServer") as haproxy_server_instance:
check_trk_backend = Mock(hrsp_1xx=0, hrsp_2xx=0, hrsp_3xx=0, hrsp_4xx=0, hrsp_5xx=0, hrsp_other=0)
type(check_trk_backend).name = PropertyMock(return_value="check-trk") # "name" is special
haproxy_server = Mock(backends=[check_trk_backend])
haproxy_server_instance.return_value = haproxy_server
result = check_haproxy_stats.haproxy_util.get_request_stats(
base_url_path="127.0.0.1/haproxy/stats", username="someone", password="password", backend="check-trk")
expected_result = (0, 0, 0, 0, 0, 0)
self.assertEqual(result, expected_result)
def test_get_request_stats_some_traffic(self):
"""Test scenario where we receive various traffic resulting in some {1,2,3,4,5, other}xx codes."""
with patch("haproxystats.HAProxyServer") as haproxy_server_instance:
check_trk_backend = Mock(hrsp_1xx=1, hrsp_2xx=2, hrsp_3xx=3, hrsp_4xx=4, hrsp_5xx=5, hrsp_other=0)
type(check_trk_backend).name = PropertyMock(return_value="check-trk") # "name" is special
haproxy_server = Mock(backends=[check_trk_backend])
haproxy_server_instance.return_value = haproxy_server
result = check_haproxy_stats.haproxy_util.get_request_stats(
base_url_path="127.0.0.1/haproxy/stats", username="someone", password="password", backend="check-trk")
expected_result = (1, 2, 3, 4, 5, 0)
self.assertEqual(result, expected_result)
def test_get_hrsp_5xx_ratio_no_traffic(self):
"""Test scenario where we do not receive any traffic at all."""
with patch("check_haproxy_stats.haproxy_util.get_request_stats") as get_request_stats:
get_request_stats.side_effect = [(0, 0, 0, 0, 0, 0), (0, 0, 0, 0, 0, 0)]
r = check_haproxy_stats.haproxy_util.get_hrsp_5xx_ratio(
backend="check-trk",
base_url_path="127.0.0.1/haproxy/stats",
username="someone",
password="password",
interval=0)
self.assertEqual(r, 0.0)
def test_get_hrsp_5xx_ratio_no_traffic_during_interval(self):
"""Test scenario where we do not receive any traffic at all."""
with patch("check_haproxy_stats.haproxy_util.get_request_stats") as get_request_stats:
get_request_stats.side_effect = [(1, 1, 1, 1, 1, 1, 1), (1, 1, 1, 1, 1, 1, 1, 1)]
r = check_haproxy_stats.haproxy_util.get_hrsp_5xx_ratio(
backend="check-trk",
base_url_path="127.0.0.1/haproxy/stats",
username="someone",
password="password",
interval=0)
self.assertEqual(r, 0.0)
def test_get_hrsp_5xx_ratio_some_traffic_all_500(self):
"""Test scenario where we receive all traffic to be 500."""
with patch("check_haproxy_stats.haproxy_util.get_request_stats") as get_request_stats:
get_request_stats.side_effect = [(0, 0, 0, 0, 0, 0), (0, 0, 0, 0, 1, 0)]
r = check_haproxy_stats.haproxy_util.get_hrsp_5xx_ratio(
backend="check-trk",
base_url_path="127.0.0.1/haproxy/stats",
username="someone",
password="password",
interval=0)
self.assertEqual(r, 1.0)
def test_get_hrsp_5xx_ratio_some_traffic_half_500(self):
"""Test scenario where we receive half of traffic resulting in 5xx HTTP code."""
with patch("check_haproxy_stats.haproxy_util.get_request_stats") as get_request_stats:
get_request_stats.side_effect = [(0, 0, 0, 0, 0, 0), (0, 1, 0, 0, 1, 0)]
r = check_haproxy_stats.haproxy_util.get_hrsp_5xx_ratio(
backend="check-trk",
base_url_path="127.0.0.1/haproxy/stats",
username="someone",
password="password",
interval=0)
self.assertEqual(r, 0.5)
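# The module under test is not part of this dump. Below is a minimal sketch of
# an implementation consistent with the expectations above; the names mirror
# the test file (get_request_stats is the sibling helper exercised by the
# first two tests), but the body itself is an assumption: sample the per-class
# response counters twice, `interval` seconds apart, and return the share of
# 5xx responses among the new requests, or 0.0 when nothing changed.
import time

def get_hrsp_5xx_ratio(backend, base_url_path, username, password, interval):
    before = get_request_stats(base_url_path=base_url_path, username=username,
                               password=password, backend=backend)
    time.sleep(interval)
    after = get_request_stats(base_url_path=base_url_path, username=username,
                              password=password, backend=backend)
    deltas = [b - a for a, b in zip(before, after)]
    total = sum(deltas)
    if total <= 0:  # no new traffic during the interval
        return 0.0
    return deltas[4] / float(total)  # index 4 is the hrsp_5xx counter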
| 51.172414
| 118
| 0.638814
| 615
| 4,452
| 4.338211
| 0.157724
| 0.029985
| 0.03036
| 0.034483
| 0.868066
| 0.836582
| 0.834708
| 0.781484
| 0.77024
| 0.718516
| 0
| 0.046936
| 0.248652
| 4,452
| 86
| 119
| 51.767442
| 0.750673
| 0.118598
| 0
| 0.6875
| 0
| 0
| 0.142268
| 0.100515
| 0
| 0
| 0
| 0
| 0.09375
| 1
| 0.09375
| false
| 0.09375
| 0.046875
| 0
| 0.15625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
45017ed2c4b81d468f0f4d475c8756d8b38b2153
| 209
|
py
|
Python
|
maayanlab_bioinformatics/utils/__init__.py
|
MaayanLab/maayanlab-bioinformatics
|
f84bda02a8841a65d4c72e491129cdc339fb73b3
|
[
"Apache-2.0"
] | 4
|
2020-07-16T11:49:59.000Z
|
2021-08-03T00:54:16.000Z
|
maayanlab_bioinformatics/utils/__init__.py
|
MaayanLab/maayanlab-bioinformatics
|
f84bda02a8841a65d4c72e491129cdc339fb73b3
|
[
"Apache-2.0"
] | 2
|
2020-05-21T17:04:30.000Z
|
2022-02-14T21:29:54.000Z
|
maayanlab_bioinformatics/utils/__init__.py
|
MaayanLab/maayanlab-bioinformatics
|
f84bda02a8841a65d4c72e491129cdc339fb73b3
|
[
"Apache-2.0"
] | null | null | null |
'''This module contains general utility functions for convenient analysis
'''
from maayanlab_bioinformatics.utils.fetch_save_read import fetch_save_read
from maayanlab_bioinformatics.utils.merge import merge
| 34.833333
| 74
| 0.861244
| 27
| 209
| 6.444444
| 0.666667
| 0.149425
| 0.310345
| 0.367816
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 209
| 5
| 75
| 41.8
| 0.915789
| 0.334928
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
45094d7937a1f99e9eab6dd2ea6dc88805c504b7
| 124
|
py
|
Python
|
functiongraph.py
|
webclinic017/Backtrader-Samples
|
dc3744e9543a38eb91be213cddd35b435b67ac87
|
[
"0BSD"
] | 1
|
2021-12-17T14:14:45.000Z
|
2021-12-17T14:14:45.000Z
|
functiongraph.py
|
webclinic017/Backtrader-Samples
|
dc3744e9543a38eb91be213cddd35b435b67ac87
|
[
"0BSD"
] | null | null | null |
functiongraph.py
|
webclinic017/Backtrader-Samples
|
dc3744e9543a38eb91be213cddd35b435b67ac87
|
[
"0BSD"
] | null | null | null |
import pygal
pygal.Bar()(1, 3, 3, 7)(1, 6, 6, 4).render()
pygal.Bar()(1, 3, 3, 7)(1, 6, 6, 4).render_to_file("simple.svg")
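# The chained-call form above is pygal's quickstart idiom; the same chart
# built with the explicit API (series names and title here are illustrative):
labeled = pygal.Bar(title="Two series")
labeled.add("first", [1, 3, 3, 7])   # .add() is the named form of the call syntax
labeled.add("second", [1, 6, 6, 4])
labeled.render_to_file("simple_labeled.svg")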
| 24.8
| 64
| 0.580645
| 28
| 124
| 2.5
| 0.464286
| 0.228571
| 0.257143
| 0.285714
| 0.628571
| 0.628571
| 0.628571
| 0.628571
| 0.628571
| 0.628571
| 0
| 0.149533
| 0.137097
| 124
| 4
| 65
| 31
| 0.504673
| 0
| 0
| 0
| 0
| 0
| 0.080645
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
450bb78b2fb79bc8fecc79c1f04e60e2401241f4
| 7,594
|
py
|
Python
|
monk/system/graphs/line.py
|
Sanskar329/monk_v1
|
51a497a925ec1fb2c8fef1d51245ea7040a5a65a
|
[
"Apache-2.0"
] | 7
|
2020-07-26T08:37:29.000Z
|
2020-10-30T10:23:11.000Z
|
monk/system/graphs/line.py
|
mursalfk/monk_v1
|
62f34a52f242772186ffff7e56764e958fbcd920
|
[
"Apache-2.0"
] | null | null | null |
monk/system/graphs/line.py
|
mursalfk/monk_v1
|
62f34a52f242772186ffff7e56764e958fbcd920
|
[
"Apache-2.0"
] | null | null | null |
from system.graphs.imports import *
from system.imports import *
@accepts([list, type(np.array([1, 2]))], [list, type(np.array([1, 2]))], str, show_img=bool, save_img=bool, post_trace=False)
#@TraceFunction(trace_args=True, trace_rv=True)
def create_train_test_plots_accuracy(plots, labels, log_dir, show_img=False, save_img=False):
'''
Create line graphs for training and testing accuracies
Args:
plots (list): plots[0] contains training data
plots[1] contains validation data
labels (list): Corresponding X-Axis labels
log_dir (str): Path to folder to save plot
show_img (bool): If True, plot is displayed
save_img (bool): If True, plot is saved.
Returns:
None
'''
plt.plot(plots[0], marker='o', label='Training')
plt.plot(plots[1], marker='x', label='Validation')
plt.gca().legend(('Training','Validation'))
plt.xlabel(labels[0]);
plt.ylabel(labels[1]);
file_name = log_dir + "train_val_accuracy.png"
plt.savefig(file_name)
plt.clf()
@accepts([list, type(np.array([1, 2]))], [list, type(np.array([1, 2]))], str, show_img=bool, save_img=bool, post_trace=False)
#@TraceFunction(trace_args=True, trace_rv=True)
def create_train_test_plots_loss(plots, labels, log_dir, show_img=False, save_img=False):
'''
Create line graphs for training and testing losses
Args:
plots (list): plots[0] contains training data
plots[1] contains validation data
labels (list): Corresponding X-Axis labels
log_dir (str): Path to folder to save plot
show_img (bool): If True, plot is displayed
save_img (bool): If True, plot is saved.
Returns:
None
'''
plt.plot(plots[0], marker='o', label='Training')
plt.plot(plots[1], marker='x', label='Validation')
plt.gca().legend(('Training','Validation'))
plt.xlabel(labels[0]);
plt.ylabel(labels[1]);
file_name = log_dir + "train_val_loss.png"
plt.savefig(file_name)
plt.clf()
@accepts([list, type(np.array([1, 2]))], dict, post_trace=False)
#@TraceFunction(trace_args=False, trace_rv=True)
def training_accuracy_curve(data, system_dict):
'''
Create training accuracy line graph
Args:
data (list): Entire data
system_dict (dict): System dictionary storing experiment state and set variables
Returns:
None
'''
plt.figure(figsize=(20,10))
for i in range(len(data)):
if(not data[i]["training"]["status"]):
print("Training for {} - Incomplete".format(self.project_experiment_list[i]));
value = np.zeros(1);
xkcd = mcd.XKCD_COLORS["xkcd:" + overlap[i]].upper()
plt.plot(value, marker='o', label=system_dict["local"]["project_experiment_list"][i], color=xkcd, linewidth=5.0);
else:
value = np.load(data[i]["training"]["outputs"]["log_train_acc_history_relative"], allow_pickle=True);
xkcd = mcd.XKCD_COLORS["xkcd:" + overlap[i]].upper()
plt.plot(value, marker='o', label=system_dict["local"]["project_experiment_list"][i], color=xkcd, linewidth=5.0);
plt.gca().legend(tuple(system_dict["local"]["project_experiment_list"]));
plt.xlabel("Epoch Num");
plt.ylabel("Accuracy");
plt.title("Training Accuracy Curve");
plt.autoscale()
file_name = system_dict["comparison_dir"] + "train_accuracy.png"
plt.savefig(file_name)
plt.clf()
@accepts([list, type(np.array([1, 2]))], dict, post_trace=False)
#@TraceFunction(trace_args=False, trace_rv=True)
def validation_accuracy_curve(data, system_dict):
'''
Create validation accuracy line graph
Args:
data (list): Entire data
system_dict (dict): System dictionary storing experiment state and set variables
Returns:
None
'''
plt.figure(figsize=(20,10))
for i in range(len(data)):
if(not data[i]["training"]["status"]):
value = np.zeros(1);
xkcd = mcd.XKCD_COLORS["xkcd:" + overlap[i]].upper()
plt.plot(value, marker='o', label=system_dict["local"]["project_experiment_list"][i], color=xkcd, linewidth=5.0);
else:
value = np.load(data[i]["training"]["outputs"]["log_val_acc_history_relative"], allow_pickle=True);
xkcd = mcd.XKCD_COLORS["xkcd:" + overlap[i]].upper()
plt.plot(value, marker='x', label=system_dict["local"]["project_experiment_list"][i], color=xkcd, linewidth=5.0)
plt.gca().legend(tuple(system_dict["local"]["project_experiment_list"]));
plt.xlabel("Epoch Num");
plt.ylabel("Accuracy");
plt.title("Validation Accuracy Curve");
plt.autoscale()
file_name = system_dict["comparison_dir"] + "val_accuracy.png"
plt.savefig(file_name)
plt.clf()
@accepts([list, type(np.array([1, 2]))], dict, post_trace=False)
#@TraceFunction(trace_args=False, trace_rv=True)
def training_loss_curve(data, system_dict):
'''
Create training loss line graph
Args:
data (list): Entire data
system_dict (dict): System dictionary storing experiment state and set variables
Returns:
None
'''
plt.figure(figsize=(20,10))
for i in range(len(data)):
if(not data[i]["training"]["status"]):
value = np.zeros(1);
xkcd = mcd.XKCD_COLORS["xkcd:" + overlap[i]].upper()
plt.plot(value, marker='o', label=system_dict["local"]["project_experiment_list"][i], color=xkcd, linewidth=5.0);
else:
value = np.load(data[i]["training"]["outputs"]["log_train_loss_history_relative"], allow_pickle=True);
xkcd = mcd.XKCD_COLORS["xkcd:" + overlap[i]].upper()
plt.plot(value, marker='o', label=system_dict["local"]["project_experiment_list"][i], color=xkcd, linewidth=5.0)
plt.gca().legend(tuple(system_dict["local"]["project_experiment_list"]));
plt.xlabel("Epoch Num");
plt.ylabel("Loss");
plt.title("Training Loss Curve");
plt.autoscale()
file_name = system_dict["comparison_dir"] + "train_loss.png"
plt.savefig(file_name)
plt.clf()
@accepts([list, type(np.array([1, 2]))], dict, post_trace=False)
#@TraceFunction(trace_args=False, trace_rv=True)
def validation_loss_curve(data, system_dict):
'''
Create validation loss line graph
Args:
data (list): Entire data
system_dict (dict): System dictionary storing experiment state and set variables
Returns:
None
'''
plt.figure(figsize=(20,10))
for i in range(len(data)):
if(not data[i]["training"]["status"]):
value = np.zeros(1);
xkcd = mcd.XKCD_COLORS["xkcd:" + overlap[i]].upper()
plt.plot(value, marker='o', label=system_dict["local"]["project_experiment_list"][i], color=xkcd, linewidth=5.0);
else:
value = np.load(data[i]["training"]["outputs"]["log_val_loss_history_relative"], allow_pickle=True);
xkcd = mcd.XKCD_COLORS["xkcd:" + overlap[i]].upper()
plt.plot(value, marker='o', label=system_dict["local"]["project_experiment_list"][i], color=xkcd, linewidth=5.0)
plt.gca().legend(tuple(system_dict["local"]["project_experiment_list"]));
plt.xlabel("Epoch Num");
plt.ylabel("Loss");
plt.title("Validation Loss Curve");
plt.autoscale()
file_name = system_dict["comparison_dir"] + "val_loss.png"
plt.savefig(file_name)
plt.clf()
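# A minimal driver sketch for the two standalone helpers at the top of this
# file (assumptions: the star imports provide plt and np as the function
# bodies require; the dummy curves and output directory are illustrative and
# not part of the original module):
train_acc = [0.60, 0.72, 0.81, 0.85]
val_acc = [0.58, 0.69, 0.75, 0.78]
create_train_test_plots_accuracy([train_acc, val_acc],
                                 ["Epoch Num", "Accuracy"],
                                 "./",  # writes ./train_val_accuracy.png
                                 show_img=False, save_img=True)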
| 38.744898
| 125
| 0.639716
| 1,025
| 7,594
| 4.589268
| 0.122927
| 0.05102
| 0.058036
| 0.056122
| 0.962798
| 0.962798
| 0.928784
| 0.928784
| 0.928784
| 0.922194
| 0
| 0.010565
| 0.202265
| 7,594
| 195
| 126
| 38.94359
| 0.765929
| 0.240716
| 0
| 0.76699
| 0
| 0
| 0.186342
| 0.075554
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058252
| false
| 0
| 0.019417
| 0
| 0.07767
| 0.009709
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
18c5ffcaeafff5cf7d553d0fca081eadc0cf9fa9
| 7,454
|
py
|
Python
|
RCTF 2019/printer/printer.py
|
p-g-krish/CTF-Writeups
|
05ad6a9ecbc19ceb8890f4581dfee36f16d164aa
|
[
"MIT"
] | 51
|
2018-06-26T09:49:42.000Z
|
2019-09-14T00:06:35.000Z
|
RCTF 2019/printer/printer.py
|
p-g-krish/CTF-Writeups
|
05ad6a9ecbc19ceb8890f4581dfee36f16d164aa
|
[
"MIT"
] | 1
|
2018-06-29T18:40:59.000Z
|
2018-07-09T20:29:41.000Z
|
RCTF 2019/printer/printer.py
|
p-g-krish/CTF-Writeups
|
05ad6a9ecbc19ceb8890f4581dfee36f16d164aa
|
[
"MIT"
] | 22
|
2019-10-03T14:52:43.000Z
|
2022-01-17T08:55:10.000Z
|
#!/usr/bin/env python3
from PIL import Image, ImageDraw
import binascii
DATA_RAW="1b00e0360dbf87deffff00000000090000010007000103e90c00005345542054454152204f4e0d0a434c530d0a4249544d4150203133382c37352c32362c34382c312cffffffffffffffffffffffffffffffff00ffffffffffffffffffffffffffffffffffffffffffffffffffc3ffffffffffffffffffffffffffffffffffffffffffffffffffe7ffffffffffffffffffffffffffffffffffffffffffffffffffe7ffffffffffffffffffffffffffffffffffffffffffffffffffe7ffffffffffffffffffffffffffffffffffffffffffffffffffe7ffffffffffffffffffffffffffffffffffffffffffffffffffe7ffe3fffe1ffffffffff807c03c603fc07c07e0007f7ff01f8067ff007ff803fc07c03fff1ff1f04f8ff1ff1fff1fff3ffcff1f27fc7f1ff3e1ff1ff9ffff1ff1fc1fcff8ff1fff1fff3ffefe3f87f8ff9feff8ff1ff9ffff8ff1fc3fc7fcff1fff1fff1ffefc7fc7f9ff8fdffc7f1ff9ffff8ff1fc7fe3fc7f1fff1fff1ffefcffe7f1ff8f9ffc3f1ff9ffffc7f1fc7fe3fe3f1fff1fff0ffef8ffe7f1ff0fbffe3f1ff9ffffc7f1fc7fe3fe3f1fff1fff0ffef8ffe7e1ff8f3ffe3f1ff9ffffe3f1fc7fe3ff1f1fff1fff47fef8ffe7e3ff9f7ffe1f1ff9ffffe3f1fc7ff3ff8e1fff1fff47fef9ffe7e3ffffffff1f1ff9fffff1f1fc7ff3ff8c1fff1fff63fef9ffe7f1ffffffff1f1ff9fffff1f1fc7ff3ffc11fff1fff63fef9ffe7f1ffffffff1f1ff9fffff1f1fc7fe3ffe31fff1fff71fef9ffe7f1ffffffff1f1ff9fffff8f1fc7fe3ffe71fff1fff71fef8ffe7f8ffffffff0f1ff9fffff8f1fc7fe3ffcf1fff1fff78fef8ffe7fcffffffff0f1ff9fffffc61fc7fe7ff9f1fff1fff78fef8ffc7fe3fffffff0f1ff9fffffc41fc7fc7ff3f1fff1fff7c7efcffc7ff83ffffff0f9ff1fffffe11fc3f8fff7f1fff1fff7c7efc7fa7ff87ffffff0f9fe9fffffe31fc1f1ffe7f1fff1fff7e3efe3e67fe3fffffff1f8f99ffffff31fc403fe01f1fff1fff7e3eff80e0fc7fffffff1fc039fffffe71fc79ffffff1fff1fff7f1efff3eff8ffffffff1ff0f9fffffef1fc7fffffff1fff1fff7f0efffffff8ffffffff1ffff9fffffcf1fc7fffffff1fff1fff7f8efffffff8fffffffe1ffff9fffff9f1fc7fffffff1fff1fff7f86fffffff8ff9f7ffe3ffff9fffffbf1fc7fffffff1fff1fff7fc6fffffff8ff0f3ffe3ffff9fffff7f1fc7fffffff1fff1fff7fc2fffffff8ff8fbffc7ffff9ffffe7f1fc7fffffff1fff1fff7fe2fffffff8ff8f9ffc7ffff9ffffcff1fc7fffffff1fff1fff7ff0fffffffcff9f9ff8fffff9ffff8ff1fc7fffffff1fff1fff7ff0fffffffc7f9f8ff1fffff9ffff0ff0fc3fffffff1fff0ffe7ff8fffffffe1e7f83e3fffff8fffc03c03c0fffffff03e000780ff83fffffff80fff80ffffff83ffffffffdffffffff3ffffffffffffffffffffffffffffffffbffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff0d0a4249544d4150203133302c3537392c32392c33322c312cffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc7fffffffffffffffffffffffffffffffffffffffffffffffffffffffe38fffffffffffffffffffffffffffffffffffffffffffffffffffffffdff7ffffffffffffffffffffffffffffffffffffffffffffffffffffff9ff3ffffffffffffffffffffffffffffffffffffffffffffffffffffff9ff3fffffffffffff9ffefbffc7ffffffe1fff8fffffffc3ffffffffff9ff3ff8ffffffffff0ffefbff39ff007f9c7fe72ffffff3c3fc07fffff87e78463f803ff01f0ffe7bfefefff7ff3f3f9f8fffffeff3ffbffffffc01fa3f9ffbfffe7f9ffe71fcfe7ff7ff7f9f9fcfffffeffbffbfffffffc07e7f9ffbfffe7ffffc71f9ff3ff7feff9f3fcfffffeffbffbffffffffe7e7f8ffbfffe7ffffd75f9ff3ff7ffffcf3fcfffffe7ffffbffffffffe7e7f9ffbfffe7ffffd35f9ff3ff7ffffcf3fcfffffe3ffffbfffffff80fe7f9ffbf
ffe7ffffd2cf9ff3ff7ffffcf3fcffffff07fffbfffffff7cfe7f3ffbfffe7ffffb2cf9ff3ff7fe000f3fcffffffc1fffbffffffe7e7e7c7ffbfffe7ffffbacf9ff3ff7fe7fcf3fcfffffff87ffbffffffe7e7e03fffbfffe7ffffb9ef9ff3ff7fe7fcf3fcfffffffe7ffbffffffefe7e7ffffbfffe7ffffb9e79ff3ff7fe7f9f3fcfffffeff3ffbffffffefe7e7f9ffbfffe7ffff79e7cfe7ff7ff3f9f9f8fffffeff3ffbffffffe7e7f7f1ffbfffe7f1ff79e7efcfff7ff3f3f9f0fffffe7f7ffbffffff27eff3f3ffbfffe7f0fe38e3f39fff7ffce7fc04fffffe1cfff9ffffff019ff9e7ffbfffe7f1fffffffc7fff7fff1fffbcfffffee3fff87fffffbe7ffe1fffbffe00ffffffffffffff7ffffffffcffffffffffffffffffffffffffffbfffe7ffffffffffffff7ffffffffcffffffffffffffffffffffffffffbfffe7ffffffffffffff7ffffffffcffffffffffffffffffffffffffffbfffe7ffffffffffffff7ffffffffcffffffffffffffffffffffffffffbfe7e7ffffffffffffff7ffffffffcfffffffffff3ffffffffffffffffbfe7efffffffffffffff7ffffffffcfffffffffff1ffffffffffffffffbfe7cfffffffffffffff03fffffffc3ffffffffff1ffffffffffffffff81f03fffffffffffffff3ffffffffcfffffffffffbffffffffffffffff9ffffff0d0a424152203334382c203433392c20322c2039360d0a424152203239322c203533352c2035362c20320d0a424152203330302c203439352c2034382c20320d0a424152203236302c203434372c20322c2038380d0a424152203230342c203434372c2035362c20320d0a424152203137362c203434372c20322c2039360d0a424152203131362c203435352c20322c2038320d0a424152203132302c203437392c2035362c20320d0a4241522034342c203533352c2034382c20320d0a4241522039322c203435352c20322c2038300d0a4241522032302c203435352c2037322c20320d0a4241522032312c203435352c20322c2034300d0a4241522032312c203439352c2032342c20320d0a4241522034352c203437392c20322c2031360d0a4241522033362c203437392c2031362c20320d0a424152203238342c203339312c2034302c20320d0a424152203332342c203334332c20322c2034380d0a424152203332342c203238372c20322c2033320d0a424152203237362c203238372c2034382c20320d0a4241522035322c203331312c2034382c20320d0a424152203238342c203233392c2034382c20320d0a424152203330382c203138332c20322c2035360d0a424152203134382c203233392c2034382c20320d0a424152203139362c203139312c20322c2034380d0a424152203134382c203139312c2034382c20320d0a4241522036382c203139312c2034382c20320d0a4241522037362c203135312c2034302c20320d0a4241522037362c203131392c20322c2033320d0a4241522037362c2035352c20322c2033320d0a4241522037362c2035352c2034382c20320d0a424152203131322c203533352c2036342c20320d0a424152203332302c203334332c2031362c20320d0a424152203332302c203331392c2031362c20320d0a424152203333362c203331392c20322c2032340d0a4241522035362c203132302c2032342c20320d0a4241522035362c2038372c2032342c20320d0a4241522035362c2038382c20322c2033320d0a424152203232342c203234372c2033322c20320d0a424152203235362c203231352c20322c2033320d0a424152203232342c203231352c2033322c20320d0a424152203232342c203138342c20322c2033320d0a424152203232342c203139312c2033322c20320d0a424152203237322c203331312c20322c2035360d0a424152203231362c203336372c2035362c20320d0a424152203231362c203331392c20322c2034380d0a424152203234302c203331382c20322c2034390d0a424152203138342c203335312c20322c2031360d0a424152203136382c203335312c2031362c20320d0a424152203136382c203331312c20322c2034300d0a424152203135322c203335312c2031362c20320d0a424152203135322c203335312c20322c2031360d0a5052494e5420312c310d0a"
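# Decoded, DATA_RAW is a hex dump of TSPL-style label-printer commands:
# "SET TEAR ON", "CLS", a series of BITMAP x,y,w,h,mode,<raw row bytes>
# directives, BAR x,y,w,h rectangles, and a closing "PRINT 1,1". The code
# below replays the BITMAP and BAR directives onto a PIL canvas to
# reconstruct the label that the printer would have produced.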
d = binascii.unhexlify(DATA_RAW).splitlines()
bitmaps = []
bars = []
for i in d:
    if i.startswith(b"BITMAP"):
        x,y,w,h,mode,data=i[7:].split(b",",5)
        bitmaps.append([int(x),int(y),int(w),int(h),data])
    if i.startswith(b"BAR"):
        bars.append([int(x.strip()) for x in i[3:].split(b',')])
im = Image.new("1",(480,640),color=1)
for bitmap in bitmaps:
    x,y,w,h,data = bitmap
    for i in range(h):
        for j in range(w):
            for k in range(8):
                im.putpixel((x+j*8+k,y+i),data[i*w+j]&2**(7-k))
im_draw = ImageDraw.Draw(im)
for bar in bars:
    x,y,w,h=bar
    im_draw.rectangle([(x,y),(x+w,y+h)], fill=0, outline=0)
im = im.transpose(Image.FLIP_LEFT_RIGHT)
im = im.transpose(Image.FLIP_TOP_BOTTOM)
im.save("result.png")
| 276.074074
| 6,675
| 0.963375
| 163
| 7,454
| 44.006135
| 0.392638
| 0.001115
| 0.001255
| 0.001673
| 0.006134
| 0
| 0
| 0
| 0
| 0
| 0
| 0.372333
| 0.012745
| 7,454
| 27
| 6,676
| 276.074074
| 0.602392
| 0.002817
| 0
| 0
| 0
| 0
| 0.899502
| 0.896542
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.076923
| 0
| 0.076923
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
18ce58e3f83008deb5b9546928996dde4ef88dc1
| 7,964
|
py
|
Python
|
src/chaospy/descriptives/sensitivity.py
|
yoon-gu/chaospy
|
fe541840a79882008f38764cd7ba4935a4fd4fa3
|
[
"BSD-3-Clause"
] | 1
|
2018-08-22T01:51:25.000Z
|
2018-08-22T01:51:25.000Z
|
src/chaospy/descriptives/sensitivity.py
|
TribleCircle/chaospy
|
f22aa31e2a338a32a6d09b810c5b629c10a87236
|
[
"BSD-3-Clause"
] | null | null | null |
src/chaospy/descriptives/sensitivity.py
|
TribleCircle/chaospy
|
f22aa31e2a338a32a6d09b810c5b629c10a87236
|
[
"BSD-3-Clause"
] | 1
|
2019-11-24T17:16:30.000Z
|
2019-11-24T17:16:30.000Z
|
import numpy as np
import chaospy
from .second1d import Var
from .first import E_cond
from .first import E as E_total
def Sens_m(poly, dist, **kws):
"""
Variance-based decomposition
AKA Sobol' indices
First order sensitivity indices
"""
dim = len(dist)
if poly.dim<dim:
poly = chaospy.poly.setdim(poly, len(dist))
zero = [0]*dim
out = np.zeros((dim,) + poly.shape)
V = Var(poly, dist, **kws)
for i in range(dim):
zero[i] = 1
out[i] = Var(E_cond(poly, zero, dist, **kws),
dist, **kws)/(V+(V == 0))*(V != 0)
zero[i] = 0
return out
def Sens_m2(poly, dist, **kws):
"""
Variance-based decomposition
AKA Sobol' indices
Second order sensitivity indices
"""
dim = len(dist)
if poly.dim<dim:
poly = chaospy.poly.setdim(poly, len(dist))
zero = [0]*dim
out = np.zeros((dim, dim) + poly.shape)
mean = E_total(poly, dist)
V_total = Var(poly, dist)
E_cond_i = [None]*dim
V_E_cond_i = [None]*dim
for i in range(dim):
zero[i] = 1
E_cond_i[i] = E_cond(poly, zero, dist, **kws)
V_E_cond_i[i] = Var(E_cond_i[i], dist, **kws)
zero[i] = 0
for i in range(dim):
zero[i] = 1
for j in range(i+1, dim):
zero[j] = 1
E_cond_ij = E_cond(poly, zero, dist, **kws)
out[j, i] = out[i, j] = (Var(E_cond_ij, dist, **kws)-V_E_cond_i[i] - V_E_cond_i[j]) /(V_total+(V_total == 0))*(V_total != 0)
zero[j] = 0
zero[i] = 0
return out
def Sens_t(poly, dist, **kws):
"""
Variance-based decomposition
AKA Sobol' indices
Total effect sensitivity index
"""
dim = len(dist)
if poly.dim<dim:
poly = chaospy.poly.setdim(poly, len(dist))
zero = [1]*dim
out = np.zeros((dim,) + poly.shape, dtype=float)
V = Var(poly, dist, **kws)
for i in range(dim):
zero[i] = 0
out[i] = (V-Var(E_cond(poly, zero, dist, **kws),
dist, **kws))/(V+(V==0))*(V!=0)
zero[i] = 1
return out
def Sens_m_nataf(order, dist, samples, vals, **kws):
"""
Variance-based decomposition through the Nataf distribution.
Generates first order sensitivity indices
Args:
order (int): polynomial order used `orth_ttr`.
dist (Copula): Assumed to be Nataf with independent components
samples (array_like): Samples used for evaluation (typically generated
from `dist`.)
vals (array_like): Evaluations of the model for given samples.
Returns:
np.ndarray: Sensitivity indices with
`shape==(len(dist),) + vals.shape[1:]`
"""
assert dist.__class__.__name__ == "Copula"
trans = dist.prm["trans"]
assert trans.__class__.__name__ == "nataf"
vals = np.array(vals)
cov = trans.prm["C"]
cov = np.dot(cov, cov.T)
marginal = dist.prm["dist"]
dim = len(dist)
orth = chaospy.orthogonal.orth_ttr(order, marginal, sort="GR")
r = list(range(dim))
index = [1] + [0]*(dim-1)
nataf = chaospy.dist.Nataf(marginal, cov, r)
samples_ = marginal.inv( nataf.fwd( samples ) )
poly, coeffs = chaospy.collocation.fit_regression(
orth, samples_, vals, retall=1)
V = Var(poly, marginal, **kws)
out = np.zeros((dim,) + poly.shape)
out[0] = Var(E_cond(poly, index, marginal, **kws),
marginal, **kws)/(V+(V == 0))*(V != 0)
for i in range(1, dim):
r = r[1:] + r[:1]
index = index[-1:] + index[:-1]
nataf = chaospy.dist.Nataf(marginal, cov, r)
samples_ = marginal.inv( nataf.fwd( samples ) )
poly, coeffs = chaospy.collocation.fit_regression(
orth, samples_, vals, retall=1)
out[i] = Var(E_cond(poly, index, marginal, **kws),
marginal, **kws)/(V+(V == 0))*(V != 0)
return out
def Sens_t_nataf(order, dist, samples, vals, **kws):
"""
Variance-based decomposition through the Nataf distribution.
Total order sensitivity indices
Args:
order (int): polynomial order used `orth_ttr`.
dist (Copula): Assumed to be Nataf with independent components
samples (array_like): Samples used for evaluation (typically generated
from `dist`.)
vals (array_like): Evaluations of the model for given samples.
Returns:
np.ndarray: Sensitivity indices with
`shape==(len(dist),)+vals.shape[1:]`
"""
assert dist.__class__.__name__ == "Copula"
trans = dist.prm["trans"]
assert trans.__class__.__name__ == "nataf"
vals = np.array(vals)
cov = trans.prm["C"]
cov = np.dot(cov, cov.T)
marginal = dist.prm["dist"]
dim = len(dist)
orth = chaospy.orthogonal.orth_ttr(order, marginal, sort="GR")
r = list(range(dim))
index = [0] + [1]*(dim-1)
nataf = chaospy.dist.Nataf(marginal, cov, r)
samples_ = marginal.inv( nataf.fwd( samples ) )
poly, coeffs = chaospy.collocation.fit_regression(
orth, samples_, vals, retall=1)
V = Var(poly, marginal, **kws)
out = np.zeros((dim,) + poly.shape)
out[0] = (V-Var(E_cond(poly, index, marginal, **kws),
marginal, **kws))/(V+(V == 0))*(V != 0)
for i in range(1, dim):
r = r[1:] + r[:1]
index = index[-1:] + index[:-1]
nataf = chaospy.dist.Nataf(marginal, cov, r)
samples_ = marginal.inv( nataf.fwd( samples ) )
poly, coeffs = chaospy.collocation.fit_regression(
orth, samples_, vals, retall=1)
out[i] = (V-Var(E_cond(poly, index, marginal, **kws),
marginal, **kws))/(V+(V == 0))*(V != 0)
return out
def Sens_nataf(order, dist, samples, vals, **kws):
"""
Variance-based decomposition through the Nataf distribution.
Main and total order sensitivity indices
Args:
order (int): polynomial order used `orth_ttr`.
dist (Copula): Assumed to be Nataf with independent components
samples (array_like): Samples used for evaluation (typically generated
from `dist`.)
vals (array_like): Evaluations of the model for given samples.
Returns:
np.ndarray: Sensitivity indices with
`shape==(2, len(dist),)+vals.shape[1:]`. First component is
main and second is total.
"""
assert dist.__class__.__name__ == "Copula"
trans = dist.prm["trans"]
assert trans.__class__.__name__ == "nataf"
vals = np.array(vals)
cov = trans.prm["C"]
cov = np.dot(cov, cov.T)
marginal = dist.prm["dist"]
dim = len(dist)
orth = chaospy.orthogonal.orth_ttr(order, marginal, sort="GR")
r = list(range(dim))
index0 = [0] + [1]*(dim-1)
index1 = [1] + [0]*(dim-1)
nataf = chaospy.dist.Nataf(marginal, cov, r)
samples_ = marginal.inv( nataf.fwd( samples ) )
poly, coeffs = chaospy.collocation.fit_regression(
orth, samples_, vals, retall=1)
V = Var(poly, marginal, **kws)
out = np.zeros((2, dim,) + poly.shape)
out[0, 0] = (V - Var(E_cond(poly, index0, marginal, **kws),
marginal, **kws))/(V+(V == 0))*(V != 0)
out[1, 0] = Var(E_cond(poly, index1, marginal, **kws),
marginal, **kws)/(V+(V == 0))*(V != 0)
for i in range(1, dim):
r = r[1:] + r[:1]
index0 = index0[-1:] + index0[:-1]
nataf = chaospy.dist.Nataf(marginal, cov, r)
samples_ = marginal.inv( nataf.fwd( samples ) )
poly, coeffs = chaospy.collocation.fit_regression(
orth, samples_, vals, retall=1)
out[0, i] = (V-Var(E_cond(poly, index0, marginal, **kws),
marginal, **kws))/(V+(V == 0))*(V != 0)
out[1, i] = Var(E_cond(poly, index1, marginal, **kws),
marginal, **kws)/(V+(V == 0))*(V != 0)
return out[::-1]
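# A minimal end-to-end sketch for Sens_m above (assumptions: a chaospy release
# matching this module's API; the toy model and distribution are illustrative):
q0, q1 = chaospy.variable(2)
dist_demo = chaospy.J(chaospy.Uniform(0, 1), chaospy.Uniform(0, 1))
model = q0 + 2*q1  # Var = 1/12 + 4/12, so first-order indices are [0.2, 0.8]
print(Sens_m(model, dist_demo))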
| 28.341637
| 136
| 0.562657
| 1,081
| 7,964
| 4.027752
| 0.102683
| 0.025264
| 0.022049
| 0.027561
| 0.90124
| 0.887
| 0.878503
| 0.861736
| 0.843133
| 0.807304
| 0
| 0.016468
| 0.283275
| 7,964
| 280
| 137
| 28.442857
| 0.746321
| 0.224134
| 0
| 0.706667
| 0
| 0
| 0.011616
| 0
| 0
| 0
| 0
| 0
| 0.04
| 1
| 0.04
| false
| 0
| 0.033333
| 0
| 0.113333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
18f639420cdc205c4f6f64f3529ce41d21ab4fc1
| 158
|
py
|
Python
|
build/data/write_status.py
|
pathogen-genomics/introduction-website
|
74554dfc3596e5252f496d966c6e0a68bdbeca73
|
[
"MIT"
] | null | null | null |
build/data/write_status.py
|
pathogen-genomics/introduction-website
|
74554dfc3596e5252f496d966c6e0a68bdbeca73
|
[
"MIT"
] | null | null | null |
build/data/write_status.py
|
pathogen-genomics/introduction-website
|
74554dfc3596e5252f496d966c6e0a68bdbeca73
|
[
"MIT"
] | null | null | null |
# with open("status.json","w") as outf:
# print('{"status":"ok"}',file=outf)
with open("status.json","w") as outf:
print('{"status":"updating"}',file=outf)
| 31.6
| 41
| 0.620253
| 24
| 158
| 4.083333
| 0.458333
| 0.163265
| 0.285714
| 0.367347
| 0.734694
| 0.734694
| 0.734694
| 0.734694
| 0.734694
| 0
| 0
| 0
| 0.088608
| 158
| 5
| 41
| 31.6
| 0.680556
| 0.462025
| 0
| 0
| 0
| 0
| 0.39759
| 0.253012
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 10
|
18ffb74d15037e61827834a46ff04e9f193c663e
| 4,258
|
py
|
Python
|
tests/test_encoders.py
|
zhammer/dd-trace-py
|
4c30f6e36bfa34a63cd9b6884677c977f76d2a01
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 5
|
2020-03-07T01:12:29.000Z
|
2021-04-21T00:53:19.000Z
|
tests/test_encoders.py
|
zhammer/dd-trace-py
|
4c30f6e36bfa34a63cd9b6884677c977f76d2a01
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 4
|
2019-11-22T20:58:01.000Z
|
2020-08-17T21:16:13.000Z
|
tests/test_encoders.py
|
zhammer/dd-trace-py
|
4c30f6e36bfa34a63cd9b6884677c977f76d2a01
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 3
|
2020-03-18T16:29:20.000Z
|
2020-07-20T16:05:10.000Z
|
import json
from unittest import TestCase
from ddtrace.span import Span
from ddtrace.compat import msgpack_type, string_type
from ddtrace.encoding import JSONEncoder, MsgpackEncoder
class TestEncoders(TestCase):
"""
Ensures that Encoders serialize the payload as expected.
"""
def test_encode_traces_json(self):
# test encoding for JSON format
traces = []
traces.append([
Span(name='client.testing', tracer=None),
Span(name='client.testing', tracer=None),
])
traces.append([
Span(name='client.testing', tracer=None),
Span(name='client.testing', tracer=None),
])
encoder = JSONEncoder()
spans = encoder.encode_traces(traces)
items = json.loads(spans)
# test that the encoded output is a string
# and that the output is flattened
assert isinstance(spans, string_type)
assert len(items) == 2
assert len(items[0]) == 2
assert len(items[1]) == 2
for i in range(2):
for j in range(2):
assert 'client.testing' == items[i][j]['name']
def test_join_encoded_json(self):
# test encoding for JSON format
traces = []
traces.append([
Span(name='client.testing', tracer=None),
Span(name='client.testing', tracer=None),
])
traces.append([
Span(name='client.testing', tracer=None),
Span(name='client.testing', tracer=None),
])
encoder = JSONEncoder()
# Encode each trace on its own
encoded_traces = [
encoder.encode_trace(trace)
for trace in traces
]
# Join the encoded traces together
data = encoder.join_encoded(encoded_traces)
# Parse the resulting data
items = json.loads(data)
# test that the encoded output is a string
# and that the output is flattened
assert isinstance(data, string_type)
assert len(items) == 2
assert len(items[0]) == 2
assert len(items[1]) == 2
for i in range(2):
for j in range(2):
assert 'client.testing' == items[i][j]['name']
def test_encode_traces_msgpack(self):
# test encoding for MsgPack format
traces = []
traces.append([
Span(name='client.testing', tracer=None),
Span(name='client.testing', tracer=None),
])
traces.append([
Span(name='client.testing', tracer=None),
Span(name='client.testing', tracer=None),
])
encoder = MsgpackEncoder()
spans = encoder.encode_traces(traces)
items = encoder.decode(spans)
# test that the encoded output is a msgpack payload
# and that the output is flattened
assert isinstance(spans, msgpack_type)
assert len(items) == 2
assert len(items[0]) == 2
assert len(items[1]) == 2
for i in range(2):
for j in range(2):
assert b'client.testing' == items[i][j][b'name']
def test_join_encoded_msgpack(self):
# test encoding for MsgPack format
traces = []
traces.append([
Span(name='client.testing', tracer=None),
Span(name='client.testing', tracer=None),
])
traces.append([
Span(name='client.testing', tracer=None),
Span(name='client.testing', tracer=None),
])
encoder = MsgpackEncoder()
# Encode each individual trace on its own
encoded_traces = [
encoder.encode_trace(trace)
for trace in traces
]
# Join the encoded traces together
data = encoder.join_encoded(encoded_traces)
# Parse the encoded data
items = encoder.decode(data)
# test that the encoded output is a msgpack payload
# and that the output is flattened
assert isinstance(data, msgpack_type)
assert len(items) == 2
assert len(items[0]) == 2
assert len(items[1]) == 2
for i in range(2):
for j in range(2):
assert b'client.testing' == items[i][j][b'name']
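# Distilled outside unittest, the round trip exercised by the four cases above
# is just the following (reusing the imports at the top of the file; the
# single-span trace is illustrative):
demo_encoder = MsgpackEncoder()
demo_payload = demo_encoder.encode_traces([[Span(name='client.testing', tracer=None)]])
assert demo_encoder.decode(demo_payload)[0][0][b'name'] == b'client.testing'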
| 31.308824
| 64
| 0.569281
| 503
| 4,258
| 4.763419
| 0.145129
| 0.108514
| 0.093489
| 0.140234
| 0.846828
| 0.834725
| 0.805509
| 0.805509
| 0.805509
| 0.805509
| 0
| 0.009739
| 0.3248
| 4,258
| 135
| 65
| 31.540741
| 0.823652
| 0.160639
| 0
| 0.787234
| 0
| 0
| 0.083592
| 0
| 0
| 0
| 0
| 0
| 0.212766
| 1
| 0.042553
| false
| 0
| 0.053191
| 0
| 0.106383
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
7a15c175106a4e3ec9c4e277ff5206e4bf475181
| 159
|
py
|
Python
|
burberryCrawl/database/__init__.py
|
matheuslins/burberrayCrawl
|
e4677da3affd6594b81ac892dabc86fefa556a95
|
[
"MIT"
] | null | null | null |
burberryCrawl/database/__init__.py
|
matheuslins/burberrayCrawl
|
e4677da3affd6594b81ac892dabc86fefa556a95
|
[
"MIT"
] | null | null | null |
burberryCrawl/database/__init__.py
|
matheuslins/burberrayCrawl
|
e4677da3affd6594b81ac892dabc86fefa556a95
|
[
"MIT"
] | null | null | null |
from elasticsearch import Elasticsearch
from burberryCrawl.settings import ES_HOST
def config_client():
    return Elasticsearch(hosts=ES_HOST, timeout=25)
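# A tiny usage sketch (assumes an Elasticsearch node reachable at ES_HOST;
# guarded so importing the package stays side-effect free):
if __name__ == "__main__":
    print(config_client().ping())  # True when the configured host answers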
| 19.875
| 51
| 0.81761
| 20
| 159
| 6.35
| 0.7
| 0.094488
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014388
| 0.125786
| 159
| 7
| 52
| 22.714286
| 0.899281
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
e1310911e4b4dc9b008dd5aee05dcae0cbe50f7d
| 58,141
|
py
|
Python
|
scripts/comparesim.py
|
barronh/MONET
|
acd72487c7aeff66d89f87fa663a9c96fa9b7bb0
|
[
"MIT"
] | 1
|
2019-07-09T19:50:59.000Z
|
2019-07-09T19:50:59.000Z
|
scripts/comparesim.py
|
barronh/MONET
|
acd72487c7aeff66d89f87fa663a9c96fa9b7bb0
|
[
"MIT"
] | null | null | null |
scripts/comparesim.py
|
barronh/MONET
|
acd72487c7aeff66d89f87fa663a9c96fa9b7bb0
|
[
"MIT"
] | null | null | null |
#!/data/aqf/barryb/anaconda2/bin/python
###for AITKEN
#### /data/aqf/barryb/anaconda2/bin/python
###for WCOSS
### /naqfc/noscrub/Barry.Baker/anaconda2/bin/python
import f90nml
from numpy import sort
import matplotlib as mpl
mpl.use('Agg')
import matplotlib.pyplot as plt
from numpy import unique,sort
from datetime import datetime, timedelta
import pandas as pd
from glob import glob
from verify_airnow import verify_airnow
import sys
import utils
#Read the Namelist
nml = f90nml.read('comparesim.namelist')
base = nml['files']['basename']
gridcro = nml['files']['gridcro']
datapath = nml['files']['data_dir']
interp = nml['interp']['method']
neighbors = nml['interp']['neighbors']
radius = nml['interp']['radius_of_influence']
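#A hypothetical minimal comparesim.namelist, inferred from the keys this
#script reads (Fortran namelist syntax as parsed by f90nml; every value below
#is illustrative, not taken from any real run):
#  &files
#    basename = 'comparesim'
#    gridcro = 'GRIDCRO2D.ncf'
#    data_dir = './data'
#    sim1 = 'sim1_paired.hdf'
#    sim1met = 'none'
#    obs_network = 'AIRNOW'
#  /
#  &interp
#    method = 'nearest'
#    neighbors = 1
#    radius_of_influence = 12000.
#  /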
#airnow user and pass
usr = 'Barry.Baker'
p = 'p00pST!ck123'
#INTERP SIMULATIONS TO OBSERVATIONS
if nml['files']['sim1'].lower() != 'none':
print 'Pairing Sim1...'
print ' '
if nml['files']['sim1'].lower()[-4:] =='.hdf':
print ' Loading Paired Data: ', nml['files']['sim1']
sim1 = verify_airnow()
sim1.df = pd.read_hdf(nml['files']['sim1'])
else:
import monet as m
files = sort(glob(nml['files']['sim1']))
print 'sim1met', type(nml['files']['sim1met'])
if nml['files']['sim1met'].lower() == 'none':
metfiles = ''
else:
metfiles = sort(glob(nml['files']['sim1met']))
print metfiles
if nml['files']['obs_network'].upper() == 'AQS':
print 'here'
sim1 = m.vaqs(concpath=files,gridcro=gridcro,met2dpath=metfiles,datapath=datapath,interp=interp,neighbors=neighbors,radius=radius)
else:
sim1 = m.vairnow(concpath=files,gridcro=gridcro,met2dpath=metfiles,datapath=datapath,interp=interp,neighbors=neighbors,radius=radius,user=usr,passw=p)
print sim1.df.keys()
if nml['files']['sim2'].lower()!= 'none':
print ' '
print 'Pairing Sim2...'
if nml['files']['sim2'].lower()[-4:] =='.hdf':
print ' '
print ' Loading Paired Data: ', nml['files']['sim2']
sim2 = verify_airnow()
sim2.df = pd.read_hdf(nml['files']['sim2'])
else:
files = sort(glob(nml['files']['sim2']))
metfiles = nml['files']['sim2met']
if nml['files']['sim2met'].lower() == 'none':
metfiles = ''
else:
metfiles = sort(glob(nml['files']['sim2met']))
import monet as mm
if nml['files']['obs_network'].upper() == 'AQS':
sim2 = mm.vaqs(concpath=files,gridcro=gridcro,met2dpath=metfiles,datapath=datapath,interp=interp,neighbors=neighbors,radius=radius)
else:
sim2 = mm.vairnow(concpath=files,gridcro=gridcro,met2dpath=metfiles,datapath=datapath,interp=interp,neighbors=neighbors,radius=radius,user=usr,passw=p)
else:
sim2=False
if nml['files']['sim3'].lower()!= 'none':
print ' '
print 'Pairing Sim3...'
print ' '
if nml['files']['sim3'].lower()[-4:] =='.hdf':
print ' Loading Paired Data: ', nml['files']['sim3']
sim3 = verify_airnow()
sim3.df = pd.read_hdf(nml['files']['sim3'])
else:
import monet as mmm
files = sort(glob(nml['files']['sim3']))
if nml['files']['sim3met'].lower() == 'none':
metfiles = ''
else:
metfiles = sort(glob(nml['files']['sim3met']))
if nml['files']['obs_network'].upper() == 'AQS':
sim3 = mmm.vaqs(concpath=files,gridcro=gridcro,met2dpath=metfiles,datapath=datapath,interp=interp,neighbors=neighbors,radius=radius)
else:
sim3 = mmm.vairnow(concpath=files,gridcro=gridcro,met2dpath=metfiles,datapath=datapath,interp=interp,neighbors=neighbors,radius=radius,user=usr,passw=p)
else:
sim3 = False
if nml['files']['sim4'].lower()!= 'none':
print 'Pairing Sim4...'
print ' '
if nml['files']['sim4'].lower()[-4:] =='.hdf':
print ' Loading Paired Data: ', nml['files']['sim4']
import verify_airnow as vairnow
sim4 = vairnow()
sim4.df = pd.read_hdf(nml['files']['sim4'])
else:
import monet
files = sort(glob(nml['files']['sim4']))
if nml['files']['sim4met'].lower() == 'none':
metfiles = ''
else:
metfiles = sort(glob(nml['files']['sim4met']))
if nml['files']['obs_network'].upper() == 'AQS':
sim4 = monet.vaqs(concpath=files,gridcro=gridcro,met2dpath=metfiles,datapath=datapath,interp=interp,neighbors=neighbors,radius=radius)
else:
sim4 = monet.vairnow(concpath=files,gridcro=gridcro,met2dpath=metfiles,datapath=datapath,interp=interp,neighbors=neighbors,radius=radius,user=usr,passw=p)
else:
sim4 = False
if nml['files']['save']:
sim1.df.to_hdf(nml['files']['sim1_save_name'],'df',format='fixed')
if nml['files']['sim2'].lower() != 'none':
sim2.df.to_hdf(nml['files']['sim2_save_name'],'df',format='fixed')
if nml['files']['sim3'].lower() != 'none':
sim3.df.to_hdf(nml['files']['sim3_save_name'],'df',format='fixed')
if nml['files']['sim4'].lower() != 'none':
sim4.df.to_hdf(nml['files']['sim4_save_name'],'df',format='fixed')
if (nml['files']['start_date'] != 'none') & (nml['files']['end_date'] != 'none'):
if datetime.strptime(nml['files']['start_date'],'%Y-%m-%d %H') > datetime.strptime(nml['files']['end_date'],'%Y-%m-%d %H'):
print 'end_date must be later than start_date'
sys.exit()
con = (sim1.df.datetime >= datetime.strptime(nml['files']['start_date'],'%Y-%m-%d %H')) & (sim1.df.datetime <= datetime.strptime(nml['files']['end_date'],'%Y-%m-%d %H'))
sim1.df = sim1.df.copy()[con]
if sim2 is not False:
sim2.df = sim2.df[sim2.df['datetime'] >= datetime.strptime(nml['files']['start_date'],'%Y-%m-%d %H')]
sim2.df = sim2.df[sim2.df['datetime'] <= datetime.strptime(nml['files']['end_date'],'%Y-%m-%d %H')]
if sim3 is not False:
con = (sim3.df.datetime >= datetime.strptime(nml['files']['start_date'],'%Y-%m-%d %H')) & (sim3.df.datetime <= datetime.strptime(nml['files']['end_date'],'%Y-%m-%d %H'))
sim3.df=sim3.df.copy()[con]
if sim4 is not False:
con = (sim4.df.datetime >= datetime.strptime(nml['files']['start_date'],'%Y-%m-%d %H')) & (sim4.df.datetime <= datetime.strptime(nml['files']['end_date'],'%Y-%m-%d %H'))
sim4.df=sim4.df.copy()[con]
sim1.df.dropna(subset=['Obs','CMAQ'],inplace=True)
if sim2 is not False:
sim2.df.dropna(subset=['Obs','CMAQ'],inplace=True)
if sim3 is not False:
sim3.df.dropna(subset=['Obs','CMAQ'],inplace=True)
if sim4 is not False:
sim4.df.dropna(subset=['Obs','CMAQ'],inplace=True)
#date = sim1.cmaq.dates[0]
#ymd= date.strftime('%Y%m%d')
#DOMAIN PLOTTING
if nml['domain']['params'].lower() != 'none':
if nml['domain']['params'] == 'all':
params = sort(sim1.df.Species.unique())
else:
params = nml['domain']['params'].split(',')
first = True
for i in params:
print i,'domain'
if nml['domain']['tseries']:
try:
sim1.compare_param(param=i,timeseries=True,label=nml['files']['sim1label'],footer=nml['domain']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,timeseries=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,timeseries=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,timeseries=True,fig=plt.figure(1),label=nml['files']['sim4label'])
code = '00000'
savename = base +'_'+i.replace('.','')+'_'+'timeseries.jpg'
plt.savefig(savename,dpi=75)
print 'Saving: ' + savename
plt.close('all')
except:
plt.close('all')
pass
if nml['domain']['tseriesrmse']:
try:
sim1.compare_param(param=i,timeseries_rmse=True,label=nml['files']['sim1label'],footer=nml['domain']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,timeseries_rmse=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,timeseries_rmse=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,timeseries_rmse=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+'timeseries_rmse.jpg',dpi=75)
print 'Saving: ' + base +'_'+i.replace('.','')+'_'+'timeseries_rmse.jpg'
plt.close('all')
except:
plt.close('all')
pass
if nml['domain']['tseriesbias']:
try:
sim1.compare_param(param=i,timeseries_mb=True,label=nml['files']['sim1label'],footer=nml['domain']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,timeseries_mb=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,timeseries_mb=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,timeseries_mb=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+'timeseries_mb.jpg',dpi=75)
print 'Saving: ' + base +'_'+i.replace('.','')+'_'+'timeseries_mb.jpg'
plt.close('all')
except:
plt.close('all')
pass
if nml['domain']['scatter']:
try:
sim1.compare_param(param=i,scatter=True,label=nml['files']['sim1label'],footer=nml['domain']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,scatter=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,scatter=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,scatter=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+'scatter.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['domain']['diffscatter']:
try:
sim1.compare_param(param=i,diffscatter=True,label=nml['files']['sim1label'],footer=nml['domain']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,diffscatter=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,diffscatter=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,diffscatter=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+'diffscatter.jpg',dpi=75)
print 'Saving: ' + base +'_'+i.replace('.','')+'_'+'diffscatter.jpg'
plt.close('all')
except:
plt.close('all')
pass
if nml['domain']['pdfs']:
try:
sim1.compare_param(param=i,pdfs=True,label=nml['files']['sim1label'],footer=nml['domain']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,pdfs=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,pdfs=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,pdfs=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+'pdfs.jpg',dpi=75)
print 'Saving: ' + base +'_'+i.replace('.','')+'_'+'pdfs.jpg'
plt.close('all')
except:
plt.close('all')
pass
if nml['domain']['diffpdfs']:
try:
sim1.compare_param(param=i,diffpdfs=True,label=nml['files']['sim1label'],footer=nml['domain']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,diffpdfs=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,diffpdfs=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,diffpdfs=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+'diffpdfs.jpg',dpi=75)
print 'Saving: ' + base +'_'+i.replace('.','')+'_'+'diffpdfs.jpg'
plt.close('all')
except:
plt.close('all')
pass
if nml['domain']['taylordiagram']:
try:
dia = sim1.compare_param(param=i,taylordiagram=True,label=nml['files']['sim1label'])
if sim2 is not False:
sim2.compare_param(param=i,taylordiagram=True,fig=plt.figure(1),label=nml['files']['sim2label'],dia=dia,marker='<')
if sim3 is not False:
sim3.compare_param(param=i,taylordiagram=True,fig=plt.figure(1),label=nml['files']['sim3label'],dia=dia,marker='*')
if sim4 is not False:
sim4.compare_param(param=i,taylordiagram=True,fig=plt.figure(1),label=nml['files']['sim4label'],dia=dia,marker='D')
plt.savefig(base +'_'+i.replace('.','')+'_'+'taylor.jpg',dpi=75)
print 'Saving: ' + base +'_'+i.replace('.','')+'_'+'taylor.jpg'
plt.close('all')
except:
plt.close('all')
pass
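# Statistics tables: the first write creates the output file and every
# later write appends, so all simulations end up in a single table.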
if nml['domain']['stats']:
try:
if first:
df,title = utils.get_epa_location_df(sim1.df,i)
utils.write_table(df=df,fname='DOMAIN',label=nml['files']['sim1label'],location='Domain')
first=False
else:
df,title = utils.get_epa_location_df(sim1.df,i)
utils.write_table(df=df,fname='DOMAIN',label=nml['files']['sim1label'],location='Domain',append=True)
if sim2 is not False:
df,title = utils.get_epa_location_df(sim2.df,i)
utils.write_table(df=df,fname='DOMAIN',label=nml['files']['sim2label'],append=True,location='Domain')
if sim3 is not False:
df,title = utils.get_epa_location_df(sim3.df,i)
utils.write_table(df=df,fname='DOMAIN',label=nml['files']['sim3label'],append=True,location='Domain')
if sim4 is not False:
df,title = utils.get_epa_location_df(sim4.df,i)
utils.write_table(df=df,fname='DOMAIN',label=nml['files']['sim4label'],append=True,location='Domain')
except:
pass
# Regions: repeat the full plot/statistics suite for each named region subset
if (nml['region']['params'].lower() != 'none') & (nml['region']['region'].lower() !='none'):
sim1.df = utils.get_region(sim1.df)
if sim2 is not False:
sim2.df = utils.get_region(sim2.df)
if sim3 is not False:
sim3.df = utils.get_region(sim3.df)
if sim4 is not False:
sim4.df = utils.get_region(sim4.df)
if nml['region']['params'] == 'all':
params = sort(sim1.df.Species.unique())
else:
params = nml['region']['params'].split(',')
if nml['region']['region'] =='all':
regions = sim1.df.Region.dropna().unique()
else:
regions = nml['region']['region'].split(',')
first=True
for j in regions:
for i in params:
print i,j
if nml['region']['tseries']:
try:
sim1.compare_param(param=i,region=j,timeseries=True,label=nml['files']['sim1label'],footer=nml['region']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,region=j,timeseries=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,region=j,timeseries=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,region=j,timeseries=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'timeseries.jpg',dpi=75)
print 'Saving: ' + base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'timeseries.jpg'
plt.close('all')
except:
plt.close('all')
pass
if nml['region']['tseriesrmse']:
try:
sim1.compare_param(param=i,region=j,timeseries_rmse=True,label=nml['files']['sim1label'],footer=nml['region']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,region=j,timeseries_rmse=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,region=j,timeseries_rmse=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,region=j,timeseries_rmse=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'timeseries_rmse.jpg',dpi=75)
print 'Saving: ' + base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'timeseries_rmse.jpg'
plt.close('all')
except:
plt.close('all')
pass
if nml['region']['tseriesbias']:
try:
sim1.compare_param(param=i,region=j,timeseries_mb=True,label=nml['files']['sim1label'],footer=nml['region']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,region=j,timeseries_mb=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,region=j,timeseries_mb=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,region=j,timeseries_mb=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'timeseries_mb.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['region']['scatter']:
try:
sim1.compare_param(param=i,region=j,scatter=True,label=nml['files']['sim1label'],footer=nml['region']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,region=j,scatter=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,region=j,scatter=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,region=j,scatter=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'scatter.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['region']['diffscatter']:
try:
sim1.compare_param(param=i,region=j,diffscatter=True,label=nml['files']['sim1label'],footer=nml['region']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,region=j,diffscatter=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,region=j,diffscatter=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,region=j,diffscatter=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'diffscatter.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['region']['pdfs']:
try:
sim1.compare_param(param=i,region=j,pdfs=True,label=nml['files']['sim1label'],footer=nml['region']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,region=j,pdfs=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,region=j,pdfs=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,region=j,pdfs=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'pdfs.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['region']['diffpdfs']:
try:
sim1.compare_param(param=i,region=j,diffpdfs=True,label=nml['files']['sim1label'],footer=nml['region']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,region=j,diffpdfs=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,region=j,diffpdfs=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,region=j,diffpdfs=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'diffpdfs.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['region']['taylordiagram']:
try:
dia = sim1.compare_param(param=i,region=j,taylordiagram=True,label=nml['files']['sim1label'])
if sim2 is not False:
sim2.compare_param(param=i,region=j,taylordiagram=True,fig=plt.figure(1),label=nml['files']['sim2label'],dia=dia,marker='<')
if sim3 is not False:
sim3.compare_param(param=i,region=j,taylordiagram=True,fig=plt.figure(1),label=nml['files']['sim3label'],dia=dia,marker='*')
if sim4 is not False:
sim4.compare_param(param=i,region=j,taylordiagram=True,fig=plt.figure(1),label=nml['files']['sim4label'],dia=dia,marker='D')
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'taylor.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['region']['stats']:
try:
if first:
df,title = utils.get_epa_location_df(sim1.df,i,region=j)
utils.write_table(df=df,fname='REGION',label=nml['files']['sim1label'],location=j)
first=False
else:
df,title = utils.get_epa_location_df(sim1.df,i,region=j)
utils.write_table(df=df,fname='REGION',label=nml['files']['sim1label'],location=j,append=True)
if sim2 is not False:
df,title = utils.get_epa_location_df(sim2.df,i,region=j)
utils.write_table(df=df,fname='REGION',label=nml['files']['sim2label'],append=True,location=j)
if sim3 is not False:
df,title = utils.get_epa_location_df(sim3.df,i,region=j)
utils.write_table(df=df,fname='REGION',label=nml['files']['sim3label'],append=True,location=j)
if sim4 is not False:
df,title = utils.get_epa_location_df(sim4.df,i,region=j)
utils.write_table(df=df,fname='REGION',label=nml['files']['sim4label'],append=True,location=j)
except:
pass
# EPA Regions: same suite, subset by EPA_region
if (nml['epa_region']['params'].lower() != 'none') & (nml['epa_region']['epa_region'].lower() !='none'):
if nml['epa_region']['params'] == 'all':
params = sort(sim1.df.Species.unique())
else:
params = nml['epa_region']['params'].split(',')
if nml['epa_region']['epa_region'] =='all':
regions = sim1.df.EPA_region.dropna().unique()
else:
regions = nml['epa_region']['epa_region'].split(',')
first = True
for j in regions:
for i in params:
print i,j
if nml['epa_region']['tseries']:
try:
sim1.compare_param(param=i,epa_region=j,timeseries=True,label=nml['files']['sim1label'],footer=nml['epa_region']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,epa_region=j,timeseries=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,epa_region=j,timeseries=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,epa_region=j,timeseries=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'timeseries.jpg',dpi=75)
print 'Saving: ' + base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'timeseries.jpg'
plt.close('all')
except:
plt.close('all')
pass
if nml['epa_region']['tseriesrmse']:
try:
sim1.compare_param(param=i,epa_region=j,timeseries_rmse=True,label=nml['files']['sim1label'],footer=nml['epa_region']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,epa_region=j,timeseries_rmse=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,epa_region=j,timeseries_rmse=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,epa_region=j,timeseries_rmse=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'timeseries_rmse.jpg',dpi=75)
print 'Saving: ' + base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'timeseries_rmse.jpg'
plt.close('all')
except:
plt.close('all')
pass
if nml['epa_region']['tseriesbias']:
try:
sim1.compare_param(param=i,epa_region=j,timeseries_mb=True,label=nml['files']['sim1label'],footer=nml['epa_region']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,epa_region=j,timeseries_mb=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,epa_region=j,timeseries_mb=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,epa_region=j,timeseries_mb=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'timeseries_mb.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['epa_region']['scatter']:
try:
sim1.compare_param(param=i,epa_region=j,scatter=True,label=nml['files']['sim1label'],footer=nml['epa_region']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,epa_region=j,scatter=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,epa_region=j,scatter=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,epa_region=j,scatter=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'scatter.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['epa_region']['diffscatter']:
try:
sim1.compare_param(param=i,epa_region=j,diffscatter=True,label=nml['files']['sim1label'],footer=nml['epa_region']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,epa_region=j,diffscatter=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,epa_region=j,diffscatter=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,epa_region=j,diffscatter=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'diffscatter.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['epa_region']['pdfs']:
try:
sim1.compare_param(param=i,epa_region=j,pdfs=True,label=nml['files']['sim1label'],footer=nml['epa_region']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,epa_region=j,pdfs=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,epa_region=j,pdfs=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,epa_region=j,pdfs=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'pdfs.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['epa_region']['diffpdfs']:
try:
sim1.compare_param(param=i,epa_region=j,diffpdfs=True,label=nml['files']['sim1label'],footer=nml['epa_region']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,epa_region=j,diffpdfs=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,epa_region=j,diffpdfs=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,epa_region=j,diffpdfs=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'diffpdfs.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['epa_region']['taylordiagram']:
try:
dia = sim1.compare_param(param=i,epa_region=j,taylordiagram=True,label=nml['files']['sim1label'])
if sim2 is not False:
sim2.compare_param(param=i,epa_region=j,taylordiagram=True,fig=plt.figure(1),label=nml['files']['sim2label'],dia=dia,marker='<')
if sim3 is not False:
sim3.compare_param(param=i,epa_region=j,taylordiagram=True,fig=plt.figure(1),label=nml['files']['sim3label'],dia=dia,marker='*')
if sim4 is not False:
sim4.compare_param(param=i,epa_region=j,taylordiagram=True,fig=plt.figure(1),label=nml['files']['sim4label'],dia=dia,marker='D')
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'taylor.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['epa_region']['stats']:
try:
if first:
df,title = utils.get_epa_location_df(sim1.df,i,epa_region=j)
utils.write_table(df=df,fname='EPA_REGION',label=nml['files']['sim1label'],location=j)
first=False
else:
df,title = utils.get_epa_location_df(sim1.df,i,epa_region=j)
utils.write_table(df=df,fname='EPA_REGION',label=nml['files']['sim1label'],location=j,append=True)
if sim2 is not False:
df,title = utils.get_epa_location_df(sim2.df,i,epa_region=j)
utils.write_table(df=df,fname='EPA_REGION',label=nml['files']['sim2label'],append=True,location=j)
if sim3 is not False:
df,title = utils.get_epa_location_df(sim3.df,i,epa_region=j)
utils.write_table(df=df,fname='EPA_REGION',label=nml['files']['sim3label'],append=True,location=j)
if sim4 is not False:
df,title = utils.get_epa_location_df(sim4.df,i,epa_region=j)
utils.write_table(df=df,fname='EPA_REGION',label=nml['files']['sim4label'],append=True,location=j)
except:
pass
# States: same suite, subset by State_Name
if (nml['state']['params'].lower() != 'none') & (nml['state']['state'].lower() !='none'):
plt.close('all')
if nml['state']['params'] == 'all':
params = sim1.df.Species.unique()
else:
params = nml['state']['params'].split(',')
if nml['state']['state'] =='all':
states = sim1.df.State_Name.unique()
else:
states = nml['state']['state'].split(',')
first = True
for j in states:
for i in params:
print i,j
if nml['state']['tseries']:
try:
sim1.compare_param(param=i,state=j,timeseries=True,label=nml['files']['sim1label'],footer=nml['state']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,state=j,timeseries=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,state=j,timeseries=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,state=j,timeseries=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'timeseries.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['state']['tseriesrmse']:
try:
sim1.compare_param(param=i,state=j,timeseries_rmse=True,label=nml['files']['sim1label'],footer=nml['state']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,state=j,timeseries_rmse=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,state=j,timeseries_rmse=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,state=j,timeseries_rmse=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'timeseries_rmse.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['state']['tseriesbias']:
try:
sim1.compare_param(param=i,state=j,timeseries_mb=True,label=nml['files']['sim1label'],footer=nml['state']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,state=j,timeseries_mb=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,state=j,timeseries_mb=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,state=j,timeseries_mb=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'timeseries_mb.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['state']['scatter']:
try:
sim1.compare_param(param=i,state=j,scatter=True,label=nml['files']['sim1label'],footer=nml['state']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,state=j,scatter=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,state=j,scatter=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,state=j,scatter=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'scatter.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['state']['diffscatter']:
try:
sim1.compare_param(param=i,state=j,diffscatter=True,label=nml['files']['sim1label'],footer=nml['state']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,state=j,diffscatter=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,state=j,diffscatter=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,state=j,diffscatter=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'diffscatter.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['state']['pdfs']:
try:
sim1.compare_param(param=i,state=j,pdfs=True,label=nml['files']['sim1label'],footer=nml['state']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,state=j,pdfs=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,state=j,pdfs=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,state=j,pdfs=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'pdfs.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['state']['diffpdfs']:
try:
sim1.compare_param(param=i,state=j,diffpdfs=True,label=nml['files']['sim1label'],footer=nml['state']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,state=j,diffpdfs=True,fig=plt.figure(1),label=nml['files']['sim2label'])
if sim3 is not False:
sim3.compare_param(param=i,state=j,diffpdfs=True,fig=plt.figure(1),label=nml['files']['sim3label'])
if sim4 is not False:
sim4.compare_param(param=i,state=j,diffpdfs=True,fig=plt.figure(1),label=nml['files']['sim4label'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'diffpdfs.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['state']['taylordiagram']:
try:
dia = sim1.compare_param(param=i,state=j,taylordiagram=True,label=nml['files']['sim1label'])
if sim2 is not False:
sim2.compare_param(param=i,state=j,taylordiagram=True,fig=plt.figure(1),label=nml['files']['sim2label'],dia=dia,marker='<')
if sim3 is not False:
sim3.compare_param(param=i,state=j,taylordiagram=True,fig=plt.figure(1),label=nml['files']['sim3label'],dia=dia,marker='*')
if sim4 is not False:
sim4.compare_param(param=i,state=j,taylordiagram=True,fig=plt.figure(1),label=nml['files']['sim4label'],dia=dia,marker='D')
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'taylor.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['state']['stats']:
try:
if first:
df,title = utils.get_epa_location_df(sim1.df,i,state=j)
utils.write_table(df=df,fname='STATE',label=nml['files']['sim1label'],location=j)
first=False
else:
df,title = utils.get_epa_location_df(sim1.df,i,state=j)
utils.write_table(df=df,fname='STATE',label=nml['files']['sim1label'],location=j,append=True)
if sim2 is not False:
df,title = utils.get_epa_location_df(sim2.df,i,state=j)
utils.write_table(df=df,fname='STATE',label=nml['files']['sim2label'],append=True,location=j)
if sim3 is not False:
df,title = utils.get_epa_location_df(sim3.df,i,state=j)
utils.write_table(df=df,fname='STATE',label=nml['files']['sim3label'],append=True,location=j)
if sim4 is not False:
df,title = utils.get_epa_location_df(sim4.df,i,state=j)
utils.write_table(df=df,fname='STATE',label=nml['files']['sim4label'],append=True,location=j)
except:
pass
# Cities: same suite, subset by MSA_Name
if (nml['city']['params'].lower() != 'none') & (nml['city']['city'].lower() !='none'):
if nml['city']['params'] == 'all':
params = sim1.df.Species.unique()
else:
params = nml['city']['params'].split(',')
if nml['city']['city'] =='all':
cities = sim1.df.MSA_Name.unique()
else:
cities = nml['city']['city'].split(',')
first = True
for j in cities:
for i in params:
print i,j
# names = sim1.df.MSA_Name.dropna().values
# codes = sim1.df.MSA_Code.dropna().values
# names,index = unique(names,return_index=True)
# codes = codes[index].astype('|S5')
# for k,p in zip(names,codes):
# if j.lower() in k.lower():
# name = k
# code = p
if nml['city']['tseries']:
try:
sim1.compare_param(param=i,city=j,timeseries=True,label=nml['files']['sim1label'],footer=nml['city']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,city=j,timeseries=True,fig=plt.figure(1),label=nml['files']['sim2label'],footer=nml['city']['footers'])
if sim3 is not False:
sim3.compare_param(param=i,city=j,timeseries=True,fig=plt.figure(1),label=nml['files']['sim3label'],footer=nml['city']['footers'])
if sim4 is not False:
sim4.compare_param(param=i,city=j,timeseries=True,fig=plt.figure(1),label=nml['files']['sim4label'],footer=nml['city']['footers'])
savename = base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'timeseries.jpg'
plt.savefig(savename,dpi=75)
print 'Saving: ', savename
plt.close('all')
except:
plt.close('all')
pass
if nml['city']['tseriesrmse']:
try:
sim1.compare_param(param=i,city=j,timeseries_rmse=True,label=nml['files']['sim1label'],footer=nml['city']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,city=j,timeseries_rmse=True,fig=plt.figure(1),label=nml['files']['sim2label'],footer=nml['city']['footers'])
if sim3 is not False:
sim3.compare_param(param=i,city=j,timeseries_rmse=True,fig=plt.figure(1),label=nml['files']['sim3label'],footer=nml['city']['footers'])
if sim4 is not False:
sim4.compare_param(param=i,city=j,timeseries_rmse=True,fig=plt.figure(1),label=nml['files']['sim4label'],footer=nml['city']['footers'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'timeseries_rmse.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['city']['tseriesbias']:
try:
sim1.compare_param(param=i,city=j,timeseries_mb=True,label=nml['files']['sim1label'],footer=nml['city']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,city=j,timeseries_mb=True,fig=plt.figure(1),label=nml['files']['sim2label'],footer=nml['city']['footers'])
if sim3 is not False:
sim3.compare_param(param=i,city=j,timeseries_mb=True,fig=plt.figure(1),label=nml['files']['sim3label'],footer=nml['city']['footers'])
if sim4 is not False:
sim4.compare_param(param=i,city=j,timeseries_mb=True,fig=plt.figure(1),label=nml['files']['sim4label'],footer=nml['city']['footers'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'timeseries_mb.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['city']['scatter']:
try:
sim1.compare_param(param=i,city=j,scatter=True,label=nml['files']['sim1label'],footer=nml['city']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,city=j,scatter=True,fig=plt.figure(1),label=nml['files']['sim2label'],footer=nml['city']['footers'])
if sim3 is not False:
sim3.compare_param(param=i,city=j,scatter=True,fig=plt.figure(1),label=nml['files']['sim3label'],footer=nml['city']['footers'])
if sim4 is not False:
sim4.compare_param(param=i,city=j,scatter=True,fig=plt.figure(1),label=nml['files']['sim4label'],footer=nml['city']['footers'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'scatter.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['city']['diffscatter']:
try:
sim1.compare_param(param=i,city=j,diffscatter=True,label=nml['files']['sim1label'],footer=nml['city']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,city=j,diffscatter=True,fig=plt.figure(1),label=nml['files']['sim2label'],footer=nml['city']['footers'])
if sim3 is not False:
sim3.compare_param(param=i,city=j,diffscatter=True,fig=plt.figure(1),label=nml['files']['sim3label'],footer=nml['city']['footers'])
if sim4 is not False:
sim4.compare_param(param=i,city=j,diffscatter=True,fig=plt.figure(1),label=nml['files']['sim4label'],footer=nml['city']['footers'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'diffscatter.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['city']['pdfs']:
try:
sim1.compare_param(param=i,city=j,pdfs=True,label=nml['files']['sim1label'],footer=nml['city']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,city=j,pdfs=True,fig=plt.figure(1),label=nml['files']['sim2label'],footer=nml['city']['footers'])
if sim3 is not False:
sim3.compare_param(param=i,city=j,pdfs=True,fig=plt.figure(1),label=nml['files']['sim3label'],footer=nml['city']['footers'])
if sim4 is not False:
sim4.compare_param(param=i,city=j,pdfs=True,fig=plt.figure(1),label=nml['files']['sim4label'],footer=nml['city']['footers'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'pdfs.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['city']['diffpdfs']:
try:
sim1.compare_param(param=i,city=j,diffpdfs=True,label=nml['files']['sim1label'],footer=nml['city']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,city=j,diffpdfs=True,fig=plt.figure(1),label=nml['files']['sim2label'],footer=nml['city']['footers'])
if sim3 is not False:
sim3.compare_param(param=i,city=j,diffpdfs=True,fig=plt.figure(1),label=nml['files']['sim3label'],footer=nml['city']['footers'])
if sim4 is not False:
sim4.compare_param(param=i,city=j,diffpdfs=True,fig=plt.figure(1),label=nml['files']['sim4label'],footer=nml['city']['footers'])
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'diffpdfs.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['city']['taylordiagram']:
try:
dia = sim1.compare_param(param=i,city=j,taylordiagram=True,label=nml['files']['sim1label'],footer=nml['city']['footers'])
if sim2 is not False:
sim2.compare_param(param=i,city=j,taylordiagram=True,fig=plt.figure(1),label=nml['files']['sim2label'],dia=dia,marker='<')
if sim3 is not False:
sim3.compare_param(param=i,city=j,taylordiagram=True,fig=plt.figure(1),label=nml['files']['sim3label'],dia=dia,marker='*')
if sim4 is not False:
sim4.compare_param(param=i,city=j,taylordiagram=True,fig=plt.figure(1),label=nml['files']['sim4label'],dia=dia,marker='D')
plt.savefig(base +'_'+i.replace('.','')+'_'+j.replace(' ','')+'_'+'taylor.jpg',dpi=75)
plt.close('all')
except:
plt.close('all')
pass
if nml['city']['stats']:
try:
if first:
df,title = utils.get_epa_location_df(sim1.df,i,city=j)
utils.write_table(df=df,fname='CITY',label=nml['files']['sim1label'],location=j)
first=False
else:
df,title = utils.get_epa_location_df(sim1.df,i,city=j)
utils.write_table(df=df,fname='CITY',label=nml['files']['sim1label'],location=j,append=True)
if sim2 is not False:
df,title = utils.get_epa_location_df(sim2.df,i,city=j)
utils.write_table(df=df,fname='CITY',label=nml['files']['sim2label'],append=True,location=j)
if sim3 is not False:
df,title = utils.get_epa_location_df(sim3.df,i,city=j)
utils.write_table(df=df,fname='CITY',label=nml['files']['sim3label'],append=True,location=j)
if sim4 is not False:
df,title = utils.get_epa_location_df(sim4.df,i,city=j)
utils.write_table(df=df,fname='CITY',label=nml['files']['sim4label'],append=True,location=j)
except:
pass
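# Spatial plots: scatter the model-minus-obs bias for each output hour on the
# CONUS basemap, then stitch the hourly frames into a GIF animation.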
if nml['spatial']['params'].lower() != 'none':
import plots
import subprocess
from numpy import array, unique
print '==============================================='
print ' LOADING BASEMAP'
print '==============================================='
sim1.cmaq.load_conus_basemap('.')
m = sim1.cmaq.map
if nml['spatial']['params'] == 'all':
params = sort(sim1.df.Species.unique())
else:
params = nml['spatial']['params'].split(',')
if nml['spatial']['bias']:
for i in sort(params):
df = sim1.df.groupby('Species').get_group(i)
for j,dates in enumerate(sim1.cmaq.dates[sim1.cmaq.indexdates]):
f,ax,c = plots.spatial_bias_scatter(df,sim1.cmaq.map,dates,ncolors=21,cmap='RdBu_r')
m.drawstates();m.drawcoastlines();m.drawcountries()
ax = plt.gca()
plt.title(dates.strftime('%d/%m/%Y %H Model - Obs'), fontsize=13)
c.set_label(i + ' (' + df.Units.unique()[0] + ')',fontsize=13)
plt.tight_layout()
dd = sim1.cmaq.dates[sim1.cmaq.indexdates][0].strftime('%Y%m%d.5X.')
savename = dd + i.replace('.','P') +'.spbias.' + str(j).zfill(2) + '.jpg'
print savename
plt.savefig(savename)
plt.close('all')
print 'Generating GIF Animation'
subprocess.call(['convert','-delay','50',dd+i.replace('.','P')+'.spbias.*.jpg',dd+i.replace('.','P')+'.spbias.ani.gif'])
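# Day/night composites: average each site over local daytime (11-18)
# and nighttime (22-04) hours, then map the mean bias for each period.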
alldates = df.datetime_local.values.astype('M8[s]').astype('O')
hours = array([int(p.strftime('%H')) for p in alldates])
day = (hours > 10) & (hours <= 18)
night = (hours > 21) | (hours <= 4)
for k,j in zip([day,night],['dt','nt']):
dfnew = df[k].groupby('SCS').mean().reset_index(level=0)
dfnew['datetime'] = sim1.cmaq.dates.min()
f,ax,c = plots.spatial_bias_scatter(dfnew,sim1.cmaq.map,sim1.cmaq.dates.min(),ncolors=21,cmap='RdBu_r')
m.drawstates();m.drawcoastlines();m.drawcountries()
ax = plt.gca()
c.set_label(i + ' (' + df.Units.unique()[0] + ')',fontsize=13)
plt.tight_layout()
dd = sim1.cmaq.dates[sim1.cmaq.indexdates][0].strftime('%Y%m%d.5X.')
savename = dd + i.replace('.','P') +'.spbias.' + j+ '.jpg'
plt.savefig(savename)
print savename
plt.close('all')
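# Monitor locations: plot every unique observation site for the species.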
if nml['spatial']['monitors']:
for i in sort(params):
df = sim1.df.groupby('Species').get_group(i)
lats,index = unique(df.Latitude.values,return_index=True)
lons = df.Longitude.values[index]
m.plot(lons,lats,'.',latlon=True,color='dodgerblue')
m.drawstates();m.drawcoastlines();m.drawcountries()
plt.title(i + ' Monitor Locations: ' + str(lats.shape[0]))
plt.tight_layout()
plt.savefig('monitor_location_' + i.replace('.','P') + '.jpg')
plt.close('all')
# --- workflow/tests/test_signals.py (from sannleen/TolaActivity, Apache-2.0) ---
import json
import logging
import os
from chargebee import APIError
from chargebee.models import Subscription
from django.core import mail
from django.test import TestCase, override_settings, tag
from mock import Mock, patch
import factories
from tola import DEMO_BRANCH, PRODUCTION_BRANCH, STAGING_BRANCH
from tola.management.commands.loadinitialdata import DEFAULT_WORKFLOW_LEVEL_1S
from workflow.models import (Dashboard, Organization, WorkflowTeam,
ROLE_PROGRAM_ADMIN, ROLE_ORGANIZATION_ADMIN,
ROLE_VIEW_ONLY, ROLE_PROGRAM_TEAM, WorkflowLevel1,
WorkflowLevel2, DEFAULT_PROGRAM_NAME)
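# Tests for the workflow signal handlers: default program creation,
# demo-branch WorkflowTeam assignment, and ChargeBee seat accounting.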
@tag('pkg')
class AddUsersToDefaultWorkflowLevel1Test(TestCase):
def setUp(self):
os.environ['APP_BRANCH'] = ''
logging.disable(logging.ERROR)
def tearDown(self):
logging.disable(logging.NOTSET)
def test_not_demo_env(self):
factories.TolaUser() # triggers the signal
self.assertEqual(WorkflowTeam.objects.all().count(), 0)
@patch('workflow.signals.tsync')
def test_demo_env_no_wflvl1_matching(self, mock_tsync):
os.environ['APP_BRANCH'] = DEMO_BRANCH
mock_tsync.create_instance.return_value = Mock()
factories.WorkflowLevel1(name=DEFAULT_WORKFLOW_LEVEL_1S[0][1])
factories.TolaUser() # triggers the signal
self.assertEqual(WorkflowTeam.objects.all().count(), 0)
factories.WorkflowLevel1(name=DEFAULT_WORKFLOW_LEVEL_1S[1][1])
factories.TolaUser(
user=factories.User(first_name='George', last_name='Harrison')
) # triggers the signal
self.assertEqual(WorkflowTeam.objects.all().count(), 0)
factories.WorkflowLevel1(id=DEFAULT_WORKFLOW_LEVEL_1S[0][0], name='Any')
factories.TolaUser(
user=factories.User(first_name='Ringo', last_name='Starr')
) # triggers the signal
self.assertEqual(WorkflowTeam.objects.all().count(), 0)
factories.WorkflowLevel1(id=DEFAULT_WORKFLOW_LEVEL_1S[1][0], name='Any')
factories.TolaUser(
user=factories.User(first_name='Paul', last_name='McCartney')
) # triggers the signal
self.assertEqual(WorkflowTeam.objects.all().count(), 0)
@patch('workflow.signals.tsync')
def test_demo_workflowteam_assignment(self, mock_tsync):
os.environ['APP_BRANCH'] = DEMO_BRANCH
mock_tsync.create_instance.return_value = Mock()
role = factories.Group(name=ROLE_VIEW_ONLY)
wflvl1_1 = factories.WorkflowLevel1(
id=DEFAULT_WORKFLOW_LEVEL_1S[0][0],
name=DEFAULT_WORKFLOW_LEVEL_1S[0][1])
wflvl1_2 = factories.WorkflowLevel1(
id=DEFAULT_WORKFLOW_LEVEL_1S[1][0],
name=DEFAULT_WORKFLOW_LEVEL_1S[1][1])
tola_user = factories.TolaUser(
user=factories.User(first_name='Ringo', last_name='Starr')
) # triggers the signal
WorkflowTeam.objects.get(
workflow_user=tola_user, role=role, workflowlevel1=wflvl1_1)
WorkflowTeam.objects.get(
workflow_user=tola_user, role=role, workflowlevel1=wflvl1_2)
@patch('workflow.signals.tsync')
def test_demo_workflowteam_assignment_not_reassigned_on_update(
self, mock_tsync):
os.environ['APP_BRANCH'] = DEMO_BRANCH
mock_tsync.create_instance.return_value = Mock()
role = factories.Group(name=ROLE_VIEW_ONLY)
wflvl1_0 = factories.WorkflowLevel1(
id=DEFAULT_WORKFLOW_LEVEL_1S[0][0],
name=DEFAULT_WORKFLOW_LEVEL_1S[0][1])
wflvl1_1 = factories.WorkflowLevel1(
id=DEFAULT_WORKFLOW_LEVEL_1S[1][0],
name=DEFAULT_WORKFLOW_LEVEL_1S[1][1])
tola_user = factories.TolaUser(
user=factories.User(first_name='Ringo', last_name='Starr')
) # triggers the signal
tola_user.name = 'Laura Pausini'
tola_user.save()
num_results = WorkflowTeam.objects.filter(
workflow_user=tola_user, role=role, workflowlevel1=wflvl1_0).count()
self.assertEqual(num_results, 1)
num_results = WorkflowTeam.objects.filter(
workflow_user=tola_user, role=role, workflowlevel1=wflvl1_1).count()
self.assertEqual(num_results, 1)
@tag('pkg')
class CreateDefaultProgramTest(TestCase):
@override_settings(CREATE_DEFAULT_PROGRAM=False)
def test_deactivated(self):
factories.Organization() # triggers the signal
self.assertEqual(WorkflowLevel1.objects.all().count(), 0)
@override_settings(CREATE_DEFAULT_PROGRAM=True)
def test_activated(self):
organization = factories.Organization() # triggers the signal
WorkflowLevel1.objects.get(name=DEFAULT_PROGRAM_NAME,
organization=organization)
@override_settings(CREATE_DEFAULT_PROGRAM=True)
def test_activated_update(self):
organization = factories.Organization() # triggers the signal
organization.name = 'Name updated'
organization.save()
self.assertEqual(WorkflowLevel1.objects.all().count(), 1)
@tag('pkg')
class AddTolaUserAsProgramAdminTest(TestCase):
def setUp(self):
os.environ['APP_BRANCH'] = ''
logging.disable(logging.ERROR)
def tearDown(self):
logging.disable(logging.NOTSET)
@override_settings(SET_PROGRAM_ADMIN_DEFAULT=False)
@override_settings(CREATE_DEFAULT_PROGRAM=False)
def test_deactivated(self):
factories.TolaUser() # triggers the signal
self.assertEqual(WorkflowTeam.objects.all().count(), 0)
@override_settings(SET_PROGRAM_ADMIN_DEFAULT=True)
@override_settings(CREATE_DEFAULT_PROGRAM=False)
def test_activated_but_not_create_default_program(self):
factories.TolaUser() # triggers the signal
self.assertEqual(WorkflowTeam.objects.all().count(), 0)
@override_settings(SET_PROGRAM_ADMIN_DEFAULT=True)
@override_settings(CREATE_DEFAULT_PROGRAM=True)
def test_activated(self):
role_program_admin = factories.Group(name=ROLE_PROGRAM_ADMIN)
tolauser = factories.TolaUser() # triggers the signal
wft = WorkflowTeam.objects.get(workflow_user=tolauser)
self.assertEqual(wft.role, role_program_admin)
@override_settings(SET_PROGRAM_ADMIN_DEFAULT=True)
@override_settings(CREATE_DEFAULT_PROGRAM=True)
def test_activated_save_two_times(self):
"""
When the TolaUser is saved, a WorkflowTeam object for that user and
the default program is created only once.
"""
role_program_admin = factories.Group(name=ROLE_PROGRAM_ADMIN)
tolauser = factories.TolaUser() # triggers the signal
tolauser.save() # triggers the signal again
wft = WorkflowTeam.objects.get(workflow_user=tolauser)
self.assertEqual(wft.role, role_program_admin)
@override_settings(SET_PROGRAM_ADMIN_DEFAULT=True)
@override_settings(CREATE_DEFAULT_PROGRAM=True)
def test_activated_program_lowercase(self):
"""
If the default program name is written in different case letters, the
signal does not crash.
"""
role_program_admin = factories.Group(name=ROLE_PROGRAM_ADMIN)
tolauser = factories.TolaUser() # triggers the signal
wflvl1 = WorkflowLevel1.objects.get(name=DEFAULT_PROGRAM_NAME)
wflvl1.name = DEFAULT_PROGRAM_NAME.lower()
wflvl1.save()
WorkflowTeam.objects.all().delete()
tolauser.name = 'Any'
tolauser.save()  # triggers the signal again
wft = WorkflowTeam.objects.get(workflow_user=tolauser)
self.assertEqual(wft.role, role_program_admin)
@tag('pkg')
class CheckSeatsSaveWFTeamsTest(TestCase):
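# Lightweight stand-in for the object returned by chargebee's
# Subscription.retrieve(): an active subscription with one paid seat.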
class ExternalResponse:
def __init__(self, values):
self.subscription = Subscription(values)
self.subscription.status = 'active'
self.subscription.plan_quantity = 1
def setUp(self):
os.environ['APP_BRANCH'] = ''
logging.disable(logging.ERROR)
self.group_org_admin = factories.Group(name=ROLE_ORGANIZATION_ADMIN)
self.group_program_admin = factories.Group(name=ROLE_PROGRAM_ADMIN)
self.group_program_team = factories.Group(name=ROLE_PROGRAM_TEAM)
self.group_view_only = factories.Group(name=ROLE_VIEW_ONLY)
self.org = factories.Organization(chargebee_subscription_id='12345')
self.tola_user = factories.TolaUser(organization=self.org)
def test_check_seats_save_team_increase(self):
external_response = self.ExternalResponse(None)
Subscription.retrieve = Mock(return_value=external_response)
wflvl1_1 = factories.WorkflowLevel1(name='WorkflowLevel1_1')
wflvl1_2 = factories.WorkflowLevel1(name='WorkflowLevel1_2')
factories.WorkflowTeam(workflow_user=self.tola_user,
workflowlevel1=wflvl1_1,
role=self.group_program_admin)
# It should increase the seats because the user doesn't
# have any seat reserved for him
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 1)
# It shouldn't increase the seats because the user already
# has a seat reserved for him
factories.WorkflowTeam(workflow_user=self.tola_user,
workflowlevel1=wflvl1_2,
role=self.group_program_admin)
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 1)
def test_check_seats_save_team_decrease(self):
external_response = self.ExternalResponse(None)
Subscription.retrieve = Mock(return_value=external_response)
wflvl1_1 = factories.WorkflowLevel1(name='WorkflowLevel1_1')
wflvl1_2 = factories.WorkflowLevel1(name='WorkflowLevel1_2')
wfteam1_1 = factories.WorkflowTeam(workflow_user=self.tola_user,
workflowlevel1=wflvl1_1,
role=self.group_program_admin)
wfteam1_2 = factories.WorkflowTeam(workflow_user=self.tola_user,
workflowlevel1=wflvl1_2,
role=self.group_program_admin)
# It shouldn't increase the amount of used seats because
# the user already has a WorkflowTeam
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 1)
# It shouldn't decrease the seats because the user still has
# another WorkflowTeam
wfteam1_1.role = self.group_view_only
wfteam1_1.save()
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 1)
# It should decrease the seats because the user isn't Org Admin
# and doesn't have another WorkflowTeam
wfteam1_2.role = self.group_view_only
wfteam1_2.save()
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 0)
def test_check_seats_save_team_without_subscription(self):
external_response = self.ExternalResponse(None)
Subscription.retrieve = Mock(return_value=external_response)
self.tola_user.organization = factories.Organization()
wflvl1 = factories.WorkflowLevel1(name='WorkflowLevel1')
factories.WorkflowTeam(workflow_user=self.tola_user,
workflowlevel1=wflvl1,
role=self.group_program_admin)
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 0)
@patch('workflow.signals.tsync')
def test_check_seats_save_team_demo(self, mock_tsync):
os.environ['APP_BRANCH'] = DEMO_BRANCH
mock_tsync.create_instance.return_value = Mock()
self.tola_user.organization = factories.Organization()
wflvl1 = factories.WorkflowLevel1(name='WorkflowLevel1')
factories.WorkflowTeam(workflow_user=self.tola_user,
workflowlevel1=wflvl1,
role=self.group_program_admin)
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 0)
def test_check_seats_save_team_org_admin(self):
# When a user is an org admin, the seat is updated by the
# user-groups signal, so it shouldn't be changed in this case.
self.tola_user.user.groups.add(self.group_org_admin)
self.tola_user.user.save()
external_response = self.ExternalResponse(None)
Subscription.retrieve = Mock(return_value=external_response)
wflvl1 = factories.WorkflowLevel1(name='WorkflowLevel1')
factories.WorkflowTeam(workflow_user=self.tola_user,
workflowlevel1=wflvl1,
role=self.group_program_admin)
# It should have only one seat because of the Org Admin role
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 1)
@override_settings(DEFAULT_REPLY_TO='noreply@example.com')
@override_settings(SALES_TEAM_EMAIL='sales@example.com')
@override_settings(PAYMENT_PORTAL_URL='example.com')
def test_check_seats_save_team_exceed_notify(self):
os.environ['APP_BRANCH'] = PRODUCTION_BRANCH
header = {'category': ['exceed_paid_plan', 'from_production_email']}
headers = {'X-SMTPAPI': json.dumps(header)}
self.tola_user.user.groups.add(self.group_org_admin)
self.tola_user.user.save()
self.org = Organization.objects.get(pk=self.org.id)
user = factories.User(first_name='John', last_name='Lennon')
tolauser = factories.TolaUser(user=user, organization=self.org)
external_response = self.ExternalResponse(None)
Subscription.retrieve = Mock(return_value=external_response)
wflvl1 = factories.WorkflowLevel1(name='WorkflowLevel1')
factories.WorkflowTeam(workflow_user=tolauser,
workflowlevel1=wflvl1,
role=self.group_program_admin)
# It should notify the OrgAdmin
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 2)
self.assertEqual(len(mail.outbox), 1)
self.assertIn('Edit user exceeding notification',
mail.outbox[0].subject)
self.assertEqual(mail.outbox[0].to, [self.tola_user.user.email])
self.assertEqual(mail.outbox[0].reply_to, ['noreply@example.com'])
self.assertEqual(mail.outbox[0].bcc, ['sales@example.com'])
self.assertEqual(mail.outbox[0].extra_headers, headers)
# Text body
org_admin_name = 'Hi {},'.format(self.tola_user.name)
self.assertIn(org_admin_name, mail.outbox[0].body)
available_seats = 'Purchased user seats: 1'
self.assertIn(available_seats, mail.outbox[0].body)
used_seats = 'Current edit users in the system: 2'
self.assertIn(used_seats, mail.outbox[0].body)
payment_portal_url = 'example.com'
self.assertIn(payment_portal_url, mail.outbox[0].body)
# HTML body
org_admin_name = '<br>Hi {},</span>'.format(self.tola_user.name)
self.assertIn(org_admin_name, mail.outbox[0].alternatives[0][0])
available_seats = 'Purchased user seats: <b>1</b>'
self.assertIn(available_seats, mail.outbox[0].alternatives[0][0])
used_seats = 'Current edit users in the system: <b>2</b>'
self.assertIn(used_seats, mail.outbox[0].alternatives[0][0])
payment_portal_url = '<a href="example.com" target="_blank">Payment ' \
'portal</a>'
self.assertIn(payment_portal_url, mail.outbox[0].alternatives[0][0])
self.assertIn('Payment portal', mail.outbox[0].alternatives[0][0])
def test_check_seats_save_team_retrieve_subscription_fails(self):
"""
The number of seats will be increased in the system but it's not
possible to check the quantity of the plan because the retrieve
failed.
"""
self.tola_user.user.groups.add(self.group_org_admin)
self.tola_user.user.save()
self.org = Organization.objects.get(pk=self.org.id)
user = factories.User(first_name='John', last_name='Lennon')
tolauser = factories.TolaUser(user=user, organization=self.org)
json_obj = {
'message': "Sorry, we couldn't find that resource",
'error_code': 'resource_not_found'
}
sub_response = APIError(404, json_obj)
Subscription.retrieve = Mock(side_effect=sub_response)
wflvl1 = factories.WorkflowLevel1(name='WorkflowLevel1')
factories.WorkflowTeam(workflow_user=tolauser,
workflowlevel1=wflvl1,
role=self.group_program_admin)
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 2)
self.assertEqual(len(mail.outbox), 0)
@override_settings(DEFAULT_REPLY_TO='noreply@example.com')
@override_settings(SALES_TEAM_EMAIL='sales@example.com')
@override_settings(PAYMENT_PORTAL_URL='example.com')
def test_exceeded_seats_not_notify_when_role_changed(self):
""" If user is org admin and program admin and users orgadmin
role removed then org admin should not get notification because
user still has seat as program admin."""
os.environ['APP_BRANCH'] = STAGING_BRANCH
header = {'category': ['exceed_paid_plan', 'from_staging_email']}
headers = {'X-SMTPAPI': json.dumps(header)}
external_response = self.ExternalResponse(None)
Subscription.retrieve = Mock(return_value=external_response)
self.tola_user.user.groups.add(self.group_org_admin)
self.tola_user.user.save()
self.org = Organization.objects.get(pk=self.org.id)
user = factories.User(first_name='John', last_name='Lennon')
tolauser = factories.TolaUser(user=user, organization=self.org)
tolauser.user.save()
wflvl1_1 = factories.WorkflowLevel1(name='WorkflowLevel1_1')
wfl_team = factories.WorkflowTeam(workflow_user=tolauser,
workflowlevel1=wflvl1_1,
role=self.group_program_team)
# It should notify the OrgAdmin
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 2)
self.assertEqual(len(mail.outbox), 1)
self.assertIn('Edit user exceeding notification',
mail.outbox[0].subject)
# After the role change it should not notify again
wfl_team.role = self.group_program_admin
wfl_team.save()
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 2)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].extra_headers, headers)
@override_settings(DEFAULT_REPLY_TO='noreply@example.com')
@override_settings(SALES_TEAM_EMAIL='sales@example.com')
@override_settings(PAYMENT_PORTAL_URL='example.com')
def test_exceeded_seats_not_notify_when_role_removed(self):
""" If user is org admin and program admin and users orgadmin
role removed then org admin should not get notification because
user still has seat as program admin."""
external_response = self.ExternalResponse(None)
Subscription.retrieve = Mock(return_value=external_response)
self.tola_user.user.groups.add(self.group_org_admin)
self.tola_user.user.save()
self.org = Organization.objects.get(pk=self.org.id)
user = factories.User(first_name='John', last_name='Lennon')
tolauser = factories.TolaUser(user=user, organization=self.org)
tolauser.user.save()
wflvl1_1 = factories.WorkflowLevel1(name='WorkflowLevel1_1')
wfl_team = factories.WorkflowTeam(workflow_user=tolauser,
workflowlevel1=wflvl1_1,
role=self.group_program_team)
# It should notify the OrgAdmin
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 2)
self.assertEqual(len(mail.outbox), 1)
self.assertIn('Edit user exceeding notification',
mail.outbox[0].subject)
# After the role is removed it should not notify again
wfl_team.role = self.group_view_only
wfl_team.save()
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 1)
self.assertEqual(len(mail.outbox), 1)
@tag('pkg')
class CheckSeatsDeleteWFTeamsTest(TestCase):
class ExternalResponse:
def __init__(self, values):
self.subscription = Subscription(values)
self.subscription.status = 'active'
self.subscription.plan_quantity = 1
def setUp(self):
os.environ['APP_BRANCH'] = ''
logging.disable(logging.ERROR)
self.group_org_admin = factories.Group(name=ROLE_ORGANIZATION_ADMIN)
self.group_program_admin = factories.Group(name=ROLE_PROGRAM_ADMIN)
self.org = factories.Organization(chargebee_subscription_id='12345')
self.tola_user = factories.TolaUser(organization=self.org)
def test_check_seats_delete_team_decrease(self):
external_response = self.ExternalResponse(None)
Subscription.retrieve = Mock(return_value=external_response)
wflvl1 = factories.WorkflowLevel1(name='WorkflowLevel1')
wfteam = factories.WorkflowTeam(workflow_user=self.tola_user,
workflowlevel1=wflvl1,
role=self.group_program_admin)
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 1)
wfteam.delete()
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 0)
def test_check_seats_delete_team_not_decrease(self):
external_response = self.ExternalResponse(None)
Subscription.retrieve = Mock(return_value=external_response)
wflvl1_1 = factories.WorkflowLevel1(name='WorkflowLevel1_1')
wflvl1_2 = factories.WorkflowLevel1(name='WorkflowLevel1_2')
wfteam1_1 = factories.WorkflowTeam(workflow_user=self.tola_user,
workflowlevel1=wflvl1_1,
role=self.group_program_admin)
factories.WorkflowTeam(workflow_user=self.tola_user,
workflowlevel1=wflvl1_2,
role=self.group_program_admin)
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 1)
wfteam1_1.delete()
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 1)
@patch('workflow.signals.tsync')
def test_check_seats_save_team_demo(self, mock_tsync):
os.environ['APP_BRANCH'] = DEMO_BRANCH
mock_tsync.create_instance.return_value = Mock()
self.tola_user.organization = factories.Organization()
wflvl1 = factories.WorkflowLevel1(name='WorkflowLevel1')
factories.WorkflowTeam(workflow_user=self.tola_user,
workflowlevel1=wflvl1,
role=self.group_program_admin)
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 0)
def test_check_seats_delete_team_org_admin(self):
external_response = self.ExternalResponse(None)
Subscription.retrieve = Mock(return_value=external_response)
self.tola_user.user.groups.add(self.group_org_admin)
self.tola_user.user.save()
wflvl1 = factories.WorkflowLevel1(name='WorkflowLevel1')
wfteam = factories.WorkflowTeam(workflow_user=self.tola_user,
workflowlevel1=wflvl1,
role=self.group_program_admin)
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 1)
wfteam.delete()
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 1)
@tag('pkg')
class CheckSeatsSaveUserGroupTest(TestCase):
class ExternalResponse:
def __init__(self, values):
self.subscription = Subscription(values)
self.subscription.status = 'active'
self.subscription.plan_quantity = 1
def setUp(self):
os.environ['APP_BRANCH'] = ''
logging.disable(logging.ERROR)
self.group_org_admin = factories.Group(name=ROLE_ORGANIZATION_ADMIN)
self.group_view_only = factories.Group(name=ROLE_VIEW_ONLY)
self.group_program_admin = factories.Group(name=ROLE_PROGRAM_ADMIN)
self.org = factories.Organization(chargebee_subscription_id='12345')
self.tola_user = factories.TolaUser(organization=self.org)
def test_check_seats_save_user_groups_increase(self):
external_response = self.ExternalResponse(None)
Subscription.retrieve = Mock(return_value=external_response)
self.tola_user.user.groups.add(self.group_org_admin)
self.tola_user.user.save()
# It should have only one seat because of the Org Admin role
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 1)
def test_check_seats_save_user_groups_decrease(self):
external_response = self.ExternalResponse(None)
Subscription.retrieve = Mock(return_value=external_response)
self.tola_user.user.groups.add(self.group_org_admin)
self.tola_user.user.save()
# It should have only one seat because of the Org Admin role
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 1)
self.tola_user.user.groups.remove(self.group_org_admin)
# The user doesn't have any WorkflowTeam and isn't Org Admin anymore
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 0)
def test_check_seats_save_user_groups_viewonly_doesnt_affect(self):
external_response = self.ExternalResponse(None)
Subscription.retrieve = Mock(return_value=external_response)
self.tola_user.user.groups.add(self.group_view_only)
self.tola_user.user.save()
# A view-only role doesn't consume a paid seat
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 0)
self.tola_user.user.groups.remove(self.group_view_only)
# The user doesn't have any WorkflowTeam and isn't Org Admin anymore
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 0)
def test_check_seats_save_user_groups_without_subscription(self):
external_response = self.ExternalResponse(None)
Subscription.retrieve = Mock(return_value=external_response)
self.tola_user.organization = factories.Organization()
self.tola_user.save()
self.tola_user.user.groups.add(self.group_org_admin)
self.tola_user.user.save()
# No subscription on the user's organization, so no seat is counted
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 0)
@patch('workflow.signals.tsync')
def test_check_seats_save_user_groups_demo(self, mock_tsync):
os.environ['APP_BRANCH'] = DEMO_BRANCH
mock_tsync.create_instance.return_value = Mock()
self.tola_user.organization = factories.Organization()
self.tola_user.save()
self.tola_user.user.groups.add(self.group_org_admin)
self.tola_user.user.save()
# Seats aren't tracked for organizations on the demo branch
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 0)
@override_settings(DEFAULT_REPLY_TO='noreply@example.com')
@override_settings(SALES_TEAM_EMAIL='sales@example.com')
@override_settings(PAYMENT_PORTAL_URL='example.com')
def test_check_seats_save_user_groups_exceed_notify(self):
external_response = self.ExternalResponse(None)
Subscription.retrieve = Mock(return_value=external_response)
self.tola_user.user.groups.add(self.group_org_admin)
self.tola_user.user.save()
self.org = Organization.objects.get(pk=self.org.id)
user = factories.User(first_name='John', last_name='Lennon')
tolauser = factories.TolaUser(user=user, organization=self.org)
tolauser.user.groups.add(self.group_org_admin)
tolauser.user.save()
# It should notify the OrgAdmin
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 2)
self.assertEqual(len(mail.outbox), 2)
self.assertIn('Edit user exceeding notification',
mail.outbox[0].subject)
for outbox in mail.outbox:
self.assertEqual(outbox.reply_to, ['noreply@example.com'])
self.assertEqual(outbox.bcc, ['sales@example.com'])
self.assertIn(outbox.to[0], [self.tola_user.user.email,
user.email])
# Text body
org_admin_name = ''
if outbox.to[0] == self.tola_user.user.email:
org_admin_name = 'Hi {},'.format(self.tola_user.name)
elif outbox.to[0] == user.email:
org_admin_name = 'Hi {},'.format(tolauser.name)
self.assertIn(org_admin_name, outbox.body)
available_seats = 'Purchased user seats: 1'
self.assertIn(available_seats, outbox.body)
used_seats = 'Current edit users in the system: 2'
self.assertIn(used_seats, outbox.body)
payment_portal_url = 'example.com'
self.assertIn(payment_portal_url, outbox.body)
# HTML body
org_admin_name = ''
if outbox.to[0] == self.tola_user.user.email:
org_admin_name = '<br>Hi {},</span>'.format(self.tola_user.name)
elif outbox.to[0] == user.email:
org_admin_name = '<br>Hi {},</span>'.format(tolauser.name)
self.assertIn(org_admin_name, outbox.alternatives[0][0])
available_seats = 'Purchased user seats: <b>1</b>'
self.assertIn(available_seats, outbox.alternatives[0][0])
used_seats = 'Current edit users in the system: <b>2</b>'
self.assertIn(used_seats, outbox.alternatives[0][0])
payment_portal_url = '<a href="example.com" target="_blank">' \
'Payment portal</a>'
self.assertIn(payment_portal_url, outbox.alternatives[0][0])
def test_check_seats_save_user_groups_retrieve_subscription_fails(self):
"""
        The number of seats is still increased in the system, but the plan
        quantity cannot be checked because the subscription retrieval
        failed, so no notification email is sent.
"""
json_obj = {
'message': "Sorry, we couldn't find that resource",
'error_code': 'resource_not_found'
}
sub_response = APIError(404, json_obj)
Subscription.retrieve = Mock(side_effect=sub_response)
self.tola_user.user.groups.add(self.group_org_admin)
self.tola_user.user.save()
self.org = Organization.objects.get(pk=self.org.id)
user = factories.User(first_name='John', last_name='Lennon')
tolauser = factories.TolaUser(user=user, organization=self.org)
tolauser.user.groups.add(self.group_org_admin)
tolauser.user.save()
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 2)
self.assertEqual(len(mail.outbox), 0)
@override_settings(DEFAULT_REPLY_TO='noreply@example.com')
@override_settings(SALES_TEAM_EMAIL='sales@example.com')
@override_settings(PAYMENT_PORTAL_URL='example.com')
def test_exceeded_seats_not_notify_when_role_removed(self):
""" If user has already seat as an orgadmin and when its role removed
than org admin should not get notification email """
external_response = self.ExternalResponse(None)
Subscription.retrieve = Mock(return_value=external_response)
self.tola_user.user.groups.add(self.group_org_admin)
self.tola_user.user.save()
self.org = Organization.objects.get(pk=self.org.id)
self.assertEqual(self.org.chargebee_used_seats, 1)
self.assertEqual(len(mail.outbox), 0)
user = factories.User(first_name='John', last_name='Lennon')
tolauser = factories.TolaUser(user=user, organization=self.org)
tolauser.user.groups.add(self.group_org_admin)
tolauser.user.save()
self.org = Organization.objects.get(pk=self.org.id)
self.assertEqual(self.org.chargebee_used_seats, 2)
self.assertEqual(len(mail.outbox), 2)
self.assertIn('Edit user exceeding notification',
mail.outbox[0].subject)
user2 = factories.User(first_name='Leonard', last_name='Cohen')
tolauser2 = factories.TolaUser(user=user2, organization=self.org)
tolauser2.user.groups.add(self.group_org_admin)
tolauser2.user.save()
self.org = Organization.objects.get(pk=self.org.id)
self.assertEqual(self.org.chargebee_used_seats, 3)
self.assertEqual(len(mail.outbox), 5)
        # The org admin role was removed; the outbox should stay unchanged
tolauser2.user.groups.remove(self.group_org_admin)
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 2)
self.assertEqual(len(mail.outbox), 5)
@override_settings(DEFAULT_REPLY_TO='noreply@example.com')
@override_settings(SALES_TEAM_EMAIL='sales@example.com')
@override_settings(PAYMENT_PORTAL_URL='example.com')
def test_exceeded_seats_not_notify_when_one_of_multiple_role_removed(self):
""" If user is org admin and program admin at the same time and users
orgadmin role removed then org admin should not get notification
because user still has seat as program admin."""
external_response = self.ExternalResponse(None)
Subscription.retrieve = Mock(return_value=external_response)
self.tola_user.user.groups.add(self.group_org_admin)
self.tola_user.user.save()
user = factories.User(first_name='John', last_name='Lennon')
self.org = Organization.objects.get(pk=self.org.id)
tolauser = factories.TolaUser(user=user, organization=self.org)
tolauser.user.groups.add(self.group_org_admin)
tolauser.user.save()
wflvl1_1 = factories.WorkflowLevel1(name='WorkflowLevel1_1')
factories.WorkflowTeam(workflow_user=tolauser,
workflowlevel1=wflvl1_1,
role=self.group_program_admin)
# It should notify the OrgAdmin
organization = Organization.objects.get(pk=self.org.id)
self.assertEqual(organization.chargebee_used_seats, 2)
self.assertEqual(len(mail.outbox), 2)
self.assertIn('Edit user exceeding notification',
mail.outbox[0].subject)
tolauser.user.groups.remove(self.group_org_admin)
self.assertEqual(organization.chargebee_used_seats, 2)
self.assertEqual(len(mail.outbox), 2)
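# A minimal sketch of the seat-counting rule the tests above pin down: org
# admins and WorkflowTeam members with an edit role consume one seat each,
# while ROLE_VIEW_ONLY is free. The helper name, the tolauser_set related
# name, and the queryset details are hypothetical, not the project's actual
# implementation; WorkflowTeam is assumed importable in this module.
def _count_used_seats_sketch(organization):
    seats = 0
    for tola_user in organization.tolauser_set.all():
        is_org_admin = tola_user.user.groups.filter(
            name=ROLE_ORGANIZATION_ADMIN).exists()
        has_edit_team = WorkflowTeam.objects.filter(
            workflow_user=tola_user).exclude(
            role__name=ROLE_VIEW_ONLY).exists()
        if is_org_admin or has_edit_team:
            seats += 1
    return seats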
@tag('pkg')
class AddPublicUrlTokenTest(TestCase):
def setUp(self):
self.tola_user = factories.TolaUser()
def test_add_public_url_token_create_not_public_url(self):
dashboard = Dashboard.objects.create(user=self.tola_user)
self.assertNotEqual(dashboard.public,
{'all': False, 'org': False, 'url': False})
self.assertIsNone(dashboard.public_url_token)
def test_add_public_url_token_create_public_url(self):
dashboard = Dashboard.objects.create(
user=self.tola_user,
public={'all': False, 'org': False, 'url': True})
self.assertEqual(dashboard.public,
{'all': False, 'org': False, 'url': True})
self.assertIsNotNone(dashboard.public_url_token)
def test_add_public_url_token_update_not_public_url(self):
dashboard = Dashboard.objects.create(user=self.tola_user)
dashboard.public = {'all': True, 'org': False, 'url': False}
dashboard.save()
self.assertEqual(dashboard.public,
{'all': True, 'org': False, 'url': False})
self.assertIsNone(dashboard.public_url_token)
def test_add_public_url_token_update_public_url(self):
dashboard = Dashboard.objects.create(user=self.tola_user)
dashboard.public = {'all': False, 'org': False, 'url': True}
dashboard.save()
self.assertEqual(dashboard.public,
{'all': False, 'org': False, 'url': True})
self.assertIsNotNone(dashboard.public_url_token)
def test_add_public_url_token_create_and_update(self):
dashboard = Dashboard.objects.create(
user=self.tola_user,
public={'all': False, 'org': False, 'url': True})
self.assertEqual(dashboard.public,
{'all': False, 'org': False, 'url': True})
self.assertIsNotNone(dashboard.public_url_token)
dashboard.public = {'all': True, 'org': False, 'url': False}
dashboard.save()
self.assertEqual(dashboard.public,
{'all': True, 'org': False, 'url': False})
self.assertIsNone(dashboard.public_url_token)
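# The four cases above encode a single rule: a dashboard carries a
# public_url_token exactly while public['url'] is True. A hedged sketch of
# the corresponding save-time logic; the helper name and the uuid token
# scheme are illustrative, not the app's actual implementation.
def _refresh_public_url_token_sketch(dashboard):
    import uuid
    if dashboard.public.get('url'):
        if not dashboard.public_url_token:
            dashboard.public_url_token = uuid.uuid4().hex
    else:
        dashboard.public_url_token = None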
@tag('pkg')
class SignalSyncTrackTest(TestCase):
def setUp(self):
factories.Group()
self.tola_user = factories.TolaUser()
@override_settings(TOLA_TRACK_URL='https://tolatrack.com')
@override_settings(TOLA_TRACK_TOKEN='TheToken')
@override_settings(TOLA_TRACK_SYNC_ENABLED=True)
@patch('workflow.signals.tsync')
def test_sync_save_create(self, mock_tsync):
mock_tsync.create_instance.return_value = Mock()
org = factories.Organization()
mock_tsync.create_instance.assert_called_with(org)
wfl1 = factories.WorkflowLevel1()
mock_tsync.create_instance.assert_called_with(wfl1)
@override_settings(TOLA_TRACK_URL='https://tolatrack.com')
@override_settings(TOLA_TRACK_TOKEN='TheToken')
@override_settings(TOLA_TRACK_SYNC_ENABLED=False)
@patch('workflow.signals.tsync')
def test_sync_save_create_disabled(self, mock_tsync):
mock_tsync.create_instance.return_value = Mock()
factories.Organization()
self.assertFalse(mock_tsync.create_instance.called)
factories.WorkflowLevel1()
self.assertFalse(mock_tsync.create_instance.called)
@override_settings(TOLA_TRACK_URL='https://tolatrack.com')
@override_settings(TOLA_TRACK_TOKEN='TheToken')
@override_settings(TOLA_TRACK_SYNC_ENABLED=True)
@patch('workflow.signals.tsync')
def test_sync_save_update(self, mock_tsync):
mock_tsync.create_instance.return_value = Mock()
mock_tsync.update_instance.return_value = Mock()
org = factories.Organization()
wfl1 = factories.WorkflowLevel1()
org.name = 'Another Org'
org.description = 'The Org name was changed'
org.save()
mock_tsync.update_instance.assert_called_with(org)
wfl1.name = 'Another Program'
wfl1.save()
mock_tsync.update_instance.assert_called_with(wfl1)
@override_settings(TOLA_TRACK_URL='https://tolatrack.com')
@override_settings(TOLA_TRACK_TOKEN='TheToken')
@override_settings(TOLA_TRACK_SYNC_ENABLED=False)
@patch('workflow.signals.tsync')
def test_sync_save_update_disabled(self, mock_tsync):
mock_tsync.create_instance.return_value = Mock()
mock_tsync.update_instance.return_value = Mock()
org = factories.Organization()
wfl1 = factories.WorkflowLevel1()
org.name = 'Another Org'
org.description = 'The Org name was changed'
org.save()
self.assertFalse(mock_tsync.update_instance.called)
wfl1.name = 'Another Program'
wfl1.save()
self.assertFalse(mock_tsync.update_instance.called)
@override_settings(TOLA_TRACK_URL='https://tolatrack.com')
@override_settings(TOLA_TRACK_TOKEN='TheToken')
@override_settings(TOLA_TRACK_SYNC_ENABLED=True)
@patch('workflow.signals.tsync')
def test_sync_save_delete(self, mock_tsync):
mock_tsync.create_instance.return_value = Mock()
mock_tsync.delete_instance.return_value = Mock()
org = factories.Organization()
wfl1 = factories.WorkflowLevel1()
org.delete()
mock_tsync.delete_instance.assert_called_with(org)
wfl1.delete()
mock_tsync.delete_instance.assert_called_with(wfl1)
@override_settings(TOLA_TRACK_URL='https://tolatrack.com')
@override_settings(TOLA_TRACK_TOKEN='TheToken')
@override_settings(TOLA_TRACK_SYNC_ENABLED=False)
@patch('workflow.signals.tsync')
def test_sync_save_delete_disabled(self, mock_tsync):
mock_tsync.create_instance.return_value = Mock()
mock_tsync.delete_instance.return_value = Mock()
org = factories.Organization()
wfl1 = factories.WorkflowLevel1()
org.delete()
self.assertFalse(mock_tsync.delete_instance.called)
wfl1.delete()
self.assertFalse(mock_tsync.delete_instance.called)
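# The six tests above check the same gate from three directions:
# tsync.create_instance / update_instance / delete_instance fire on model
# save and delete only while TOLA_TRACK_SYNC_ENABLED is True. A minimal
# sketch of one such receiver; the name is illustrative, tsync is assumed
# importable, and the @receiver(post_save) wiring is omitted so the sketch
# stays inert inside this test module.
def _sync_track_save_sketch(sender, instance, created, **kwargs):
    from django.conf import settings
    if not settings.TOLA_TRACK_SYNC_ENABLED:
        return
    if created:
        tsync.create_instance(instance)
    else:
        tsync.update_instance(instance)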
@tag('pkg')
class BudgetSaveTest(TestCase):
def setUp(self):
self.organization = factories.Organization()
self.user = factories.User()
self.tola_user = factories.TolaUser(organization=self.organization)
def test_save_budget_value_without_wfl2_create(self):
"""
        When a budget is created without a related wfl2, it should still save properly
"""
budget = factories.Budget(proposed_value=100, actual_value=10)
self.assertEqual(budget.proposed_value, 100.00)
self.assertEqual(budget.actual_value, 10.00)
def test_save_budget_value_from_wfl2_create(self):
"""
        When a budget is created, the related wfl2's total_estimated_budget
        and actual_cost should be updated
"""
wflvl1 = factories.WorkflowLevel1(name='WorkflowLevel1',
organization=self.organization)
wflvl2 = factories.WorkflowLevel2(name='WorkflowLevel2',
workflowlevel1=wflvl1,
total_estimated_budget=0,
actual_cost=0)
factories.Budget(proposed_value=100,
actual_value=10,
workflowlevel2=wflvl2)
wflvl2 = WorkflowLevel2.objects.get(pk=wflvl2.pk)
self.assertEqual(wflvl2.total_estimated_budget, 100.00)
self.assertEqual(wflvl2.actual_cost, 10.00)
def test_save_budget_value_from_wfl2_create_more(self):
"""
        When another budget is created for the same wfl2, the wfl2's totals
        should accumulate the values of all its budgets
"""
wflvl1 = factories.WorkflowLevel1(name='WorkflowLevel1',
organization=self.organization)
wflvl2 = factories.WorkflowLevel2(name='WorkflowLevel2',
workflowlevel1=wflvl1,
total_estimated_budget=0,
actual_cost=0)
factories.Budget(proposed_value=100,
actual_value=10,
workflowlevel2=wflvl2)
factories.Budget(proposed_value=80,
actual_value=20,
workflowlevel2=wflvl2)
wflvl2 = WorkflowLevel2.objects.get(pk=wflvl2.pk)
self.assertEqual(wflvl2.total_estimated_budget, 180.00)
self.assertEqual(wflvl2.actual_cost, 30.00)
def test_save_budget_value_from_wfl2_update(self):
"""
        When a budget is updated, the related wfl2's total_estimated_budget
        and actual_cost should be recalculated
"""
wflvl1 = factories.WorkflowLevel1(name='WorkflowLevel1',
organization=self.organization)
wflvl2 = factories.WorkflowLevel2(name='WorkflowLevel2',
workflowlevel1=wflvl1,
total_estimated_budget=0,
actual_cost=0)
budget = factories.Budget(proposed_value=100,
actual_value=10,
workflowlevel2=wflvl2)
budget.proposed_value = 150
budget.actual_value = 50
budget.save()
wflvl2 = WorkflowLevel2.objects.get(pk=wflvl2.pk)
self.assertEqual(wflvl2.total_estimated_budget, 150.00)
self.assertEqual(wflvl2.actual_cost, 50.00)
@tag('pkg')
class BudgetDeleteTest(TestCase):
def setUp(self):
self.organization = factories.Organization()
self.user = factories.User()
self.tola_user = factories.TolaUser(organization=self.organization)
def test_delete_budget_value_from_wfl2(self):
"""
        When a budget is deleted, the related wfl2's total_estimated_budget
        and actual_cost should be updated
"""
wflvl1 = factories.WorkflowLevel1(name='WorkflowLevel1',
organization=self.organization)
wflvl2 = factories.WorkflowLevel2(name='WorkflowLevel2',
workflowlevel1=wflvl1,
total_estimated_budget=0,
actual_cost=0)
factories.Budget(proposed_value=100,
actual_value=10,
workflowlevel2=wflvl2)
budget_2 = factories.Budget(proposed_value=80,
actual_value=20,
workflowlevel2=wflvl2)
wflvl2 = WorkflowLevel2.objects.get(pk=wflvl2.pk)
self.assertEqual(wflvl2.total_estimated_budget, 180.00)
self.assertEqual(wflvl2.actual_cost, 30.00)
budget_2.delete()
wflvl2 = WorkflowLevel2.objects.get(pk=wflvl2.pk)
self.assertEqual(wflvl2.total_estimated_budget, 100.00)
self.assertEqual(wflvl2.actual_cost, 10.00)
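# Every budget test above reduces to one invariant: after any Budget save
# or delete, the parent wfl2 holds the sum of its budgets' values. A hedged
# sketch of that recomputation with Django aggregation; the function name
# is illustrative and the Budget model is assumed importable here.
def _recalculate_wfl2_budget_sketch(workflowlevel2):
    from django.db.models import Sum
    totals = Budget.objects.filter(
        workflowlevel2=workflowlevel2).aggregate(
        proposed=Sum('proposed_value'), actual=Sum('actual_value'))
    workflowlevel2.total_estimated_budget = totals['proposed'] or 0
    workflowlevel2.actual_cost = totals['actual'] or 0
    workflowlevel2.save()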
[file: tests/test_version.py | repo: leukgen/register_toil | license: MIT | lang: Python]
"""register_apps version test."""
from register_apps import __version__
def test_version():
"""Sample test for the __version__ variable."""
assert __version__
[file: test/cmd/unix/test_cmd_sftp.py | repo: jochenparm/moler | license: BSD-3-Clause | lang: Python]
# -*- coding: utf-8 -*-
"""
SFTP command test module.
"""
__author__ = 'Agnieszka Bylica, Marcin Usielski'
__copyright__ = 'Copyright (C) 2018-2019, Nokia'
__email__ = 'agnieszka.bylica@nokia.com, marcin.usielski@nokia.com'
import pytest
import time
from moler.cmd.unix.sftp import Sftp
from moler.exceptions import CommandFailure
import datetime
def test_sftp_returns_proper_command_string(buffer_connection):
sftp_cmd = Sftp(connection=buffer_connection.moler_connection, host="myhost.com", password='1234')
assert "sftp myhost.com" == sftp_cmd.command_string
def test_sftp_returns_proper_command_string_options(buffer_connection):
sftp_cmd = Sftp(connection=buffer_connection.moler_connection, host="myhost.com", password='1234', options='-4')
assert "sftp -4 myhost.com" == sftp_cmd.command_string
def test_sftp_returns_proper_command_string_user(buffer_connection):
sftp_cmd = Sftp(connection=buffer_connection.moler_connection, host="myhost.com", user="fred", password='1234')
assert "sftp fred@myhost.com" == sftp_cmd.command_string
def test_sftp_returns_proper_command_string_pathname(buffer_connection):
sftp_cmd = Sftp(connection=buffer_connection.moler_connection, host="myhost.com", user="fred", password='1234',
source_path="/home/fred/homework.txt")
assert "sftp fred@myhost.com:/home/fred/homework.txt" == sftp_cmd.command_string
def test_sftp_returns_proper_command_string_new_pathname(buffer_connection):
sftp_cmd = Sftp(connection=buffer_connection.moler_connection, host="myhost.com", user="fred", password='1234',
source_path="/home/fred/homework.txt", destination_path="/home/vivi/new_homework.txt")
assert "sftp fred@myhost.com:/home/fred/homework.txt /home/vivi/new_homework.txt" == sftp_cmd.command_string
def test_sftp_returns_proper_result(buffer_connection, command_output_and_expected_result):
command_output, expected_result = command_output_and_expected_result
buffer_connection.remote_inject_response([command_output])
sftp_cmd = Sftp(connection=buffer_connection.moler_connection, host='192.168.0.102', user='fred', password='1234',
options='-4', source_path='bear', destination_path='/home/xyz/Docs/bear')
result = sftp_cmd()
assert result == expected_result
@pytest.fixture
def command_output_and_expected_result():
data = """xyz@debian:/home$ sftp -4 fred@192.168.0.102:bear /home/xyz/Docs/bear
The authenticity of host '192.168.0.102 (192.168.0.102)' can't be established.
ECDSA key fingerprint is SHA256:ghQ3iy/gH4YTqZOggql1eJCe3EETOOpn5yANJwFeRt0.
Are you sure you want to continue connecting (yes/no)?
Warning: Permanently added '192.168.0.102' (ECDSA) to the list of known hosts.
fred@192.168.0.102's password:
Permission denied, please try again.
fred@192.168.0.102's password:
Connected to 192.168.0.102.
Fetching /upload/bear to /home/xyz/Docs/bear
/upload/bear 100% 23 34.4KB/s 00:00
xyz@debian:/home$"""
result = {'RESULT': ["Fetching /upload/bear to /home/xyz/Docs/bear",
"/upload/bear 100% 23 34.4KB/s 00:00"]}
return data, result
def test_sftp_raises_authentication_failure(buffer_connection,
command_output_and_expected_result_authentication_failure):
command_output, expected_result = command_output_and_expected_result_authentication_failure
buffer_connection.remote_inject_response([command_output])
sftp_cmd = Sftp(connection=buffer_connection.moler_connection, host='192.168.0.102', user='fred', password='1234',
source_path='cat', destination_path='/home/xyz/Docs/cat')
with pytest.raises(CommandFailure):
sftp_cmd()
@pytest.fixture
def command_output_and_expected_result_authentication_failure():
data = """xyz@debian:/home$ sftp fred@192.168.0.102:cat /home/xyz/Docs/cat
fred@192.168.0.102's password:
Permission denied, please try again.
fred@192.168.0.102's password:
Permission denied, please try again.
fred@192.168.0.102's password:
Permission denied (publickey,password).
xyz@debian:/home$"""
result = dict()
return data, result
def test_sftp_raises_file_error_file_not_found(buffer_connection, command_output_and_expected_result_file_not_found):
command_output, expected_result = command_output_and_expected_result_file_not_found
buffer_connection.remote_inject_response([command_output])
sftp_cmd = Sftp(connection=buffer_connection.moler_connection, host='192.168.0.102', user='fred', password='1234',
source_path='dog', destination_path='/home/xyz/Docs/dog')
with pytest.raises(CommandFailure):
sftp_cmd()
@pytest.fixture
def command_output_and_expected_result_file_not_found():
data = """xyz@debian:/home$ sftp fred@192.168.0.102:dog /home/xyz/Docs/dog
fred@192.168.0.102's password:
Connected to 192.168.0.102.
File "/upload/dog" not found.
xyz@debian:/home$"""
result = dict()
return data, result
def test_sftp_raises_file_error_no_such_file(buffer_connection, command_output_and_expected_result_no_such_file):
command_output, expected_result = command_output_and_expected_result_no_such_file
buffer_connection.remote_inject_response([command_output])
sftp_cmd = Sftp(connection=buffer_connection.moler_connection, host='192.168.0.102', user='fred', password='1234',
source_path='dog', destination_path='/home/xyz/Work/dog')
with pytest.raises(CommandFailure):
sftp_cmd()
@pytest.fixture
def command_output_and_expected_result_no_such_file():
data = """xyz@debian:/home$ sftp fred@192.168.0.102:dog /home/xyz/Work/dog
fred@192.168.0.102's password:
Connected to 192.168.0.102.
Couldn't open local file "/home/xyz/Work/dog" for writing: No such file or directory
xyz@debian:/home$"""
result = dict()
return data, result
def test_sftp_raises_connection_error(buffer_connection, command_output_and_expected_result_connection_error):
command_output, expected_result = command_output_and_expected_result_connection_error
buffer_connection.remote_inject_response([command_output])
sftp_cmd = Sftp(connection=buffer_connection.moler_connection, host='192.168.0.102', user='fred', password='1234',
options='-6', command='get animals/pets/dog /root/dog')
with pytest.raises(CommandFailure):
sftp_cmd()
@pytest.fixture
def command_output_and_expected_result_connection_error():
data = """xyz@debian:/home$ sftp -6 fred@192.168.0.102
ssh: Could not resolve hostname 192.168.0.102: Address family for hostname not supported
Couldn't read packet: Connection reset by peer
xyz@debian:/home$"""
result = dict()
return data, result
def test_sftp_raises_permission_denied_error(buffer_connection, command_output_and_expected_result_permission_denied):
command_output, expected_result = command_output_and_expected_result_permission_denied
buffer_connection.remote_inject_response([command_output])
sftp_cmd = Sftp(connection=buffer_connection.moler_connection, host='192.168.0.102', user='fred', password='1234',
command='get animals/pets/dog /root/dog')
with pytest.raises(CommandFailure):
sftp_cmd()
@pytest.fixture
def command_output_and_expected_result_permission_denied():
data = """xyz@debian:/home$ sftp fred@192.168.0.102
fred@192.168.0.102's password:
Connected to 192.168.0.102.
sftp>
Fetching /upload/animals/pets/dog to /root/dog
Couldn't open local file "/root/dog" for writing: Permission denied
sftp>
xyz@debian:/home$"""
result = dict()
return data, result
def test_sftp_raises_invalid_command_error(buffer_connection, command_output_and_expected_result_invalid_command):
command_output, expected_result = command_output_and_expected_result_invalid_command
buffer_connection.remote_inject_response([command_output])
sftp_cmd = Sftp(connection=buffer_connection.moler_connection, host='192.168.0.102', user='fred', password='1234',
options='-i')
with pytest.raises(CommandFailure):
sftp_cmd()
@pytest.fixture
def command_output_and_expected_result_invalid_command():
data = """xyz@debian:/home$ sftp -i fred@192.168.0.102
usage: sftp [-1246aCfpqrv] [-B buffer_size] [-b batchfile] [-c cipher]
[-D sftp_server_path] [-F ssh_config] [-i identity_file] [-l limit]
[-o ssh_option] [-P port] [-R num_requests] [-S program]
[-s subsystem | sftp_server] host
sftp [user@]host[:file ...]
sftp [user@]host[:dir[/]]
sftp -b batchfile [user@]host
xyz@debian:/home$"""
result = dict()
return data, result
def test_sftp_raises_invalid_option_error(buffer_connection, command_output_and_expected_result_invalid_option):
command_output, expected_result = command_output_and_expected_result_invalid_option
buffer_connection.remote_inject_response([command_output])
sftp_cmd = Sftp(connection=buffer_connection.moler_connection, host='192.168.0.102', user='fred', password='1234',
options='-d')
with pytest.raises(CommandFailure):
sftp_cmd()
@pytest.fixture
def command_output_and_expected_result_invalid_option():
data = """xyz@debian:/home$ sftp -d fred@192.168.0.102
unknown option -- d
usage: sftp [-1246aCfpqrv] [-B buffer_size] [-b batchfile] [-c cipher]
[-D sftp_server_path] [-F ssh_config] [-i identity_file] [-l limit]
[-o ssh_option] [-P port] [-R num_requests] [-S program]
[-s subsystem | sftp_server] host
sftp [user@]host[:file ...]
sftp [user@]host[:dir[/]]
sftp -b batchfile [user@]host
xyz@debian:/home$"""
result = dict()
return data, result
def test_sftp_raises_ssh_error(buffer_connection, command_output_and_expected_result_ssh_error):
command_output, expected_result = command_output_and_expected_result_ssh_error
buffer_connection.remote_inject_response([command_output])
sftp_cmd = Sftp(connection=buffer_connection.moler_connection, host='192.168.0.103', user='fred', password='1234')
with pytest.raises(CommandFailure):
sftp_cmd()
@pytest.fixture
def command_output_and_expected_result_ssh_error():
data = """xyz@debian:/home$ sftp fred@192.168.0.103
ssh: connect to host 192.168.0.103 port 22: No route to host
Couldn't read packet: Connection reset by peer
xyz@debian:/home$ """
result = dict()
return data, result
def test_sftp_raises_not_confirmed_connection(buffer_connection, command_output_and_expected_result_not_confirmed):
sftp_cmd = Sftp(connection=buffer_connection.moler_connection, host='192.168.0.102', user='fred', password='1234',
confirm_connection=False, command="mkdir", no_result=True)
assert "sftp fred@192.168.0.102" == sftp_cmd.command_string
sftp_cmd.start()
command_output, expected_result = command_output_and_expected_result_not_confirmed
time.sleep(0.2)
for output in command_output:
buffer_connection.moler_connection.data_received(output.encode("utf-8"), datetime.datetime.now())
with pytest.raises(CommandFailure):
sftp_cmd.await_done(timeout=2)
@pytest.fixture
def command_output_and_expected_result_not_confirmed():
output1 = """xyz@debian:/home$ sftp fred@192.168.0.102
The authenticity of host '192.168.0.102 (192.168.0.102)' can't be established.
ECDSA key fingerprint is SHA256:ghQ3iy/gH4YTqZOggql1eJCe3EETOOpn5yANJwFeRt0.
Are you sure you want to continue connecting (yes/no)?"""
output2 = """Are you sure you want to continue connecting (yes/no)? no
Host key verification failed.
xyz@debian:/home$ """
outputs = [output1, output2]
result = {}
return outputs, result
def test_sftp_returns_result_pwd_in_prompt(buffer_connection, command_output_and_expected_result_pwd_in_prompt):
sftp_cmd = Sftp(connection=buffer_connection.moler_connection, host='192.168.0.102', user='fred', password='1234',
command='pwd')
assert "sftp fred@192.168.0.102" == sftp_cmd.command_string
command_output, expected_result = command_output_and_expected_result_pwd_in_prompt
sftp_cmd.start()
time.sleep(0.1)
for output in command_output:
buffer_connection.moler_connection.data_received(output.encode("utf-8"), datetime.datetime.now())
sftp_cmd.await_done()
assert sftp_cmd.current_ret == expected_result
assert sftp_cmd.done() is True
@pytest.fixture
def command_output_and_expected_result_pwd_in_prompt():
output1 = """xyz@debian:/home$ sftp fred@192.168.0.102
The authenticity of host '192.168.0.102 (192.168.0.102)' can't be established.
ECDSA key fingerprint is SHA256:ghQ3iy/gH4YTqZOggql1eJCe3EETOOpn5yANJwFeRt0.
Are you sure you want to continue connecting (yes/no)?"""
output2 = """Are you sure you want to continue connecting (yes/no)? yes
Warning: Permanently added '192.168.0.102' (ECDSA) to the list of known hosts.
fred@192.168.0.102's password:"""
output3 = """fred@192.168.0.102's password:
Connected to 192.168.0.102.
sftp>"""
output4 = """
sftp> pwd
Remote working directory: /upload
sftp>"""
output5 = """
sftp>
sftp> exit"""
output6 = """sftp> exit
xyz@debian:/home$"""
outputs = [output1, output2, output3, output4, output5, output6]
result = {'RESULT': ["Remote working directory: /upload"]}
return outputs, result
def test_sftp_no_result(buffer_connection, command_output_and_expected_result_no_result):
sftp_cmd = Sftp(connection=buffer_connection.moler_connection, host='192.168.0.102', user='fred', password='1234',
command="mkdir pet", no_result=True)
assert "sftp fred@192.168.0.102" == sftp_cmd.command_string
command_output, expected_result = command_output_and_expected_result_no_result
sftp_cmd.start(timeout=1)
time.sleep(0.1)
for output in command_output:
buffer_connection.moler_connection.data_received(output.encode("utf-8"), datetime.datetime.now())
assert sftp_cmd.current_ret == expected_result
sftp_cmd.await_done()
assert sftp_cmd.done() is True
@pytest.fixture
def command_output_and_expected_result_no_result():
output1 = """xyz@debian:/home$ sftp fred@192.168.0.102
fred@192.168.0.102's password:"""
output2 = """fred@192.168.0.102's password:
Connected to 192.168.0.102.
sftp>"""
output3 = "\n"
output4 = ""
output5 = "\n"
output6 = "sftp> mkdir pet\n"
output7 = " mkdir pet\n"
output8 = "\t \t"
output9 = "\n"
output10 = "sftp>"
output11 = "\n"
output12 = "sftp> exit\n"
output13 = "sftp> exit\n"
output14 = "xyz@debian:/home$"
outputs = [output1, output2, output3, output4, output5, output6, output7, output8, output9, output10, output11,
output12, output13, output14]
result = {}
return outputs, result
def test_sftp_returns_result_of_fetching_file_with_progress_bar(buffer_connection,
command_output_and_expected_result_progress_bar):
sftp_cmd = Sftp(connection=buffer_connection.moler_connection, host='192.168.0.102', user='fred', password='1234',
source_path="debian-9.5.0-i386-netinst.iso")
assert "sftp fred@192.168.0.102:debian-9.5.0-i386-netinst.iso" == sftp_cmd.command_string
command_output, expected_result = command_output_and_expected_result_progress_bar
sftp_cmd.start()
time.sleep(0.1)
for output in command_output:
buffer_connection.moler_connection.data_received(output.encode("utf-8"), datetime.datetime.now())
sftp_cmd.await_done()
assert sftp_cmd.current_ret == expected_result
assert sftp_cmd.done() is True
@pytest.fixture
def command_output_and_expected_result_progress_bar():
output1 = """xyz@debian:/home$ sftp fred@192.168.0.102:debian-9.5.0-i386-netinst.iso
fred@192.168.0.102's password:"""
output2 = """fred@192.168.0.102's password:
Connected to 192.168.0.102.
Fetching /upload/debian-9.5.0-i386-netinst.iso to /home/debian-9.5.0-i386-netinst.iso
/upload/debian-9.5.0-i386-netinst.iso 0% 0 0.0KB/s --:-- ETA
/upload/debian-9.5.0-i386-netinst.iso 10% 38MB 37.7MB/s 00:08 ETA
/upload/debian-9.5.0-i386-netinst.iso 18% 69MB 37.1MB/s 00:08 ETA
/upload/debian-9.5.0-i386-netinst.iso 26% 102MB 36.6MB/s 00:07 ETA
/upload/debian-9.5.0-i386-netinst.iso 34% 130MB 35.8MB/s 00:06 ETA
/upload/debian-9.5.0-i386-netinst.iso 42% 159MB 35.1MB/s 00:06 ETA
/upload/debian-9.5.0-i386-netinst.iso 50% 191MB 34.8MB/s 00:05 ETA
/upload/debian-9.5.0-i386-netinst.iso 58% 219MB 34.2MB/s 00:04 ETA
/upload/debian-9.5.0-i386-netinst.iso 66% 252MB 34.0MB/s 00:03 ETA
/upload/debian-9.5.0-i386-netinst.iso 74% 282MB 33.6MB/s 00:02 ETA
/upload/debian-9.5.0-i386-netinst.iso 83% 313MB 33.4MB/s 00:01 ETA
/upload/debian-9.5.0-i386-netinst.iso 90% 341MB 32.8MB/s 00:01 ETA
/upload/debian-9.5.0-i386-netinst.iso 97% 367MB 32.1MB/s 00:00 ETA
/upload/debian-9.5.0-i386-netinst.iso 100% 377MB 30.4MB/s 00:12
xyz@debian:/home$"""
outputs = [output1, output2]
result = {'RESULT': ['Fetching /upload/debian-9.5.0-i386-netinst.iso to /home/debian-9.5.0-i386-netinst.iso',
'/upload/debian-9.5.0-i386-netinst.iso 100%'
' 377MB 30.4MB/s 00:12']}
return outputs, result
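# The eight test_sftp_raises_* functions above share one shape: inject a
# canned transcript, run the command, and expect CommandFailure. A sketch of
# how pytest.mark.parametrize could collapse them; COMMAND_OUTPUT_* are
# hypothetical constants holding the full transcripts from the fixtures
# above, which remain the real tests:
#
# @pytest.mark.parametrize("transcript, sftp_kwargs", [
#     (COMMAND_OUTPUT_AUTH_FAILURE,
#      dict(source_path='cat', destination_path='/home/xyz/Docs/cat')),
#     (COMMAND_OUTPUT_FILE_NOT_FOUND,
#      dict(source_path='dog', destination_path='/home/xyz/Docs/dog')),
# ])
# def test_sftp_failure_cases(buffer_connection, transcript, sftp_kwargs):
#     buffer_connection.remote_inject_response([transcript])
#     sftp_cmd = Sftp(connection=buffer_connection.moler_connection,
#                     host='192.168.0.102', user='fred', password='1234',
#                     **sftp_kwargs)
#     with pytest.raises(CommandFailure):
#         sftp_cmd()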
[file: commonroad/tests/geometry/test_shape.py | repo: CommonRoad/commonroad-io | license: BSD-3-Clause | lang: Python]
import numpy as np
import unittest
from commonroad.geometry.shape import Rectangle, Circle, Polygon, ShapeGroup
__author__ = "Moritz Untersperger"
__copyright__ = "TUM Cyber-Physical Systems Group"
__credits__ = ["Priority Program SPP 1835 Cooperative Interacting Automobiles"]
__version__ = "2022.1"
__maintainer__ = "Moritz Untersperger"
__email__ = "commonroad@lists.lrz.de"
__status__ = "Released"
class TestRectangle(unittest.TestCase):
def test_rotate_90deg(self):
translation = np.array([0.0, 0.0])
rotation_angle = np.pi/2
homogeneous_initial_vector = Rectangle(1, 1)
expected_transposed_vertices = np.array([[0.5, -0.5], [-0.5, -0.5], [-0.5, 0.5], [0.5, 0.5], [0.5, -0.5]])
transposed_vector = homogeneous_initial_vector.translate_rotate(translation, rotation_angle)
for tv, etv in zip(transposed_vector.vertices, expected_transposed_vertices):
for tv_item, etv_item in zip(tv, etv):
self.assertAlmostEqual(tv_item, etv_item)
def test_translate(self):
translation = np.array([5.52, -2.2])
rotation_angle = 0
homogeneous_initial_vector = Rectangle(1, 1)
expected_transposed_vertices = np.array([[5.02, -2.7], [5.02, -1.7], [6.02, -1.7], [6.02, -2.7], [5.02, -2.7]])
transposed_vector = homogeneous_initial_vector.translate_rotate(translation, rotation_angle)
for tv, etv in zip(transposed_vector.vertices, expected_transposed_vertices):
for tv_item, etv_item in zip(tv, etv):
self.assertAlmostEqual(tv_item, etv_item)
def test_contains_point(self):
initial_rectangle = Rectangle(1, 1)
expected_contained_point = np.array([0, 0])
self.assertTrue(initial_rectangle.contains_point(expected_contained_point))
def test_side_effect(self):
        # until commonroad-io 2021.4 there was a side effect where rectangle_2.center == np.array([1.0, 0.0])
rectangle_1 = Rectangle(length=4.0, width=2.0, orientation=1.0)
rectangle_1.center[0] = 1.0
rectangle_2 = Rectangle(length=4.0, width=2.0, orientation=1.0)
np.testing.assert_array_equal(np.array([0.0, 0.0]), rectangle_2.center)
def test__compute_vertices(self):
initial_rectangle = Rectangle(1, 1)
expected_computed_vertices = np.array([[-0.5, -0.5], [-0.5, 0.5], [0.5, 0.5], [0.5, -0.5], [-0.5, -0.5]])
computed_vertices = initial_rectangle._compute_vertices()
for cv, ecv in zip(computed_vertices, expected_computed_vertices):
for cv_item, ecv_item in zip(cv, ecv):
self.assertAlmostEqual(cv_item, ecv_item)
class TestCircle(unittest.TestCase):
def test_rotate_90deg(self):
translation = np.array([0.0, 0.0])
rotation_angle = np.pi / 2
initial_circle = Circle(2)
expected_transposed_center = np.array([0.0, 0.0])
transposed_circle = initial_circle.translate_rotate(translation, rotation_angle)
for tc, etc in zip(transposed_circle.center, expected_transposed_center):
self.assertAlmostEqual(tc, etc)
def test_translate(self):
translation = np.array([5.52, -2.2])
rotation_angle = 0
initial_circle = Circle(2)
expected_transposed_center = np.array([5.52, -2.2])
transposed_circle = initial_circle.translate_rotate(translation, rotation_angle)
for tc, etc in zip(transposed_circle.center, expected_transposed_center):
self.assertAlmostEqual(tc, etc)
def test_side_effect(self):
        # until commonroad-io 2021.4 there was a side effect where circle_2.center == np.array([1.0, 0.0])
circle_1 = Circle(radius=1.0)
circle_1.center[0] = 1.0
circle_2 = Circle(radius=1.0)
np.testing.assert_array_equal(np.array([0.0, 0.0]), circle_2.center)
def test_contains_point(self):
        initial_circle = Circle(2)
expected_contained_point = np.array([0, 0])
self.assertTrue(initial_circle.contains_point(expected_contained_point))
class TestPolygon(unittest.TestCase):
def test_rotate_90deg(self):
translation = np.array([0.0, 0.0])
rotation_angle = np.pi / 2
homogeneous_initial_vector = Polygon(np.array([[-0.5, -0.5], [0.5, -0.5], [0.5, 0.5], [-0.5, 0.5]]))
expected_transposed_vertices = np.array([[0.5, -0.5], [-0.5, -0.5], [-0.5, 0.5], [0.5, 0.5], [0.5, -0.5]])
transposed_vector = homogeneous_initial_vector.translate_rotate(translation, rotation_angle)
for tv, etv in zip(transposed_vector.vertices, expected_transposed_vertices):
for tv_item, etv_item in zip(tv, etv):
self.assertAlmostEqual(tv_item, etv_item)
def test_translate(self):
translation = np.array([5.52, -2.2])
rotation_angle = 0
homogeneous_initial_vector = Polygon(np.array([[-0.5, -0.5], [0.5, -0.5], [0.5, 0.5], [-0.5, 0.5]]))
expected_transposed_vertices = np.array([[5.02, -2.7], [5.02, -1.7], [6.02, -1.7], [6.02, -2.7], [5.02, -2.7]])
transposed_vector = homogeneous_initial_vector.translate_rotate(translation, rotation_angle)
for tv, etv in zip(transposed_vector.vertices, expected_transposed_vertices):
for tv_item, etv_item in zip(tv, etv):
self.assertAlmostEqual(tv_item, etv_item)
def test_contains_point(self):
initial_rectangle = Polygon(np.array([[-0.5, -0.5], [0.5, -0.5], [0.5, 0.5], [-0.5, 0.5]]))
expected_contained_point = np.array([0, 0])
self.assertTrue(initial_rectangle.contains_point(expected_contained_point))
class TestShapeGroup(unittest.TestCase):
def test_rotate_90deg(self):
translation = np.array([0.0, 0.0])
rotation_angle = np.pi / 2
initial_shape_one = Polygon(np.array([[-0.5, -0.5], [0.5, -0.5], [0.5, 0.5], [-0.5, 0.5]]))
initial_shape_two = Rectangle(1, 1)
initial_shape_three = Circle(2)
shape_group = list()
shape_group.append(initial_shape_one)
shape_group.append(initial_shape_two)
shape_group.append(initial_shape_three)
shape_group = ShapeGroup(shape_group)
expected_transposed_vertices_one = np.array([[0.5, -0.5], [-0.5, -0.5], [-0.5, 0.5], [0.5, 0.5], [0.5, -0.5]])
expected_transposed_vertices_two = np.array([[0.5, -0.5], [-0.5, -0.5], [-0.5, 0.5], [0.5, 0.5], [0.5, -0.5]])
expected_transposed_center_three = np.array([0.0, 0.0])
expected_transposed = list()
expected_transposed.append(expected_transposed_vertices_one)
expected_transposed.append(expected_transposed_vertices_two)
expected_transposed.append(expected_transposed_center_three)
shape_group = shape_group.translate_rotate(translation, rotation_angle)
for sg, et in zip(shape_group.shapes[0:2], expected_transposed[0:2]):
for sg1, et1 in zip(sg.vertices, et):
for sg1_item, et1_item in zip(sg1, et1):
self.assertAlmostEqual(sg1_item, et1_item)
for sg, et in zip(shape_group.shapes[2].center, expected_transposed[2]):
self.assertAlmostEqual(sg, et)
def test_translate(self):
translation = np.array([5.52, -2.2])
rotation_angle = 0
initial_shape_one = Polygon(np.array([[-0.5, -0.5], [0.5, -0.5], [0.5, 0.5], [-0.5, 0.5]]))
initial_shape_two = Rectangle(1, 1)
initial_shape_three = Circle(2)
shape_group = list()
shape_group.append(initial_shape_one)
shape_group.append(initial_shape_two)
shape_group.append(initial_shape_three)
shape_group = ShapeGroup(shape_group)
expected_transposed_vertices_one = np.array([[5.02, -2.7], [5.02, -1.7], [6.02, -1.7], [6.02, -2.7], [5.02, -2.7]])
expected_transposed_vertices_two = np.array([[5.02, -2.7], [5.02, -1.7], [6.02, -1.7], [6.02, -2.7], [5.02, -2.7]])
expected_transposed_center_three = np.array([5.52, -2.2])
expected_transposed = list()
expected_transposed.append(expected_transposed_vertices_one)
expected_transposed.append(expected_transposed_vertices_two)
expected_transposed.append(expected_transposed_center_three)
shape_group = shape_group.translate_rotate(translation, rotation_angle)
for sg, et in zip(shape_group.shapes[0:2], expected_transposed[0:2]):
for sg1, et1 in zip(sg.vertices, et):
for sg1_item, et1_item in zip(sg1, et1):
self.assertAlmostEqual(sg1_item, et1_item)
for sg, et in zip(shape_group.shapes[2].center, expected_transposed[2]):
self.assertAlmostEqual(sg, et)
def test_contains_point(self):
expected_contained_point = np.array([0, 0])
initial_shape_one = Polygon(np.array([[-0.5, -0.5], [0.5, -0.5], [0.5, 0.5], [-0.5, 0.5]]))
initial_shape_two = Rectangle(1, 1)
initial_shape_three = Circle(2)
shape_group = list()
shape_group.append(initial_shape_one)
shape_group.append(initial_shape_two)
shape_group.append(initial_shape_three)
shape_group = ShapeGroup(shape_group)
self.assertTrue(shape_group.contains_point(expected_contained_point))
if __name__ == '__main__':
unittest.main()
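# The 90-degree expectations above follow from the plane rotation matrix:
# with theta = pi / 2, R(theta) maps (x, y) to (-y, x), so the corner
# (-0.5, -0.5) of Rectangle(1, 1) lands on (0.5, -0.5), the first expected
# vertex. An inert, illustrative check (not part of the original suite):
def _illustrative_rotation_check():
    theta = np.pi / 2
    rot = np.array([[np.cos(theta), -np.sin(theta)],
                    [np.sin(theta), np.cos(theta)]])
    corner = np.array([-0.5, -0.5])
    np.testing.assert_allclose(rot @ corner, [0.5, -0.5], atol=1e-12)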
[file: ExampleCases/OpFAST_FLORIS_WF3x1/plotyaw_RIS.py | repo: tonino102008/openfast | license: Apache-2.0 | lang: Python]
import matplotlib.pyplot as plt
import numpy
import pandas as pd
import control.matlab as cnt
import cp
import scipy.optimize as optim
dfdata = pd.read_csv('t1.T1.out', sep='\t', header=None, skiprows=10)
datadata = dfdata.values
dfdata2 = pd.read_csv('t2.T2.out', sep='\t', header=None, skiprows=10)
datadata2 = dfdata2.values
dfdata3 = pd.read_csv('t3.T3.out', sep='\t', header=None, skiprows=10)
datadata3 = dfdata3.values
iT = 0
nT = 3
nend = 30000
df = pd.read_csv('EPOWER.txt', header=None)
data = df.values[iT::nT,:]
df6 = pd.read_csv('ECROSS.txt', header=None)
data6 = df6.values[iT::nT,:]
df8 = pd.read_csv('EWIND.txt', header=None)
data8 = df8.values[iT::nT,:]
plt.plot(datadata[:,0], datadata[:,23], 'b', label = 'T1')
plt.plot(datadata2[:,0], datadata2[:,23], 'r', label = 'T2')
plt.plot(datadata3[:,0], datadata3[:,23], 'g', label = 'T3')
plt.title("Ct", fontsize = 20)
plt.ylabel("Thrust Coefficient", fontsize = 20)
plt.xlabel("Simulated Time (s)", fontsize = 20)
plt.xticks(fontsize=20, rotation=0)
plt.yticks(fontsize=20, rotation=0)
plt.legend(fontsize = 20)
plt.ylim(0.6, 1)
plt.show()
nn = min([len(data6[:,0]), len(data8[:,0]), len(datadata[:,0]), len(datadata2[:,0]), len(datadata3[:,0])])
print(datadata3[:nn:,51])
plt.plot(datadata[:nn:,0], datadata[:nn:,51], 'b',label='T1')
plt.plot(datadata2[:nn:,0], datadata2[:nn:,51], 'r',label='T2')
plt.plot(datadata3[:nn:,0], datadata3[:nn:,51], 'g',label='T3')
plt.title("Cp", fontsize = 20)
plt.ylabel("Cp", fontsize = 20)
plt.xlabel("Simulated Time (s)", fontsize = 20)
plt.xticks(fontsize=20, rotation=0)
plt.yticks(fontsize=20, rotation=0)
plt.ylim(0, 0.6)
plt.legend(fontsize = 20)
plt.show()
nn = min([len(data6[:,0]), len(data8[:,0]), len(datadata[:,0])])
Wind_mag = numpy.power(numpy.power(datadata[:nn:,1], 2) + numpy.power(datadata[:nn:,2], 2), 0.5)
fig, axes = plt.subplots(2,sharex = True)
axes[0].plot(datadata[:nn:,0], numpy.arctan2(datadata[:nn:,2] - numpy.multiply(Wind_mag, numpy.sin(datadata[:nn:,21] * numpy.pi/180.0)), datadata[:,1])*180/numpy.pi,'r', label = 'T1 S.O.R.')
axes[0].plot(datadata[:nn:,0], numpy.arctan2(datadata[:nn:,2], datadata[:nn:,1])*180.0/numpy.pi, 'g', label = 'WIND FARM S.O.R.')
axes[0].set_title("Wind Direction", fontsize = 20)
axes[0].set_ylabel("Wind Direction (deg)", fontsize = 20)
#axes[0].set_xlabel("Simulated Time (s)", fontsize = 20)
axes[0].legend(fontsize = 20)
axes[0].tick_params(axis="x", labelsize=20)
axes[0].tick_params(axis="y", labelsize=20)
axes[1].plot(datadata[:,0], numpy.ones(len(Wind_mag))*9.2694, 'b')
axes[1].set_title("T1 Wind Speed \n Magnitude", fontsize = 20)
axes[1].set_ylabel("T1 Wind Speed (m/s)", fontsize = 20)
axes[1].set_xlabel("Simulated Time (s)", fontsize = 20)
axes[1].tick_params(axis="x", labelsize=20)
axes[1].tick_params(axis="y", labelsize=20)
plt.show()
nn = min([len(data6[:,0]), len(data8[:,0]), len(datadata[:,0])])
Wind_mag2 = numpy.power(numpy.power(datadata2[:nn:,1], 2) + numpy.power(datadata2[:nn:,2], 2), 0.5)
fig, axes = plt.subplots(2,sharex = True)
axes[0].plot(datadata2[:nn:,0], numpy.arctan2(datadata2[:nn:,2] - numpy.multiply(Wind_mag2, numpy.sin(datadata2[:nn:,21] * numpy.pi/180.0)), datadata2[:,1])*180/numpy.pi,'r', label = 'T2 S.O.R.')
axes[0].plot(datadata2[:nn:,0], numpy.arctan2(datadata2[:nn:,2], datadata2[:nn:,1])*180.0/numpy.pi, 'g', label = 'WIND FARM S.O.R.')
axes[0].set_title("Wind Direction", fontsize = 20)
axes[0].set_ylabel("Wind Direction (deg)", fontsize = 20)
#axes[0].set_xlabel("Simulated Time (s)", fontsize = 20)
axes[0].legend(fontsize = 20)
axes[0].tick_params(axis="x", labelsize=20)
axes[0].tick_params(axis="y", labelsize=20)
axes[1].plot(datadata2[:,0], Wind_mag2, 'b')
axes[1].set_title("T2 Wind Speed \n Magnitude", fontsize = 20)
axes[1].set_ylabel("T2 Wind Speed (m/s)", fontsize = 20)
axes[1].set_xlabel("Simulated Time (s)", fontsize = 20)
axes[1].tick_params(axis="x", labelsize=20)
axes[1].tick_params(axis="y", labelsize=20)
plt.show()
nn = min([len(data6[:,0]), len(data8[:,0]), len(datadata[:,0])])
Wind_mag3 = numpy.power(numpy.power(datadata3[:nn:,1], 2) + numpy.power(datadata3[:nn:,2], 2), 0.5)
fig, axes = plt.subplots(2,sharex = True)
axes[0].plot(datadata3[:nn:,0], numpy.arctan2(datadata3[:nn:,2] - numpy.multiply(Wind_mag3, numpy.sin(datadata3[:nn:,21] * numpy.pi/180.0)), datadata3[:,1])*180/numpy.pi,'r', label = 'T3 S.O.R.')
axes[0].plot(datadata3[:nn:,0], numpy.arctan2(datadata3[:nn:,2], datadata3[:nn:,1])*180.0/numpy.pi, 'g', label = 'WIND FARM S.O.R.')
axes[0].set_title("Wind Direction", fontsize = 20)
axes[0].set_ylabel("Wind Direction (deg)", fontsize = 20)
#axes[0].set_xlabel("Simulated Time (s)", fontsize = 20)
axes[0].legend(fontsize = 20)
axes[0].tick_params(axis="x", labelsize=20)
axes[0].tick_params(axis="y", labelsize=20)
axes[1].plot(datadata3[:,0], Wind_mag3, 'b')
axes[1].set_title("T3 Wind Speed \n Magnitude", fontsize = 20)
axes[1].set_ylabel("T3 Wind Speed (m/s)", fontsize = 20)
axes[1].set_xlabel("Simulated Time (s)", fontsize = 20)
axes[1].tick_params(axis="x", labelsize=20)
axes[1].tick_params(axis="y", labelsize=20)
plt.show()
nn = min([len(data6[:,0]), len(data8[:,0]), len(datadata[:,0])])
Wind_mag = numpy.power(numpy.power(datadata[:nn:,1], 2) + numpy.power(datadata[:nn:,2], 2), 0.5)
plt.plot(data6[:nn:,2], numpy.arctan2(data6[:nn:,0], data8[:nn:,0])*180/numpy.pi - numpy.arctan2(datadata[:nn:,2] - numpy.multiply(Wind_mag, numpy.sin(datadata[:nn:,21] * numpy.pi/180.0)), datadata[:,1])*180/numpy.pi , 'b')
plt.title("ERROR OF ORIGINAL CROSS WIND ESTIMATE YAW ERROR", fontsize = 20)
plt.xlabel("SIMULATED TIME (s)", fontsize = 20)
plt.ylabel("WIND RELATIVE YAW ERROR", fontsize = 20)
plt.xticks(fontsize=20, rotation=0)
plt.yticks(fontsize=20, rotation=0)
plt.ylim(-5,5)
plt.show()
nn = min([len(data6[:,0]), len(data8[:,0]), len(datadata[:,0])])
Wind_mag = numpy.power(numpy.power(datadata[:nn:,1], 2) + numpy.power(datadata[:nn:,2], 2), 0.5)
plt.plot(data6[:nn:,2], numpy.arctan2(data6[:nn:,1], data8[:nn:,0])*180/numpy.pi - numpy.arctan2(datadata[:nn:,2] - numpy.multiply(Wind_mag, numpy.sin(datadata[:nn:,21] * numpy.pi/180.0)), datadata[:,1])*180/numpy.pi , 'b')
plt.title("ERROR OF MODIFIED CROSS WIND ESTIMATE YAW ERROR", fontsize = 20)
plt.xlabel("SIMULATED TIME (s)", fontsize = 20)
plt.ylabel("WIND RELATIVE YAW ERROR", fontsize = 20)
plt.xticks(fontsize=20, rotation=0)
plt.yticks(fontsize=20, rotation=0)
plt.ylim(-5,5)
plt.show()
nn = min([len(data6[:,0]), len(data8[:,0]), len(datadata[:,0]), len(datadata2[:,0]), len(datadata3[:,0])])
plt.plot(datadata[:nn:,0], datadata[:nn:,52], 'b')
plt.title("Cp T1", fontsize = 20)
plt.ylabel("Cp", fontsize = 20)
plt.xlabel("Simulated Time (s)", fontsize = 20)
plt.xticks(fontsize=20, rotation=0)
plt.yticks(fontsize=20, rotation=0)
plt.ylim(0, 0.6)
plt.show()
nn = min([len(data6[:,0]), len(data8[:,0]), len(datadata[:,0]), len(datadata2[:,0]), len(datadata3[:,0])])
plt.plot(data8[:nn:,1], numpy.multiply(data8[:nn:,0] - datadata[:nn:,1], 1/datadata[:nn:,1]), 'b')
plt.title("Wind Observer T1 Error", fontsize = 20)
plt.ylabel("Wind Axial Speed Error", fontsize = 20)
plt.xlabel("Simulated Time (s)", fontsize = 20)
plt.xticks(fontsize=20, rotation=0)
plt.yticks(fontsize=20, rotation=0)
plt.ylim(-1, 1)
plt.show()
nn = min([len(data6[:,0]), len(data8[:,0]), len(datadata[:,0]), len(datadata2[:,0]), len(datadata3[:,0])])
plt.plot(data8[:nn:,1], numpy.multiply(data8[:nn:,0] - datadata2[:nn:,1], 1/datadata2[:nn:,1]), 'b')
plt.title("Wind Observer T2 Error", fontsize = 20)
plt.ylabel("Wind Axial Speed Error", fontsize = 20)
plt.xlabel("Simulated Time (s)", fontsize = 20)
plt.xticks(fontsize=20, rotation=0)
plt.yticks(fontsize=20, rotation=0)
plt.ylim(-1, 1)
plt.show()
nn = min([len(data6[:,0]), len(data8[:,0]), len(datadata[:,0]), len(datadata2[:,0]), len(datadata3[:,0])])
plt.plot(data8[:nn:,1], numpy.multiply(data8[:nn:,0] - datadata3[:nn:,1], 1/datadata3[:nn:,1]), 'b')
plt.title("Wind Observer T3 Error", fontsize = 20)
plt.ylabel("Wind Axial Speed Error", fontsize = 20)
plt.xlabel("Simulated Time (s)", fontsize = 20)
plt.xticks(fontsize=20, rotation=0)
plt.yticks(fontsize=20, rotation=0)
plt.ylim(-1, 1)
plt.show()
plt.plot(datadata[:,0], datadata[:,1], 'r', label = 'Real')
plt.plot(data8[:,1], data8[:,0], 'b', label = 'Observed')
plt.title("Wind Observer T1", fontsize = 20)
plt.ylabel("Wind Axial Speed (m/s)", fontsize = 20)
plt.xlabel("Simulated Time (s)", fontsize = 20)
plt.xticks(fontsize=20, rotation=0)
plt.yticks(fontsize=20, rotation=0)
plt.legend(fontsize = 20)
plt.ylim(0, 15)
plt.show()
plt.plot(datadata2[:,0], datadata2[:,1], 'r', label = 'Real')
plt.plot(data8[:,1], data8[:,0], 'b', label = 'Observed')
plt.title("Wind Observer T2", fontsize = 20)
plt.ylabel("Wind Axial Speed (m/s)", fontsize = 20)
plt.xlabel("Simulated Time (s)", fontsize = 20)
plt.xticks(fontsize=20, rotation=0)
plt.yticks(fontsize=20, rotation=0)
plt.legend(fontsize = 20)
plt.ylim(0, 15)
plt.show()
plt.plot(datadata3[:,0], datadata3[:,1], 'r', label = 'Real')
plt.plot(data8[:,1], data8[:,0], 'b', label = 'Observed')
plt.title("Wind Observer T3", fontsize = 20)
plt.ylabel("Wind Axial Speed (m/s)", fontsize = 20)
plt.xlabel("Simulated Time (s)", fontsize = 20)
plt.xticks(fontsize=20, rotation=0)
plt.yticks(fontsize=20, rotation=0)
plt.legend(fontsize = 20)
plt.ylim(0, 15)
plt.show()
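# The three "Wind Observer" blocks above differ only in the turbine dataset
# and the title suffix. An illustrative helper that would collapse them; the
# blocks above are left as written and the function name is hypothetical:
def plot_wind_observer(real_data, observed, label):
    plt.plot(real_data[:, 0], real_data[:, 1], 'r', label='Real')
    plt.plot(observed[:, 1], observed[:, 0], 'b', label='Observed')
    plt.title("Wind Observer " + label, fontsize=20)
    plt.ylabel("Wind Axial Speed (m/s)", fontsize=20)
    plt.xlabel("Simulated Time (s)", fontsize=20)
    plt.xticks(fontsize=20, rotation=0)
    plt.yticks(fontsize=20, rotation=0)
    plt.legend(fontsize=20)
    plt.ylim(0, 15)
    plt.show()
# Usage would be e.g. plot_wind_observer(datadata, data8, 'T1').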
plt.plot(datadata[:,0], datadata[:,23], 'b', label = 'T1')
plt.plot(datadata2[:,0], datadata2[:,23], 'r', label = 'T2')
plt.plot(datadata3[:,0], datadata3[:,23], 'g', label = 'T3')
plt.title("Ct", fontsize = 20)
plt.ylabel("Thrust Coefficient", fontsize = 20)
plt.xlabel("Simulated Time (s)", fontsize = 20)
plt.xticks(fontsize=20, rotation=0)
plt.yticks(fontsize=20, rotation=0)
plt.legend(fontsize = 20)
plt.ylim(0.6, 1)
plt.show()
nn = min([len(data6[:,0]), len(data8[:,0]), len(datadata[:,0])])
Wind_mag = numpy.power(numpy.power(datadata[:nn:,1], 2) + numpy.power(datadata[:nn:,2], 2), 0.5)
plt.plot(datadata[:nn:,0], numpy.arctan2(datadata[:nn:,2] - numpy.multiply(Wind_mag, numpy.sin(datadata[:nn:,21] * numpy.pi/180.0)), datadata[:,1])*180/numpy.pi,'r', label = 'T1 S.O.R.')
plt.plot(datadata[:nn:,0], numpy.arctan2(datadata[:nn:,2], datadata[:nn:,1])*180.0/numpy.pi, 'g', label = 'WIND FARM S.O.R.')
plt.title("WIND DIRECTION", fontsize = 20)
plt.xlabel("SIMULATED TIME (s)", fontsize = 20)
plt.ylabel("WIND RELATIVE YAW ERROR (deg)", fontsize = 20)
plt.xticks(fontsize=20, rotation=0)
plt.yticks(fontsize=20, rotation=0)
plt.ylim(-35,35)
plt.legend(fontsize = 20)
plt.show()
fig, axes = plt.subplots(2,sharex = True)
axes[0].plot(datadata[:nn:,0], numpy.arctan2(datadata[:nn:,2] - numpy.multiply(Wind_mag, numpy.sin(datadata[:nn:,21] * numpy.pi/180.0)), datadata[:,1])*180/numpy.pi,'r', label = 'T1 S.O.R.')
axes[0].plot(datadata[:nn:,0], numpy.arctan2(datadata[:nn:,2], datadata[:nn:,1])*180.0/numpy.pi, 'g', label = 'WIND FARM S.O.R.')
axes[0].set_title("Wind Direction", fontsize = 20)
axes[0].set_ylabel("Wind Direction (deg)", fontsize = 20)
#axes[0].set_xlabel("Simulated Time (s)", fontsize = 20)
axes[0].legend(fontsize = 20)
axes[0].tick_params(axis="x", labelsize=20)
axes[0].tick_params(axis="y", labelsize=20)
axes[1].plot(datadata[:,0], datadata[:,1], 'b')
axes[1].set_title("T1 Wind Speed \n X-axis of Reference Farm Layout", fontsize = 20)
axes[1].set_ylabel("T1 Wind Speed (m/s)", fontsize = 20)
axes[1].set_xlabel("Simulated Time (s)", fontsize = 20)
axes[1].tick_params(axis="x", labelsize=20)
axes[1].tick_params(axis="y", labelsize=20)
plt.show()
nn = min([len(data6[:,0]), len(data8[:,0]), len(datadata[:,0])])
Wind_mag = numpy.power(numpy.power(datadata[:nn:,1], 2) + numpy.power(datadata[:nn:,2], 2), 0.5)
plt.plot(data6[:nn:,2], numpy.arctan2(data6[:nn:,0], data8[:nn:,0])*180/numpy.pi, 'b', label = 'Original')
plt.plot(data6[:nn:,2], numpy.arctan2(data6[:nn:,1], data8[:nn:,0])*180/numpy.pi, 'y', label = 'NN modified')
plt.plot(datadata[:nn:,0], numpy.arctan2(datadata[:nn:,2] - numpy.multiply(Wind_mag, numpy.sin(datadata[:nn:,21] * numpy.pi/180.0)), datadata[:,1])*180/numpy.pi,'r', label = 'Relative S.o.R.')
plt.plot(datadata[:nn:,0], numpy.arctan2(datadata[:nn:,2], datadata[:nn:,1])*180.0/numpy.pi, 'g', label = 'Absolute S.o.R.')
plt.title("CROSS WIND ESTIMATE YAW ERROR", fontsize = 20)
plt.xlabel("SIMULATED TIME (s)", fontsize = 20)
plt.ylabel("WIND RELATIVE YAW ERROR (deg)", fontsize = 20)
plt.xticks(fontsize=20, rotation=0)
plt.yticks(fontsize=20, rotation=0)
plt.ylim(-35,35)
plt.legend(fontsize = 20)
plt.show()
fig, axes = plt.subplots(2,sharex = True)
axes[0].plot(datadata[:,0], datadata[:,1], 'b')
axes[0].set_title("Wind X T1", fontsize = 20)
axes[0].set_ylabel("Wind X (m/s)", fontsize = 20)
axes[0].set_xlabel("Simulated Time (s)", fontsize = 20)
axes[0].tick_params(axis="x", labelsize=20)
axes[0].tick_params(axis="y", labelsize=20)
axes[1].plot(datadata[:,0], datadata[:,2], 'r')
axes[1].set_title("Wind Y T1", fontsize = 20)
axes[1].set_ylabel("Wind Y (m/s)", fontsize = 20)
axes[1].set_xlabel("Simulated Time (s)", fontsize = 20)
axes[1].tick_params(axis="x", labelsize=20)
axes[1].tick_params(axis="y", labelsize=20)
plt.show()
plt.plot(datadata[:,0], datadata[:,1], 'b', datadata2[:,0], datadata2[:,1], 'r' ,datadata3[:,0], datadata3[:,1], 'g')
plt.title("Wind U", fontsize = 20)
plt.ylabel("Wind U (m/s)", fontsize = 20)
plt.xlabel("Simulated Time (s)", fontsize = 20)
plt.xticks(fontsize=20, rotation=0)
plt.yticks(fontsize=20, rotation=0)
plt.show()
plt.plot(datadata2[:,0], datadata2[:,1], 'b')
plt.title("Wind U", fontsize = 20)
plt.ylabel("Wind U (m/s)", fontsize = 20)
plt.xlabel("Simulated Time (s)", fontsize = 20)
plt.xticks(fontsize=20, rotation=0)
plt.yticks(fontsize=20, rotation=0)
plt.show()
plt.plot(datadata3[:,0], datadata3[:,1], 'b')
plt.title("Wind U", fontsize = 20)
plt.ylabel("Wind U (m/s)", fontsize = 20)
plt.xlabel("Simulated Time (s)", fontsize = 20)
plt.xticks(fontsize=20, rotation=0)
plt.yticks(fontsize=20, rotation=0)
plt.show()
fig, axes = plt.subplots(2,sharex = True)
axes[0].plot(datadata2[:,0], datadata2[:,1], 'b')
axes[0].set_title("Wind X T2", fontsize = 20)
axes[0].set_ylabel("Wind X (m/s)", fontsize = 20)
axes[0].set_xlabel("Simulated Time (s)", fontsize = 20)
axes[0].tick_params(axis="x", labelsize=20)
axes[0].tick_params(axis="y", labelsize=20)
axes[1].plot(datadata2[:,0], datadata2[:,2], 'r')
axes[1].set_title("Wind Y T2", fontsize = 20)
axes[1].set_ylabel("Wind Y (m/s)", fontsize = 20)
axes[1].set_xlabel("Simulated Time (s)", fontsize = 20)
axes[1].tick_params(axis="x", labelsize=20)
axes[1].tick_params(axis="y", labelsize=20)
plt.show()
fig, axes = plt.subplots(2,sharex = True)
axes[0].plot(datadata3[:,0], datadata3[:,1], 'b')
axes[0].set_title("Wind X T3", fontsize = 20)
axes[0].set_ylabel("Wind X (m/s)", fontsize = 20)
axes[0].set_xlabel("Simulated Time (s)", fontsize = 20)
axes[0].tick_params(axis="x", labelsize=20)
axes[0].tick_params(axis="y", labelsize=20)
axes[1].plot(datadata3[:,0], datadata3[:,2], 'r')
axes[1].set_title("Wind Y T3", fontsize = 20)
axes[1].set_ylabel("Wind Y (m/s)", fontsize = 20)
axes[1].set_xlabel("Simulated Time (s)", fontsize = 20)
axes[1].tick_params(axis="x", labelsize=20)
axes[1].tick_params(axis="y", labelsize=20)
plt.show()
fig, axes = plt.subplots(2,sharex = True)
axes[0].plot(datadata[:,0], datadata[:,21], 'b')
axes[0].tick_params(axis="x", labelsize=20)
axes[0].tick_params(axis="y", labelsize=20)
axes[0].set_title("Yaw Position T1", fontsize = 20)
axes[0].set_ylabel("Yaw Angle (deg)", fontsize = 20)
axes[0].set_xlabel("Simulated Time (s)", fontsize = 20)
axes[1].plot(datadata[:,0], datadata[:,22], 'r')
axes[1].set_title("Yaw Speed T1", fontsize = 20)
axes[1].set_ylabel("Yaw Angular Speed (deg/s)", fontsize = 20)
axes[1].set_xlabel("Simulated Time (s)", fontsize = 20)
axes[1].tick_params(axis="x", labelsize=20)
axes[1].tick_params(axis="y", labelsize=20)
plt.show()
fig, axes = plt.subplots(2,sharex = True)
axes[0].plot(datadata2[:,0], datadata2[:,21], 'b')
axes[0].tick_params(axis="x", labelsize=20)
axes[0].tick_params(axis="y", labelsize=20)
axes[0].set_title("Yaw Position T2", fontsize = 20)
axes[0].set_ylabel("Yaw Angle (deg)", fontsize = 20)
axes[0].set_xlabel("Simulated Time (s)", fontsize = 20)
axes[1].plot(datadata2[:,0], datadata2[:,22], 'r')
axes[1].tick_params(axis="x", labelsize=20)
axes[1].tick_params(axis="y", labelsize=20)
axes[1].set_title("Yaw Speed T2", fontsize = 20)
axes[1].set_ylabel("Yaw Angular Speed (deg/s)", fontsize = 20)
axes[1].set_xlabel("Simulated Time (s)", fontsize = 20)
plt.show()
fig, axes = plt.subplots(2,sharex = True)
axes[0].plot(datadata3[:,0], datadata3[:,21], 'b')
axes[0].tick_params(axis="x", labelsize=20)
axes[0].tick_params(axis="y", labelsize=20)
axes[0].set_title("Yaw Position T3", fontsize = 20)
axes[0].set_ylabel("Yaw Angle (deg)", fontsize = 20)
axes[0].set_xlabel("Simulated Time (s)", fontsize = 20)
axes[1].plot(datadata3[:,0], datadata3[:,22], 'r')
axes[1].tick_params(axis="x", labelsize=20)
axes[1].tick_params(axis="y", labelsize=20)
axes[1].set_title("Yaw Speed T3", fontsize = 20)
axes[1].set_ylabel("Yaw Angular Speed (deg/s)", fontsize = 20)
axes[1].set_xlabel("Simulated Time (s)", fontsize = 20)
plt.show()
plt.plot(datadata[:,0], datadata[:,52], 'b', label = 'T1')
plt.plot(datadata2[:,0], datadata2[:,52],'r', label = 'T2')
plt.plot(datadata3[:,0], datadata3[:,52], 'g', label = 'T3')
plt.title("POWER", fontsize = 20)
plt.ylabel("POWER (kW)", fontsize = 20)
plt.xlabel("Simulated Time (s)", fontsize = 20)
plt.xticks(fontsize=20, rotation=0)
plt.yticks(fontsize=20, rotation=0)
plt.legend(fontsize = 20)
plt.show()
nn = min([len(datadata[:, 0]), len(datadata2[:, 0]), len(datadata3[:, 0])])
plt.plot(datadata[:nn, 0], datadata[:nn, 52] + datadata2[:nn, 52] + datadata3[:nn, 52], 'g')
plt.title("POWER TOTAL", fontsize = 20)
plt.ylabel("POWER TOTAL (kW)", fontsize = 20)
plt.xlabel("Simulated Time (s)", fontsize = 20)
plt.xticks(fontsize=20, rotation=0)
plt.yticks(fontsize=20, rotation=0)
plt.ylim(4000,7000)
plt.show()
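# Sketch (an addition, not from the original run): the three near-identical
# two-panel wind X/Y figures above could share one helper. The column layout
# (time, wind X, wind Y) follows the plots it replaces; the name is illustrative.
def plot_wind_xy(data, turbine_label):
    fig, axes = plt.subplots(2, sharex=True)
    axes[0].plot(data[:, 0], data[:, 1], 'b')
    axes[0].set_title("Wind X " + turbine_label, fontsize=20)
    axes[0].set_ylabel("Wind X (m/s)", fontsize=20)
    axes[1].plot(data[:, 0], data[:, 2], 'r')
    axes[1].set_title("Wind Y " + turbine_label, fontsize=20)
    axes[1].set_ylabel("Wind Y (m/s)", fontsize=20)
    axes[1].set_xlabel("Simulated Time (s)", fontsize=20)
    for ax in axes:
        ax.tick_params(axis="x", labelsize=20)
        ax.tick_params(axis="y", labelsize=20)
    plt.show()
# Usage: plot_wind_xy(datadata2, "T2"); plot_wind_xy(datadata3, "T3")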
audit_log/tests/snapshots/snap_test_audit_logging.py | City-of-Helsinki/atv @ dca73dab09ab0f3a051a9f691aec5674c6369bde | blob 3606280fc6bbe5cc3f938b802f0d0776fa8a0fc6 | Python | 11,231 bytes | MIT | issue events: 34 (2021-05-28 to 2022-03-08) | fork events: 1 (2021-05-27)
# -*- coding: utf-8 -*-
# snapshottest: v1 - https://goo.gl/zC4yUc
from __future__ import unicode_literals
from snapshottest import Snapshot

snapshots = Snapshot()

snapshots["test_log_actor_uuid 1"] = {
"audit_event": {
"actor": {
"ip_address": "192.168.1.1",
"provider": "",
"role": "USER",
"user_id": "7e564b45-527f-4ea6-92c7-3d39ba05733c",
},
"additional_information": "",
"date_time": "2020-06-01T00:00:00.000Z",
"date_time_epoch": 1590969600000,
"operation": "READ",
"origin": "atv",
"status": "SUCCESS",
"target": {"id": "be584b90-b256-46f5-83e1-4e6a0f8b4cc3", "type": "User"},
}
}
snapshots["test_log_anonymous_role[CREATE] 1"] = {
"audit_event": {
"actor": {
"ip_address": "192.168.1.1",
"provider": "",
"role": "ANONYMOUS",
"user_id": "",
},
"additional_information": "",
"date_time": "2020-06-01T00:00:00.000Z",
"date_time_epoch": 1590969600000,
"operation": "CREATE",
"origin": "atv",
"status": "SUCCESS",
"target": {"id": "7e564b45-527f-4ea6-92c7-3d39ba05733c", "type": "User"},
}
}
snapshots["test_log_anonymous_role[DELETE] 1"] = {
"audit_event": {
"actor": {
"ip_address": "192.168.1.1",
"provider": "",
"role": "ANONYMOUS",
"user_id": "",
},
"additional_information": "",
"date_time": "2020-06-01T00:00:00.000Z",
"date_time_epoch": 1590969600000,
"operation": "DELETE",
"origin": "atv",
"status": "SUCCESS",
"target": {"id": "7e564b45-527f-4ea6-92c7-3d39ba05733c", "type": "User"},
}
}
snapshots["test_log_anonymous_role[READ] 1"] = {
"audit_event": {
"actor": {
"ip_address": "192.168.1.1",
"provider": "",
"role": "ANONYMOUS",
"user_id": "",
},
"additional_information": "",
"date_time": "2020-06-01T00:00:00.000Z",
"date_time_epoch": 1590969600000,
"operation": "READ",
"origin": "atv",
"status": "SUCCESS",
"target": {"id": "7e564b45-527f-4ea6-92c7-3d39ba05733c", "type": "User"},
}
}
snapshots["test_log_anonymous_role[UPDATE] 1"] = {
"audit_event": {
"actor": {
"ip_address": "192.168.1.1",
"provider": "",
"role": "ANONYMOUS",
"user_id": "",
},
"additional_information": "",
"date_time": "2020-06-01T00:00:00.000Z",
"date_time_epoch": 1590969600000,
"operation": "UPDATE",
"origin": "atv",
"status": "SUCCESS",
"target": {"id": "7e564b45-527f-4ea6-92c7-3d39ba05733c", "type": "User"},
}
}
snapshots["test_log_owner_operation[CREATE] 1"] = {
"audit_event": {
"actor": {
"ip_address": "192.168.1.1",
"provider": "",
"role": "OWNER",
"user_id": "7e564b45-527f-4ea6-92c7-3d39ba05733c",
},
"additional_information": "",
"date_time": "2020-06-01T00:00:00.000Z",
"date_time_epoch": 1590969600000,
"operation": "CREATE",
"origin": "atv",
"status": "SUCCESS",
"target": {"id": "7e564b45-527f-4ea6-92c7-3d39ba05733c", "type": "User"},
}
}
snapshots["test_log_owner_operation[DELETE] 1"] = {
"audit_event": {
"actor": {
"ip_address": "192.168.1.1",
"provider": "",
"role": "OWNER",
"user_id": "7e564b45-527f-4ea6-92c7-3d39ba05733c",
},
"additional_information": "",
"date_time": "2020-06-01T00:00:00.000Z",
"date_time_epoch": 1590969600000,
"operation": "DELETE",
"origin": "atv",
"status": "SUCCESS",
"target": {"id": "7e564b45-527f-4ea6-92c7-3d39ba05733c", "type": "User"},
}
}
snapshots["test_log_owner_operation[READ] 1"] = {
"audit_event": {
"actor": {
"ip_address": "192.168.1.1",
"provider": "",
"role": "OWNER",
"user_id": "7e564b45-527f-4ea6-92c7-3d39ba05733c",
},
"additional_information": "",
"date_time": "2020-06-01T00:00:00.000Z",
"date_time_epoch": 1590969600000,
"operation": "READ",
"origin": "atv",
"status": "SUCCESS",
"target": {"id": "7e564b45-527f-4ea6-92c7-3d39ba05733c", "type": "User"},
}
}
snapshots["test_log_owner_operation[UPDATE] 1"] = {
"audit_event": {
"actor": {
"ip_address": "192.168.1.1",
"provider": "",
"role": "OWNER",
"user_id": "7e564b45-527f-4ea6-92c7-3d39ba05733c",
},
"additional_information": "",
"date_time": "2020-06-01T00:00:00.000Z",
"date_time_epoch": 1590969600000,
"operation": "UPDATE",
"origin": "atv",
"status": "SUCCESS",
"target": {"id": "7e564b45-527f-4ea6-92c7-3d39ba05733c", "type": "User"},
}
}
snapshots["test_log_status[FORBIDDEN] 1"] = {
"audit_event": {
"actor": {
"ip_address": "192.168.1.1",
"provider": "",
"role": "OWNER",
"user_id": "7e564b45-527f-4ea6-92c7-3d39ba05733c",
},
"additional_information": "",
"date_time": "2020-06-01T00:00:00.000Z",
"date_time_epoch": 1590969600000,
"operation": "READ",
"origin": "atv",
"status": "FORBIDDEN",
"target": {"id": "7e564b45-527f-4ea6-92c7-3d39ba05733c", "type": "User"},
}
}
snapshots["test_log_status[SUCCESS] 1"] = {
"audit_event": {
"actor": {
"ip_address": "192.168.1.1",
"provider": "",
"role": "OWNER",
"user_id": "7e564b45-527f-4ea6-92c7-3d39ba05733c",
},
"additional_information": "",
"date_time": "2020-06-01T00:00:00.000Z",
"date_time_epoch": 1590969600000,
"operation": "READ",
"origin": "atv",
"status": "SUCCESS",
"target": {"id": "7e564b45-527f-4ea6-92c7-3d39ba05733c", "type": "User"},
}
}
snapshots["test_log_system_operation[CREATE] 1"] = {
"audit_event": {
"actor": {
"ip_address": "192.168.1.1",
"provider": "",
"role": "SYSTEM",
"user_id": "",
},
"additional_information": "",
"date_time": "2020-06-01T00:00:00.000Z",
"date_time_epoch": 1590969600000,
"operation": "CREATE",
"origin": "atv",
"status": "SUCCESS",
"target": {"id": "7e564b45-527f-4ea6-92c7-3d39ba05733c", "type": "User"},
}
}
snapshots["test_log_system_operation[DELETE] 1"] = {
"audit_event": {
"actor": {
"ip_address": "192.168.1.1",
"provider": "",
"role": "SYSTEM",
"user_id": "",
},
"additional_information": "",
"date_time": "2020-06-01T00:00:00.000Z",
"date_time_epoch": 1590969600000,
"operation": "DELETE",
"origin": "atv",
"status": "SUCCESS",
"target": {"id": "7e564b45-527f-4ea6-92c7-3d39ba05733c", "type": "User"},
}
}
snapshots["test_log_system_operation[READ] 1"] = {
"audit_event": {
"actor": {
"ip_address": "192.168.1.1",
"provider": "",
"role": "SYSTEM",
"user_id": "",
},
"additional_information": "",
"date_time": "2020-06-01T00:00:00.000Z",
"date_time_epoch": 1590969600000,
"operation": "READ",
"origin": "atv",
"status": "SUCCESS",
"target": {"id": "7e564b45-527f-4ea6-92c7-3d39ba05733c", "type": "User"},
}
}
snapshots["test_log_system_operation[UPDATE] 1"] = {
"audit_event": {
"actor": {
"ip_address": "192.168.1.1",
"provider": "",
"role": "SYSTEM",
"user_id": "",
},
"additional_information": "",
"date_time": "2020-06-01T00:00:00.000Z",
"date_time_epoch": 1590969600000,
"operation": "UPDATE",
"origin": "atv",
"status": "SUCCESS",
"target": {"id": "7e564b45-527f-4ea6-92c7-3d39ba05733c", "type": "User"},
}
}
snapshots["test_log_user_operation[CREATE] 1"] = {
"audit_event": {
"actor": {
"ip_address": "192.168.1.1",
"provider": "",
"role": "USER",
"user_id": "7e564b45-527f-4ea6-92c7-3d39ba05733c",
},
"additional_information": "",
"date_time": "2020-06-01T00:00:00.000Z",
"date_time_epoch": 1590969600000,
"operation": "CREATE",
"origin": "atv",
"status": "SUCCESS",
"target": {"id": "be584b90-b256-46f5-83e1-4e6a0f8b4cc3", "type": "User"},
}
}
snapshots["test_log_user_operation[DELETE] 1"] = {
"audit_event": {
"actor": {
"ip_address": "192.168.1.1",
"provider": "",
"role": "USER",
"user_id": "7e564b45-527f-4ea6-92c7-3d39ba05733c",
},
"additional_information": "",
"date_time": "2020-06-01T00:00:00.000Z",
"date_time_epoch": 1590969600000,
"operation": "DELETE",
"origin": "atv",
"status": "SUCCESS",
"target": {"id": "be584b90-b256-46f5-83e1-4e6a0f8b4cc3", "type": "User"},
}
}
snapshots["test_log_user_operation[READ] 1"] = {
"audit_event": {
"actor": {
"ip_address": "192.168.1.1",
"provider": "",
"role": "USER",
"user_id": "7e564b45-527f-4ea6-92c7-3d39ba05733c",
},
"additional_information": "",
"date_time": "2020-06-01T00:00:00.000Z",
"date_time_epoch": 1590969600000,
"operation": "READ",
"origin": "atv",
"status": "SUCCESS",
"target": {"id": "be584b90-b256-46f5-83e1-4e6a0f8b4cc3", "type": "User"},
}
}
snapshots["test_log_user_operation[UPDATE] 1"] = {
"audit_event": {
"actor": {
"ip_address": "192.168.1.1",
"provider": "",
"role": "USER",
"user_id": "7e564b45-527f-4ea6-92c7-3d39ba05733c",
},
"additional_information": "",
"date_time": "2020-06-01T00:00:00.000Z",
"date_time_epoch": 1590969600000,
"operation": "UPDATE",
"origin": "atv",
"status": "SUCCESS",
"target": {"id": "be584b90-b256-46f5-83e1-4e6a0f8b4cc3", "type": "User"},
}
}
snapshots["test_log_user_with_backend 1"] = {
"audit_event": {
"actor": {
"ip_address": "192.168.1.1",
"provider": "some.auth.Backend",
"role": "OWNER",
"user_id": "7e564b45-527f-4ea6-92c7-3d39ba05733c",
},
"additional_information": "",
"date_time": "2020-06-01T00:00:00.000Z",
"date_time_epoch": 1590969600000,
"operation": "READ",
"origin": "atv",
"status": "SUCCESS",
"target": {"id": "7e564b45-527f-4ea6-92c7-3d39ba05733c", "type": "User"},
}
}
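# Sketch (not part of the generated snapshot module): with snapshottest's
# pytest integration, a test asserts against the entries above via the
# `snapshot` fixture, and `pytest --snapshot-update` regenerates this file.
# The test body and the build_audit_message helper below are illustrative.
#
#   def test_log_actor_uuid(snapshot):
#       message = build_audit_message(operation="READ", role="USER")
#       snapshot.assert_match(message, "test_log_actor_uuid 1")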
test/unit/messages/btc/test_btc_messages_util.py | doubleukay/bxgateway @ ac01fc9475c039cf4255576dd4ecd6bff6c48f69 | blob 369966faf37fe25f03b693074cd6e8dbb46b1237 | Python | 143,998 bytes | MIT | stars: 21 (2019-11-06 to 2022-03-28) | issues: 4 (2019-11-06 to 2021-12-08) | forks: 10 (2020-08-05 to 2022-02-07)
from bxcommon.test_utils.abstract_test_case import AbstractTestCase
from bxcommon.utils import convert
from bxcommon.utils.blockchain_utils.btc.btc_common_utils import btc_varint_to_int
from bxgateway.btc_constants import BTC_HDR_COMMON_OFF, BTC_BLOCK_HDR_SIZE
from bxgateway.messages.btc.btc_messages_util import get_next_tx_size
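
# For orientation, a self-contained sketch of Bitcoin's CompactSize ("varint")
# decoding, the encoding that btc_varint_to_int imported above is assumed to
# handle; the name and return convention here are illustrative, not bxcommon's API.
import struct

def decode_compact_size(buf, offset=0):
    # One byte for values < 0xfd; otherwise a 0xfd/0xfe/0xff prefix followed
    # by 2/4/8 little-endian bytes. Returns (value, bytes consumed).
    prefix = buf[offset]
    if prefix < 0xfd:
        return prefix, 1
    if prefix == 0xfd:
        return struct.unpack_from("<H", buf, offset + 1)[0], 3
    if prefix == 0xfe:
        return struct.unpack_from("<I", buf, offset + 1)[0], 5
    return struct.unpack_from("<Q", buf, offset + 1)[0], 9
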
class BtcMessagesUtilTests(AbstractTestCase):
def test_get_next_tx_size(self):
segwit_block_hex = "0b110907626c6f636b00000000000000761701007be2ef740000002061a38b7ac56d7b3697d93618442c260c9d1b1993a5b8d3a33fe6140000000000c2101fd21ed2b31e4e07c605a066f6a7c9fab21c49e02eed3781778daa39eabd214a475c0335011ad84b9f1141010000000001010000000000000000000000000000000000000000000000000000000000000000ffffffff1f03282e1604214a475c00a071844f742400000c2f4d696e696e67436f72652f00000000020000000000000000266a24aa21a9edcfb4ccbe7b5d9497dd2856c64031d29791ac2b606ec8e82adc260898c1eef53e612eb104000000001976a9146d0ca0a7bb477b9327dc7f48431ab4421570e09a88ac0120000000000000000000000000000000000000000000000000000000000000000000000000010000000267d47cfe70b562ca722c5c5d5ca52a3fa0d056e7e7661db8d12c0f1595c10cd0000000006b483045022100d005c2954f9bf9e8ac7c066fe77ce6389149ade3112c607e6f98172c3c6d926702202939a3778032a3856d3c0ab98b945f1cebdaac9ee3b19df360b3221ac8cab6b6012103d06840fd042383b504d910d20a505845713873b24b9966dbd60acf748bc833e9ffffffff3de7fe8f33e73c474a6c8878e2a2ca14dfe78d6006f81f77d776389d27773d2e000000006a47304402206b3f7022c916ac48f6bd09865f72b644fad578cec35c49a0d4e9dac240c55ad702200b2d17db54c99a45012a87cd270c650e5f0e8ce15b84c55ce2361baad85a105f012103d06840fd042383b504d910d20a505845713873b24b9966dbd60acf748bc833e9ffffffff0240420f000000000017a9143c1e28666dfcd79c76fa856c64e4197217272f8c8780fc0a00000000001976a914f12f2c6e408b3cdff1991b8783d1eb428f57814b88ac000000000100000002e8145bc1a8495f9f2dc5112574ee1319f30e9bd6b8c897dc6761c7db178da19c010000006b483045022100ac2e0583388ecba84b9eb6cf6300d4b01ffcd43bf98ee3449c1a93700cc0adef02201b8d46edd63f2580879aa193b2178721561686f950b7f14d6ed776a806c67cff012103d06840fd042383b504d910d20a505845713873b24b9966dbd60acf748bc833e9ffffffff9a33622c4c14ab9862f421e70150e168be588f855a43f14b9bd9ba3efe026bc8010000006b483045022100967afc73528cebf7ac067e5531166aa15638625b900d0ffaee28e3eaf3f49b510220699a4ec1e504300635770a5fa1d8fa5936a8f62931ea95049f3a40d6a4666c4e012103d06840fd042383b504d910d20a505845713873b24b9966dbd60acf748bc833e9ffffffff0240420f000000000017a914ab17cf9a504c530c6ca65564c9faa401086df4f987b8308814000000001976a914f12f2c6e408b3cdff1991b8783d1eb428f57814b88ac000000000200000001dba64d852b4eb5b4f1baf00d6f1838a53d15e356f41106963eb9ec20c77bf829010000006a47304402203dbdc88bf58a934a0b88860dd41de6e639c2ce135d8c86ab7062427b633d17d402207243a2924a755c1b555999f11e847aadc93b17d71cc00526f72b6624376755f6012103f0937a3abc7cf55ee7ede5ecc2a5d8665f4e0f56bf20b4e2bb9a99b2023326f7ffffffff020000000000000000256a23535701966512ea19e958c9644ef61d3aa0a3f5a259770d0f5e790e3aabb51bbc83d36438842000000000001976a914bdad1f4d02035b61fb1d237410e85d8402a1187d88ac000000000100000001dfd3eacf4bbe813a6f74270dd43fa02b195313f482e64478afaea504bd635cbf01000000fd5d0100483045022100de907ffd497cf54ceef155adb2bc5015e0d5cb1847e6db3a6bd1658e46eca77e022032fb6cd66af8b047ebdfba79151a062aa9353990311b5e86e91f7b3862358e9601473044022070b9cbaa95f225d89d6d4210294e1351317393046a86c88d4fe65240fcf975c5022057c7e2d2842d6e33dcc9a43ff135eede351a5df56a06f9fdc0e20f8ff5a4b185014cc95241048aa0d470b7a9328889c84ef0291ed30346986e22558e80c3ae06199391eae21308a00cdcfb34febc0ea9c80dfd16b01f26c7ec67593cb8ab474aca8fa1d7029d4104cf54956634c4d0bdaf00e6b1871c089b7a892d0fecc077f03b91e8d4d146861b0a4fdd237891a9819c878984d4b123f6fe92d9bbc05873a1bb4fe510145bf369410471843c33b2971e4944c73d4500abd6f61f7edf9ec919c408cbe12a6c9132d2cb8ebed8253322760d5ec6081165e0ab68900683de503f1544f03816d47fec699a53aeffffffff02771d1600000000001976a9144e434f5394c0ebf5659a8a4054f2312b724f10a488ac82d6f9010000000017a9145629021f7668d4ec310ac5e99701a6d6cf95eb8f870000000001000000015b1de0c487d74fa4c2c17
03599813fa43ec6947da7eabb35a6f11b645010cade000000006b4830450221008ae0b950b302d59fb6807cbe68acab60fbd50b646205880f8b07378bab480173022067cfc333724a624050aaa52ddd695cfeba0184ae9c3e84483d0674414edc604f012103484baa53718094bc00f4d9c48be6d1abb06d282a0e9160910addd9b5c86d4588ffffffff01070c15000000000017a9145629021f7668d4ec310ac5e99701a6d6cf95eb8f87000000000200000000010129e90e1b145dc795ca701cd42cb668f9d1cc2c3a24b26c801f6b20ea4b2723130000000017160014ab47d387fb53e371d777721e7f5283ccd916a9ccfeffffff025833db650800000017a9148154aa08ab384f2a5b937810407d9376ff262d35878d55a7000000000017a914c6953734bacc2f899450175de465f965be0a0f6c8702483045022100c9c7ff70f1bae23ef5104775422c57800dd4a21ebbc1282d9e2d83befae9e16c02206ddd1c56d00f8c7411fab61aaf7ed742c446ffb19cd3425aee19fc4be1806ba801210251c35898c351a535daec06434f0506ce0e893e9afd3bfff147cd2bc6f2773b2d272e16000200000000010198fb9eaa9951fcff8408404144ae9074b38adb870ba053f082ab5ed87af84e4e00000000171600143645f37194c312881c0146965377f81173a32fa7feffffff02a0309d010000000017a914ac2a4d0b5260645e92e3891e88e3e80231059046873369d7540500000017a914cddb1ce06ffee4597419b13759aa748393baefb187024830450221008858f6b6fd52c3cac6c6c95ed89aea2ec4e95593f52a1d07427462cb34585ad4022027b350ed8dd78bf0cf4a9719546a7d4938cee2d15967b2d35e8d2ce5abb219e30121020aa8861a3a87bc2bd44c5aa4217987487b3c22a36e88fd087a5dfb4efe42c59a272e160002000000000101168069c5f80626250a09713ee253dbfaef4fdbe32f617d3ccf8ef294fb6990dd0100000017160014f39ff8983be60886862b727a3456aec819428418feffffff02217eb02e0c00000017a9143216c93a28f69bd8cbb686bfc5e598316a2a4a9f87ea20a600000000001976a914f497b30fe0396a0b06e2dc92e9d3c89c9461790d88ac0247304402204b6b09f5fc2762674ad1d2093bc5060c6957572bc88e4b85419a0cdd9c7b256202204febc4a55abdff6a216496e4a4c733c963215be21eee379b322d4ef91bbec64e012102b9c0104a08835542d217dda7397767058313632109b83b9d5494de097da2e4f2272e160001000000018a633d916565583db78273cfa512bbfa4cf4469d59885952795946718e22ab89000000006b483045022100ed4e2c4d24d86f03963045ca655b4c5cf3ba47e1e4c9fdab5bc8211e5088945002207b69f123ea77785565cd0b4da437c50bf9db9a6d14ef03a684edcf8be7f70c0c01210358670ef559d5ff2f4032b66c6d8eafa87feeafaa2f4b52db90300aef754b7cffffffffff02c16dfc01000000001976a914bf1e72331f8018f66faec356a04ca98b35bf5ee288ac0000000000000000226a20bd5c41369a4e694050452c895351722b951c99ec4523a373ca82cea86f6e162500000000010000000129287a78bc080d7b9572bbe9bfe5baf11051805dd8f33ee97789ff47879ad94400000000fdfd0000483045022100a24a466667dd5ef8247ebc64987587f04ea9a9906712ec19bb6e4d80a82728d70220433e33cebb8386ba9f6e4a1261f376fb138dc2d3c325b226707c93fcd2772182014730440220159dabd708f637982ee616f0f6f9bd403f88141adebf5b375cae3524f57687b2022056ae534b3bf69c7e31095aeabc5e2ea5a0bc45ff2f0b37eeac6a2a3a2e6dfdf6014c695221022650f60dab18fd397f5f8e09e6327f692e6f7949d2ac17019437b988bac1ca80210385e8679c18eb0dd00d1d497ff7773eaa8caf1ca0a48d05eec9d5924ccc458a452103a38489db89bf9c36e3706470af4e5adcd28171d12bbae0c53743dc435bf578ab53aeffffffff02a0bb0d00000000001976a914f12f2c6e408b3cdff1991b8783d1eb428f57814b88acd82a01000000000017a9144841f2154df82090fa4e2170b5c9e7e97323a1b787000000000100000001093f4b7fe3671a24608a9e709bb3dcde71ca923c24b0908e0cfdefd31ce1ecb800000000fdfe0000483045022100f4a1902927374ff1f437f09595dfd9144eac46636365f9ad0c85d2d387a3d8d40220432a953071408af828364e12305bd697f2260af581a9823fde1bbe8a1832f7bf01483045022100c72ec73f60fd966a85a4d0544f283447226aa8515c3816b405b49757679bb55c022020db4845d060f61567c8bcb602018d5bd764c30546a17bce1bef8af2920cda06014c695221022650f60dab18fd397f5f8e09e6327f692e6f7949d2ac17019437b988bac1ca8021025d2bddb2f844f8edf69c87cd2aa21ed8a5fa5142e3
3a4b0c9120474033f3c6e62103a38489db89bf9c36e3706470af4e5adcd28171d12bbae0c53743dc435bf578ab53aeffffffff02a0bb0d00000000001976a914f12f2c6e408b3cdff1991b8783d1eb428f57814b88acd82a01000000000017a9146f920eec498c4506fd8a96b14c9de8458d4363168700000000010000000001017866a14ca91641aa002618126e35e82c2b925bf8dee54995afdd5d77b732b5ef010000001716001431633dde18be694a5d1f29b3d47d6692a1cf9bf9ffffffff02486c3a0000000000160014cf86b95cd263bb316f3c5f99b3e4fa6e22ea74c0e7d55700000000002200200a6aeeff1118d80cf7a038fe5f4320a282e68742acfbc9b8aa5420a9be2d518b024730440220039e436da58ada9633e987555c79beb169eda1381907e8f1231bcb28c664b822022001d6efee0fed3a84c06d65acd4189b524b418d949577b1fdfade36d9cf38717d012102cd69d69abbe9cf4db40845fc3b74426555a964077969c46ff3237dc5c6a5d00b000000000100000006beaee69b9fe111c8343ab75279ab94374b951395857d2669fac37d6fa7b42400000000006a473044022034192b4927db7514541fb360a26f0cd33b37903023e274794935e37eeca8b73c02202d0c1e37ef9e9c1c93a9e66c7c4ab7dfd2fbdb2a49a73851a8626a9b6eed8345012103dc107e1d9be2a08a2d5c5dcec5adda0127875c52f3dd2f94289373b55a5e6a06feffffffaffe7e5fc069fc8f1e437f1f3b67cac823789b9a7d13a8113654533fde9d8e19000000006b4830450221008e90e296a175b7497ff7d8e01954d784f705b2602e94c5341ef2e13bf0ea032502202aa096f4e07c4b135b1158439a710e56351162a50fde53e8cf226b902a449ae6012103dc107e1d9be2a08a2d5c5dcec5adda0127875c52f3dd2f94289373b55a5e6a06feffffff87fb2e1475d3f5eda484e29617935056180b6625a2c45f489e3923fcd3552685000000006a47304402207ff85f6ff701d169fae5d95d36affd71b87cb571e68917e15638f99de5c8c2e102200f9ab2452d482317ed7313384193675852eb75b56f2ab8ffeddaae41099e3b80012103dc107e1d9be2a08a2d5c5dcec5adda0127875c52f3dd2f94289373b55a5e6a06feffffff7c32e379dd9c6c544ac740d8ec8932c5f91705dbe11c17e36b1be5ee2866a1a0000000006a473044022011756409a24b70345c66f81a825589a10a640512b54e63183e50829543f0cd130220085ec0324a44042ef221d43bd3cc165d3c4ad45d74f887a307230e6a73e5ef61012103dc107e1d9be2a08a2d5c5dcec5adda0127875c52f3dd2f94289373b55a5e6a06feffffff273a3c362d9e3a76aa3d43e56f420f6605706bcc7c31bcb663a52b86f68fd92f000000006a4730440220486e52a7b5d24a5264d6bedbde62fed1ca0bf2dcbe2efb08f492768ff0c7efd202206c0fbdbb2f3489ba3040aaafdc797ee99f9131d70ec0efd92995eee143c0e6ae012103dc107e1d9be2a08a2d5c5dcec5adda0127875c52f3dd2f94289373b55a5e6a06feffffff2014abfef2891d3e9f4d46dd4095a80af5d6a9e3b4c0ede4a8100d1935b4a572000000006a47304402204d2b1e7b646fe65fbaa8f45664c7571f13d96627ec32937c9630f689a52bb2f40220092c00254a3c45c70c525866316726168f3ac5bd373b49941b78526f2b10f5db012103dc107e1d9be2a08a2d5c5dcec5adda0127875c52f3dd2f94289373b55a5e6a06feffffff0280969800000000001976a9147b8fc1ca8b173b7ba08320f26b0127239ed6040f88ac88933e02000000001976a91448c9dbf253a849e14523aa49f48cd05d04fd569388ac000000000100000001f59173ba2426d9938452d0fd0fec517fa9fb6fb56c9d799b48fc95809cff4341010000006b48304502210087b051d6d719082fe0001db04b72bf6e0dfdd7b3ed89af6552e167cd1d04e3b5022019e85854e07f834bab93a8032d362131544ffda5c84ac2da32480fae8c1f48300121027fb2a4204619886bbf6b83045ffaf8036df5fe9c5c3aca00c765d9dc770b169efeffffff02289a01000000000017a914d07d06e0b8def3e701aa4d53fa1ea89d2b6058288730e43700000000001976a914a0da45c432fccd8cfb6a29f993d2a227ed780f5b88ac0000000001000000000101aa354ea6163f86775f68108421428290d09431b5ba0c967dc6f2eeb85f23fca3000000002322002097e5b78f426276cb8feddf09a460ea23062466d9b4b898d1c933b085c9909d26ffffffff02a0bb0d000000000017a914f8b769031f8b511d12938d10032f02fe7cde519387827201000000000017a914b7202c112fceb9292f0ba4df599cd9f45d12132b870400473044022038212d1f2a733a657dc26c9995720df6aca6203a6204770ac1d175536fd96785022000f40d0e67e6eb547ab35d305e6ed5b2b8d05d1ac6bea496b
d5dbd4f0311bc800147304402200cc8a3e780037a65193c7e08b03cf841ad5fbf86510916f73f8f0fedf900fe8f022037413def7ed471c02efa40aec0d66139a37ee9bc2429957d17e95b5cd34b07d4014752210319b491f74221ac7e7185e172e297b65b8ff24bcf32ed78c7479161b2f185bbc42103f50b2db2984e8741da7e7b89737b4700b938e77fbee7c5c148dd09496ca1d76852ae00000000010000000001013a4f99f51984548906bd10cff6a251c478c90faf87fbece920330132832cc97d01000000232200200ccf0c84714ac0e08870e25a5a41d6d7a1e2d771fc1c1b16798699f49570d641ffffffff0240420f000000000017a914180c78310f1440da67e6fd07446992e62598caba8736fbc6000000000017a914b8a173e2fc78c13b753757839b0e24be0e26f7fb8704004730440220500b6df55815183a5a455d79b8a605d1f627aa4675c66280ea91966e72511100022033dab55e3efe192bd8342e60479610a2ebe72092852352d225bc944b7d2f722001483045022100b3e9645b06521c32ffeb152ea702449b7f0b6d42507f6eb947fef091c48ae84602207510d389cb53e491c63e186555f8fe06e383a61fd10a7f7862c31abeb25c83b501475221027793a67aee7421002b7efc51ac87de9371fdef93c215964b22a9ef9cf94a938e2103f50b2db2984e8741da7e7b89737b4700b938e77fbee7c5c148dd09496ca1d76852ae00000000010000000001016f5a9f008ec0ee750368cd1b0b06d95ecd8f662332264a27ef121db95bc7bb680000000023220020bcb29feb381341d473d10981927c8c35be4490d71591e5df1e579506e1b6188bffffffff02a0bb0d000000000017a914c186f0062607836020ac1b3fac0eb610e0f0f31c87827201000000000017a914180c78310f1440da67e6fd07446992e62598caba87040047304402203e3f1359d8b9fb5bacc7178c9d73e9a756fe9c111cee3030e778357c72205bfb02207f97a1f443e0ab44b9d8523c9ac1dc374878a30f30938f4acc8d12537692b46501473044022038cd47a4e4881e2ad1d07958ca4068bf766c4772dbd8203ce3621fbfeed25419022061b4b673c27e98693118025851cd7896f0312814dca78c2a7277d5554c91aa800147522103d49f0188ee092a752dd56069fb54bc2cd65f3cc76931cd4c91fbab35b98de8a32103f50b2db2984e8741da7e7b89737b4700b938e77fbee7c5c148dd09496ca1d76852ae0000000001000000000101dcd76aa69f456760d96933a3eadc82a7bb9fef79a9175e48ea28fdd21cc71c280100000023220020f8559c2546d91832e0efbafb8d6ef7f7387bcc09620dff9d759ea1458d5f9d4effffffff0210270000000000001976a914492fce1025793ea7e8609161b2cc6cde64d9ecf288ac202a37110000000017a914816f7da5de170bdc9e4f3330187b39dc628482b7870400473044022025e5ac2ebddda7db3cb2dca34cfb3ccb21a94c4359574bc8194e6308a54f975002205ba36363716652494f12e399d8cb4174d4bc1a1d0c6aac138040cec23bd3e12b01483045022100dd2684068f159cc0bbfd96f7719fe2eec5a91c448870ffc62318b7381d257e89022016ba279d33db64cf29dad9b60ce261ab34b8ba6b047da049d2d192392ad3e4d20147522102e5d9d885abb3113ab4dc311059aa5ef53f304fa959102c7d1bea40232c4aa3b52102c32a93442754b82030f6bce634cb2e09b4dac8d91937fc9c03d52592e0722bd652ae00000000010000000001011a993448a04a1342dfe7aae4e8ae6fdf59eabf629a67492b56d21420f5d8eff80100000023220020f8559c2546d91832e0efbafb8d6ef7f7387bcc09620dff9d759ea1458d5f9d4effffffff0210270000000000001976a914492fce1025793ea7e8609161b2cc6cde64d9ecf288acc0ee36110000000017a914816f7da5de170bdc9e4f3330187b39dc628482b787040047304402205e09011611cab6e21c9cb9e71c3bf10390d4a6031fd931dcaf52fb8ed40f0be5022023a03c91bded2a14752170f462d7390e23c0d975c507765f378b005e98e0f75301483045022100d9bdc1de9fc231ea515fd2441cf5ce9d57af508c3ac0b9d14d716378ddd0872602203b4e3a314eea56b813e8c352875ac1080e47b6d5c7e57837e26dece85ce1dd140147522102e5d9d885abb3113ab4dc311059aa5ef53f304fa959102c7d1bea40232c4aa3b52102c32a93442754b82030f6bce634cb2e09b4dac8d91937fc9c03d52592e0722bd652ae000000000100000001847ed2c362cd9409cc1f7dcaa6610a4c28dc59859172cfcc30a729a194ce8f8e00000000fd5c0100473044022014cdd8c30cac95f87d6523c02c672634d86107e9cf919cf9358a55e5886e870802204161442f8d0a7e7baca076e3e7ae1fc976244970c8b9805c583953bcb541cad70147304402207207f128cc8210c9a6bddcce
147d355e65b8f5dc65a7c83483ae4eb859ce126c0220175ea6f58a8a986a46111820459499a816f78c5c360ec659bd1c5530e575df29014cc95241048aa0d470b7a9328889c84ef0291ed30346986e22558e80c3ae06199391eae21308a00cdcfb34febc0ea9c80dfd16b01f26c7ec67593cb8ab474aca8fa1d7029d4104cf54956634c4d0bdaf00e6b1871c089b7a892d0fecc077f03b91e8d4d146861b0a4fdd237891a9819c878984d4b123f6fe92d9bbc05873a1bb4fe510145bf369410471843c33b2971e4944c73d4500abd6f61f7edf9ec919c408cbe12a6c9132d2cb8ebed8253322760d5ec6081165e0ab68900683de503f1544f03816d47fec699a53aeffffffff0202201600000000001976a914d4554500de53d1d2c8f2319f92bdebb8f6e8c97888acd1ba8c020000000017a9145629021f7668d4ec310ac5e99701a6d6cf95eb8f87000000000200000001166da822091a4c67aebf4d2b7dd96cbe043a43d3f949c8dade50381cf02cbf82010000006a47304402203a87d337f940eca6d8d0471ae559199048e12bb3f8c4440731dcd6d3964b2a1a022010949f8e41405e261e3d473341d9c6b7185318b4a4b9fcdb1d49371c4cae78fa0121033703fc32419585657ee4cdea798d511624ad3031bec6a773fb1669001656a364ffffffff0210270000000000001976a914492fce1025793ea7e8609161b2cc6cde64d9ecf288acb026e70f000000001976a914d23fa1ac34074745d8eb964c8b3a7ec11828c62888ac000000000200000001f2aa10ec7504c38b9016ec18190a0fff8313d36e06d5dccdf0c240c8ad51d3f0010000006a4730440220052a505c2ecbb408ca4aaba0bb4f1d12f59cc026c162d923a46c2965a823f15302202d065473079f59593523aaa0a063fcac385a4a480a9facdb68096a1b200c09d40121032128d388d639cc2f5a101f70c6d34b306d160228c8482d1c43a23a5295e0d0e1ffffffff0210270000000000001976a914492fce1025793ea7e8609161b2cc6cde64d9ecf288acd59e9009000000001976a9141b77b73f47e5581ffa64fa75535a3fdde42ec50688ac000000000100000001ae5b87cb7e0fc1028fcb0d0ccf559f23bc9afd528b12a23a7b2d7ec1a6b8c787000000006a4730440220399e0876eba69348cf19574780647dd3a0d54e3e2fecd4503ae15af3f2c8da970220672f9da5c7c6c0a04d200b1e880b5394af0728b2c07201aafc43ff11fc2e8422012102f0a5a088427caeb5d1da04d577252b58c920b99943179cf8055802d16f6cb6a3ffffffff020000000000000000426a40c2b1c3b9c3b9c3adc3a6c3abc3a4c29dc3b4c3b4c3b4c2abc3a4c3acc3acc3a4c3a9c3a2c2abc3a0c3acc3aac2abc39fc3afc29dc2aec29dc2aec2adc2adc2ad0587c700000000001976a91450d448e9e5b9ff77fe55f8b12aee4514fbd6386688ac0000000001000000014c61bfe477e4c84ece13ce2df5e172355ec0f2d72c9857f84d02b57de446f2f5010000006b48304502210097f60999871a4a75d080d6eaf053620d5799368ebe1be9944a8d534ea488a63202201e58932e364275b383c7f1374947887c6effad198152b2c7999b4fb9b6232ed8012102f0a5a088427caeb5d1da04d577252b58c920b99943179cf8055802d16f6cb6a3ffffffff027d73c700000000001976a91450d448e9e5b9ff77fe55f8b12aee4514fbd6386688ac0000000000000000426a40c2b1c3b9c3b9c3adc3a6c3abc3a4c29dc3b4c3b4c3b4c2abc3a4c3acc3acc3a4c3a9c3a2c2abc3a0c3acc3aac2abc39fc3afc29dc2aec29dc2aec2adc2adc2ad00000000010000000195dc9c0ef75f8a7619e203c4a155778c6e0818a5a593624425b9fb879817f1d9000000006b483045022100f459f5a0ed9ff93b81dd39a04c3ad3db52e08603a1ad20478a2058888e152c98022054a503be5ab5f27d3e20e93e64a023e4a7a35e6fe2b93208833ff63726c5f4e2012102f0a5a088427caeb5d1da04d577252b58c920b99943179cf8055802d16f6cb6a3ffffffff020000000000000000426a40c2b1c3b9c3b9c3adc3a6c3abc3a4c29dc3b4c3b4c3b4c2abc3a4c3acc3acc3a4c3a9c3a2c2abc3a0c3acc3aac2abc39fc3afc29dc2aec29dc2aec2adc2adc2adf55fc700000000001976a91450d448e9e5b9ff77fe55f8b12aee4514fbd6386688ac0000000001000000018d7cc3c5112dc8d409d19663a7709ff93d4e6930fb0f1658e2c8b854359770db010000006b483045022100b48a973e42a375898f90ef509a741aee016272f887833d0f55bae5b127e1465c022009bdba5fa7c9f6b736db5618d5145656cd658a7f538574484a7ab8fdff1f68b8012102f0a5a088427caeb5d1da04d577252b58c920b99943179cf8055802d16f6cb6a3ffffffff020000000000000000426a40c2b1c3b9c3b9c3adc3a6c3abc3a4c29dc3b4c3b4c3b4c2abc3a4c3acc3a
cc3a4c3a9c3a2c2abc3a0c3acc3aac2abc39fc3afc29dc2aec29dc2aec2adc2adc2ad6d4cc700000000001976a91450d448e9e5b9ff77fe55f8b12aee4514fbd6386688ac00000000020000000152e43e3e4a93aeabcbda763c9ffd72ce46e5dd2462462dcb1d16e190a5d3fd2801000000fdfe00004830450221008ecb2511b7beb4ff494496967421304dc0c564b7f9357c3a697207fa8859cdd702201a883ff90e9e57676d210610978cfaf729a6198c744a83c9fdf30a6193be3ce901483045022100cccc710a6ca9b823c5a27125ccffafbd9faaddb6f91f0b316338e4d7ac38566e02200f13b3340cc6bf27d18397b17bdc36a75fc0a08d5cf5324b2e1ab6bbb9927969014c69522103c96dd608e178d4326b54faf7f927f49f4af30108c4f5fe0fa095fb1fcf450ed92103f7c0234bc223da9ae29dec220451bee6534554bf06c26c2962698ed1a227922a2102803acd8b52cad3a7f1e4eb3af76356a38177cb92fd28aaac52a3d7e0aca65ebe53aeffffffff02804f12000000000017a914a78c8df61709bda21c3a179d6829f6296d215bc987e6d9c1000000000017a91400da8e626fb6a57eddf48463ac5edfd1b992362e870000000001000000000103b99373125a053d92a2330d4242d65bc3796a0f2c0da8c92e9ad6ecf216d3136f0100000023220020ee3be5477276bb4a92837a58c3464a453e0af1197745fbd539e85c3a5bb79007ffffffff580eaa9bbdd4500d02f3b2b1dd1f3ee8343c7d0c0d26750cbaf378972e76947a0100000023220020ee3be5477276bb4a92837a58c3464a453e0af1197745fbd539e85c3a5bb79007ffffffff5c44dfff8473e54fcfe8ff784815fe1b87db034676d3ac0b19edb7a6aedaa37e0100000023220020ee3be5477276bb4a92837a58c3464a453e0af1197745fbd539e85c3a5bb79007ffffffff029ab522000000000017a914404dd6af6d1a6c3d95a8ce4b4610922112a4b4898765e70a000000000017a914d736a738a6f2444a829b216e33bc7357a567792787050047304402206612dc4e225cdaa8fc36c44fc688bf084c5d7e0f613d8f8a2ccc6d519358119302203ce713426232452412ff2a25bc476919bb261fa9200da068b2305f8e2b8790de01473044022012e2998e3bbc3b71850ffbc2a471e972639598cfa6f86c25541ccf337102db45022042e5f91631c57410250bdef4b592a61bbb4b4187447ca8d0d07faacdfe2d2d7601483045022100fb71373d27eda0da09c1f24b2d357a715464e844aa6d11d6111dfabba96ca7c802205995ade49f656a8d46f56f491b554a07b2ba02788c973866a43784c55f1b03d4018c21027b34fc673cf918a3638dd7138e167f85f47b26e2d83cfe2387353aff3a46238fad5221032ade5a75c0dfa7fefd2fda11efc66987a21aff4ec6bcc55f66ea7729d20a7a152103a53acf7bf86f19b4749d9f005c316e58f8fe4ec8b5bb5f96aecca8744bde552d2103f72f1126c21923a1bc9183a864dad5909ac01bff9f94015e3ee231fd12cf62e353ae0500473044022020dcfdcbdbe7d351878272c0a790d6ed2ae6f14571b99ef3329ba7c84a558ba5022052411ab03cdd09f7935be3a97790b4c5c3636e91dbec804aba0640f152425c6f01483045022100e96f597396f9ddf0acd65a4037fa56d3481963be93fb4357379e6486444a04b102204d285b650406c01a77002fe1c223f19b239b458558d901c22cf13e03aaa38ace014730440220247f40759f6d847594ce0b9f9f64cf04fe173983428d9faea080c7d76838736a02200c19a4cc60ed8636ece8078379685bf71ff33f734165e7a9503ad404601f35eb018c21027b34fc673cf918a3638dd7138e167f85f47b26e2d83cfe2387353aff3a46238fad5221032ade5a75c0dfa7fefd2fda11efc66987a21aff4ec6bcc55f66ea7729d20a7a152103a53acf7bf86f19b4749d9f005c316e58f8fe4ec8b5bb5f96aecca8744bde552d2103f72f1126c21923a1bc9183a864dad5909ac01bff9f94015e3ee231fd12cf62e353ae0500473044022069b629e1e7516f9f9f161e8a7bc1f1d9f8e220c0868351d9084a78aa2db778d102202d3c0b5d77e63bf9363d15feda3fe1a6c159763f501a3cdc058df858dab2dfe401483045022100d02e13b34c995b86ff2a6a9c52a6926f0cb91afdf6a12cdb6d36720a6d59c53d022018a57dea99cb1e48a65c05640308d6824735a41410fae95a3bdc8930d5b8bf8b0147304402206e8d3582d0ce73bf442f279093e1bc2311529d5d5738a951fe00087eab4d2a5102203ba2f9463992292425b2e6c60d1b8f05ebff5f05e69593f64b73b534c42ce545018c21027b34fc673cf918a3638dd7138e167f85f47b26e2d83cfe2387353aff3a46238fad5221032ade5a75c0dfa7fefd2fda11efc66987a21aff4ec6bcc55f66ea7729d20a7a152103a53acf7bf86f19b4749d9f005c316e58f8fe4ec8b5bb
5f96aecca8744bde552d2103f72f1126c21923a1bc9183a864dad5909ac01bff9f94015e3ee231fd12cf62e353ae0000000001000000000101783742859085a1ba83b86059872a220a93701ac199a7ca32cf5c03505d13be550000000023220020c6a880a33968424680d5f27c79186cc172b63b1e1f0edb4cb59f86dede7c7984ffffffff02c63d00000000000017a914d3858161e1277b4c4e82da41713b166cc15dbf1d8709de05000000000017a9143e97fcca9eef801326d83b322330294a7629c1c48705004830450221009d2bc1f14f6d30693c32832b38c7f1e4a36a648f264953aac2187123364bbe7902205b5f732d0c3f662a68e8265431a9f83b33dca9a0be35a8599528eb104611c13c0147304402203f8c1d72e1a0ad2529cea5a194283a801003524e567e3eddd0226725c3361756022031454ca70f3e8815804d8138b9d31ac842b43c53435f0d4e18f6fc211f59381901483045022100f1e259337989b379bf045bd18718f32c3ede3e78af2e601be3f951db3d9439bd0220713fdd9b18a9037d66f013422194ac940207cb73d638d37199ee93d0e2dce4c6018c2103676d60c28e8efa5a9e557a5aa8245568bda7bb6b90c4cb3f3813329a8814bc15ad522102a92fdb37ee5935b6ac438749a5f8aa86a1029a8f692514c862e4b80c63ebc3bb21031586ebe7ed6d8f232bb14e9b1eae589058ca8991d03eeb1cf89865a0bb08d02f2103a2177152243dc8b36a9a4b9fbddeb97fb1ef5af43b86db641fb1de7b52380c6353ae0000000001000000000102c5e18d40f9e2caeaed4cf000bccb43f17a1a08521463eac69a5de86399bee5cc00000000232200205551e81bd1ffbea8c217bb1277c7b212a444d25eb881dc44a2f1d41b0dec1794ffffffff73e8a3262ba3e1a2f6a7c68a5651feaf1cb1871585dfbe36cd23a6739dddb46d00000000232200207c3b2c152b490805689cb8cd1390b2e4737fa03b5ce01c178e19241c13f71522ffffffff02a83b00000000000017a914886e9f4e8a916aeff0be5187b6f182d3640c62c487f9eb03000000000017a914d414a2e0e4dad4e5929c697762365a8aa8661c73870500483045022100fa5328591a828ab03155d4637327eab18b31e1562bc52b0eb4b9f55d3c2f740e02206d43e29b08620bfcb93970827151948250917db3a77593dbbd1b833e3c48311f01483045022100c30983d1676d48f916ab25a3fe779bdd2ec2258c577643594643695fc21f4a49022016f3ea17efc927cfcb127424c7f2d8d012862e864ce1d606f8e45a6e3052b0a7014830450221008b5c73b66105fb6aa08f3f8f86cc1f92c2b3a1dd508ed2599677e159758e124502205140a8d1ad8aef8b7b2899a3c327c765c930dc8b4a3419ebdff4b55165b4cd9b018c2102e096bbba59c6ddfeed310071611fec93ffb9e1faba3de44b6516ec681d773035ad522102306f922aca108018a91a016995083f490b390f4574d4236f789be372772a76752103708a8f2e88aed1247ab91b3dc63da73ad7b31349b9b667474ee9bd91f1b2eeed2103793fb7b0e3e2002c539f0a91b73cd7061809c4401c8c95f10260ae49e03f1f3653ae050048304502210082dab55e6abad52dfc4acd2f7cdb61972b70d3e46ae594615be55a4aa41f92c502206198b5fb21cc2f77c96a582c27a8de1b9c2a98fa8eb43efabbfffb370031f28501473044022060709d78d6fb8221ea22276b47b8219f4585b83f0b78eb730f728c0f2d483ff9022077a3a406c38ebd377b28da778ca5658779834c4592d78abb05012d11e566751501483045022100f70dd70527c62425da14cba04bfa895b9f4cfef24318ccc96eea84bac6967f2f022009a4bee9a9900d964fd07aef02ad632965e875c37dd8f69dd3602c78e63f9ad3018c2103cb51cdef5d97382bec4bf90a65baec275f54265a6189b343ba7aa311c45b3774ad52210283bd54b9208e3abf16c8a6cdce9c2c83a77ebc16647a0605eaa1388bcf8825d92102db4b84d259000fc419712c33e4b6abf33342b78272a06915b98625580054b0b42103e6ee59b76d1992d6721373ce541776a29b4ef037524e57333828e11ac375bbb953ae0000000001000000000101f51e6daeafd42cdd85fa26b4681af99c50cb66768728c7ef9ae27232ac0de2bd0000000023220020012873e69e0269984e55a9023b02118a8b8d8a334b103a7f832aaae925a5cd50ffffffff02b62800000000000017a914e0c9371d23b32d8e5c6e6ad0321f6f582134539887880900000000000017a914515f928c4348109cd239a157e5bfe6741c861069870500473044022040a5dc1ab950cf5e3a601d3eabf8087b90afa9c55debef069792a2ecdc0b3d6402201765fdfa150419dc38183c3c55e1fafcc18f7880d20dd1f7ac44d216a8cd475901483045022100c8d15f7850aa03d88a82952d69b074a7ed0b54e0423f03c0d09bd6e7a1cd0ea102203a6
6f8c3623b12f59e50f8a41c031a419d222bf0b036e236a68bc267e13a27650147304402203a3c70109213800f3311d6c9b8f0cf5468b47aaa0a15b7394cd0d9d017607d2d022031deab1eb61606b7699825ee1b830482a87b1c868ca18bb5b90a01bc0a13ca6b018c21037af19c4b6a2dd7ed7a3591b45e22887eca645a71fba08264c04479ba10f7b3c2ad52210239d87099e55e43416f5b7e4ad58534a48679ccd9014cd5350182e31b97e56fe72103a12b7b72bf6e5540fd2e263fe5ebd60a54adca9cb2e26087d06b21ca2f36e9242103be3d908b75b3920f8c4c6ba54eaa51d81f3aa91e640b69ffa974576b958f3eb153ae00000000010000000001016bb48b3017416295d66ca2a0702550c90fc2fd1379d779a8a12baa5ec3ae7f4e0000000023220020129d13b576e1e88f6a861f20f997b95e43a2c5652cd3327bb6cfa2744c56b7b4ffffffff02867600000000000017a914bdc83160200cff3dc96f1ac95ec471dd7f77375687221e00000000000017a914fbf14daa4220e005860848ba02d6730b9a9bf22c870500473044022041cbf0bf9ffd94a0e2216f0dffea28b4498ecee54fe86182cfd73130f7401bbf0220296e4770cccbb5de5bcffafa104e652654d5fce8213744cad64cda711e8e2ba201483045022100a1b9f62481fb331ec30d44107121df4a3d29a07398651bb87750d1cfd8c8384d02200ad6ceee5359c4b658bcbb44cb972d5e7da8d2aa1fc35805999c0588728a523d0147304402206a4353e1c8911fa305a1948e1f3faf0b24a40d381cd37d998f8ef614136a8cb5022057785debf6fed4d23fd9feca07359b03048fe2d0d2654ea900ed87509fe61330018c21020e42f398eeb53dbfc6b30944a6b85fba33033fc587d725088e915c3d21fe9ff4ad52210312f5731af2929d90c22db71771168471e10a23811b5fe3679d683e25989fc56b2103630e932bea7c477c8e1eda40822a35e81e1d09b13eae810a9a097c0e8ac091212103e74662f0338750a557cb51bef24b9789f6867603df6e9fededfaac6aaeb54c8753ae0000000001000000000101755475e7a90a1ad61516fbc2779ddce64ee1a5c022b5be490bb13563d0732b9b0000000023220020299924b4aec03050530fbad5480358a63bcf7fec8035ff90f81445c94cd39c9effffffff028d1d0f000000000017a914357da0aabec4bb77acd95943b2153bc18f7e034387b62318000000000017a9140129c375744adfb355a73be5d3f374e7b2c65ebb870500483045022100efd80fdac3193afeb556b87b34594cf4351946f174515dc8e2550e3a3f906939022053b0e09ca28dd1124dc7458ad0cc3b94f7fc581344d1cf8fea1eda2b34382365014730440220349affe563b69c19ee769dc67a8346e384e1e63d0279b6014f4afa6d4354476402203653d36b420f1ecf0fe7fdfc9bd5c381948faf631575ddf435b8558367fc280701473044022037e002893797f05666acfe40503536c74067dc118ed024026f44c0656404367a02204166b5c22c38734179c7192b7327aaf39569540df7b61f31200945480783c366018c2103509bda3027c3326d6f97b11fb0f6bdf2f41e58d7cb2881f063bb516216aff8e0ad5221026f39f01e014c967cdb4c899e9272e4d05caf1c4f54ffa072c2b440c65cde42612103c0b980b81e4cf208b2349c32340064d0d6f19b6982fa555663411e275c144d542103c0e3324fd4c4054c27093e5fdc9d0bb9065941b49ed481540ea4d3b600f8aa6e53ae0000000001000000000101e5af1c7849543feb3e0f84fe6937e3526c970df720f466facc6d4cf6e50010f30000000017160014f3508c1f1c2e7ae14e9eae1a17e7aab7cf7565a7ffffffff0240420f0000000000220020275c4b4fce6fcbdff8847e24c7e678991d97cade87716d63c12bc0bbbd83ae6e699efc0000000000160014849c481a266e0e2361f1e776ab794716c3d65d820247304402204be4100887b3ed656e2e0fcbe5aa2b50e2d6e63eae0774752756468068d2390002207fb6b56e5ba243e27ef94ff9812c0dba20f58d58aba85a7769b6ddae01cec638012102e1a15d63a739a853dc8f213947d609f413c4284e29398a1cb261a3a5b7eed00600000000010000000001021bc1cdec90bdf112d289346b616b178a952010643fc26027c7906942b4383d190200000023220020b30231ca3a5afc7ed4320cfa3f7bb7209e25ca29cdc1459af7041e41c22be5aaffffffff6419991318dc4ac5de815dbd27e5f4e2f3a7b9e474d2326c42202982c40b738e050000002322002080025340770d83da8cac39b92d24ba29b8dbc007a1d152a796e7189f36a2bb22ffffffff07022f0a000000000017a914e517950cdd43f6daec46976d40e24ea3382e0c1287423e0b00000000001976a91491f095aa40db649de568ca6e58395e5f2f869a2788ac941108000000000017a914d729a2d249c6dd0d1536e2
8f0bbb2909c614d0ba87b4ca0500000000001976a914d603ba8fbb0e12fb8f5d241ec8a75dd52db54cd788ac86ea0d000000000017a914f62c41160ab5133aa870dbec2fed119f27a1712087de9f8a010000000017a91495d1bb81594d6387c687dae9888895bea09fd54887f4050800000000001976a9148fe278735814712914e1f4dd4feacf890a0440b488ac040047304402207e005f7eda3b46989b3dbc38a92e6167c9f7a4b5bae5299c0faf19c369f9bb7b02206b0b0bc13560e8c792e7aa157876641b7227542b156705d063b27570bb853f7c01483045022100b65c7429b69eb49532235a68c5cf7206e82eaf61d1fcbc84d39845cd0bca8a2a02207430d1b67b59a8bcfd55e67a4173c9b6432de4d7183b02812a7031fde0037af901695221039bbb140645775dd4a925754f7870a095904a732cb69a6640f5587ca37c01890621030690623ef3ee26c5a30db82316502d3be70728a1f6ec18357d37016517391f512102d6f00cc1ad0adbd36febc862817165c063350b253939cc0f3a899a2a27456d7853ae04004730440220100b72d74c6077635f4c5598507b4c66ca42f52f9b3763b658c373141906c6dc02203e9d4c9fb5e838d98e50904d733c3376cf6a7f4c5c67c81571258611b250b97a0148304502210081ffe77dbc98823262ecd8809aec84cb5575eb769bd0e99945a3f8599ffff17902202d4bd0a930f1c2458363c9fa7972f3f83232ad77e326fc4a3d856aa99e427ced0169522103416ca25eb6428e5dca90bd2252d8cb267756aec7aa91f7df60fe7c680aa25fd121039c95a9d5a5ef35596da4105c61cfc1eed5fd889d275fc7ee66689b6c2334760b2102fcf9bfbb6af3cababeed5926376f7552cb7e6f50fbc37d7766bc5b7b2268723053ae282e1600010000000001016a1a788f5907c751a3bd1a71253971eec5800e0e8f1be78017dbfbe48543ed6c0000000023220020961badc70ceb133ad62900c6a1ce44798382a1d602bbfc3cb268733f5eb9ab51ffffffff0290f81400000000001976a914f865469a9c30bb5839563a0fac6e1eedb4d1976888ac28094a0a0000000017a9148232de979b8640b3bb8797f7249562cdece7efa8870400483045022100cc801ad4c157b83c5e82bf8f945f2808c0840408c69bd4c2b56056183891b11a0220555a78549635f18e2e8156e06d9a2293193bbd5847dedc9d0c33f0e8b8a77b030147304402200f8d0c489b405d3496b4336ee56740bd06f3fa11bddd02d70c12f204344f482c0220055d4702e50f4b4a9511b3aecd5e68869ef9cb88c72c62e705c9f68db357f83401695221038f5aa46817d58edfd7f38c2b61185d5390a36c517c7e8866af668f3a47dbea7a2102ec03d7a5801c2acba06e5759ea0c9fb4d0fa753e49bd64f9e1505d183f5c1fa12102339fcf4fb1a9cc1d73abac893d48481b1523c76a17cca5d78759a749b85c9e5253ae282e16000100000001bf2991cbdff3c11c41eb2ccc60d95acd486d732f7ac4d8912949209e4b2b7e4b01000000fdfe0000483045022100bfdd733688ffeefdd2bc34b3214fd82146fe6029cf024c467caaa065a1ca3070022064c21747549ddb9c0bb60ab44dacb9f62b012439741ac0e32a897472d9611adb01483045022100cb725490821e50fd958dd61a5e1e2c0df63f1a075c4200ef6741c7613414aea002206de34e75c6b5cd9b0a20010182d86a79c76506c423b0f4fd5da1e0e3dc57704c014c695221031d81994d8c030f3937f5e4b45bb07000996be31d2b8d76296f2d9026d5f58222210219b046379a32ab3cf21884b3062eb38193384b59db4faeabeb6ff181abb1e385210308f0fa1c203f6aa3ff855eda9c40b19a7beb337b1372d46a85ab040fb1fedb5253aeffffffff02102700000000000017a914c21694b06428f73d53536e7143e38b751bb4004c8789e202000000000017a9142655164bf94309abf4fea7ba380190b31551655887282e1600010000000001024022fcc4109ec62bd81587d0546eb4f9370118285297ed667b6edbbc44fc21b40000000023220020df857d7e8a6b3e3ead6eeb7081fd2ea3817934124f43c9c8303b29818c57934fffffffff0ef0d843e8c4fcf813b643b89320ef40dec88813fb1e867f9ad829588ad87c490100000023220020f4e8a2799966a7975cb8a8fad960242252a344c0cd8044818b967b5be449a469ffffffff0211bdbc00000000001976a914d4d248c5caf5a56517602b11d02b513eafc0200588ac57bb5e000000000017a914fb4e152780ad55999d2716dec6005056bec11f57870400473044022042664a2c52d6810ac6c82d1c38bdafc1ca9df3965da69ba45f43e6b7768503730220788b9d9fa0cb694622ab7936e08c79ef241677b9862782ea4c9ba6f8849acc9e0147304402202f654cfd7e39a0df189013914beaf367f333e5c6ac9d71b7348a8670d135ad6d02204bfe6e4dede9b0e66
6458a7f8578845311f1981e951240852bc5a6847f622c6a01695221020f70307f346ba978c843f9d6d06a469fd5d0bec55c45a6f1446d29d2ada766c621028d97868438981be108c00f43c96fe3bc240646c5ab230002bbc725bddd614b51210265931c67e68567b09af5a36fc09639cd604fe2686a9eaa38d9eb947196f1f2bb53ae0400483045022100aec8e837cccee7f1910a35266e0d266f3ee3a1d432fe5457555618af1cfafa4e02205752e9ed9d567fe9a7d8414d6cb8496a4c28e5ba8e71af258ae17d0206ddadc301483045022100f1670e023ddc3db07ae82e0f701ec2237492bc061445aecf2b1d00d9dfb255cb02202eafc245489da9e1560dc2b0d22c6d6df2bc0654a04309fce08823ca1ff9c3f80169522103d06a1d91aa6a007953133e6098cfa9dada47a174dfea930923a2bd20c25f522a2102c14ddb040441a3f3dcd120fea097ecc5a19b405f0de3729372b6bd45dd73860521021c770434376c5aac6c1146eba883213b7d8f395527bda01b15a1633c0ee7c4b653ae282e160001000000000106376b733300be4310cb41f993ec8ae3c5096715c13f4a312014a3e062117d40b301000000fdfe0000483045022100c664f7105a0552a730210470b0741ffe09280eea87ce15e1496608f8f0b51234022053c8af616da053b4fba529cafaf1dfd1dd55f339577c6b61c316ecb78f17a0f001483045022100e459db926bdfc8f57af3f96d07bc98705ba08b51cdeb3be43431ef3b01abb76602200a7f43accbbdf3b5d5ecff4633870b46fa3a506bb2b4178e718c728c83da8436014c69522103a81417973d3d687d738b1dbe03a19c1b3722aec5c61242232d736a8ad73260a02102758f07f9c08fb6cec29412f3d50a5b120dcd22b084fa583eb9d947badc76950c2102927d062dae2bd8cd067ad73d1e65887ec504bcfbe71ce0ac5658154ccbfcf4e953aeffffffffa09ae749a38783430e029bcfc0d783cf4b97e4f1001dbbb0c77c7c0fd860d5c701000000fdfd00004730440220322fce3f6fa1769490f89c81e079b74d3ab561e16a824fff8a7a927fec4598f202207725fe45c320895a95fa1e0425bb3de12dc6f8026c486dfb0e37da5fd34c094801483045022100f641b4284b8199b4648fb060429e5e837f9d3297329117d7434747a21462709f022073a1f7c3597b6396b56165572d06b204bfe64a14bef3942777c5a4d25a8f5754014c69522102a7bb709f576a8e0d6f1522ed618474c61301d98e21a5ead8d69414694f4c7f172103f0838c2daa1572e0c92dadb962c3d981647de1a07bfa38273f3b762f005c71012103476c7a4a9a07c52afda5b9735795301bd32a8f8762d5320b5d8586c86db259dd53aeffffffff698afcd3c131143c07114f80ab8113ed3bcad883d6e6a734c3e7c044c7d55c2800000000fdfd0000483045022100ba998c695d7e7ced1bc9805839204e2c5d9a63ede96dc973d527b2903c496daa022045ecc5315af6dfb164851d57c0960c444fa48ab96b2699e3c9ecaf9f20395efb01473044022059c6720a260fe9ef69f8db1c8657f9bbe65bfca31bbdd729682d6c3cdcaf298d02201e9d3dff095d9070996a3ba2d233dfa10b940d4d7da8e31016fb81f9aea2c89c014c695221035cc710d90499eccacd8a78c1077e4f4bf31e30fdeadb9945a432f6940836dca9210365583312c0bf91733f53e3e05f71036c455ba962ad1375efe0ad23aed8bfa48e21035d3517321374a373c8f71355623ab472c9f0289d649b285c25c62ad6e8db7c4553aeffffffff42f4eb8a3185e0be508e6ee2e4835b4cea37a8ced6ae25e2b425cd938200e5ef00000000fdfd000048304502210097d82b77310fab53fdccfd8abeef998091ba5015409f4c4eee3acf827007f563022061da5ede746c1fdffb0e36479e4ba8a412f40c84c481ac548c1b0fc64a02db7c0147304402204fe1f67b15fb97000c414e42df4b4aaa17cd11279153b4e7def4d55a80429e6602203d8884b8f9c1727358216c02e2f3fee85fca7d69c9a93e8547b014193d1ed4b3014c69522103397b578d3f9fa9bc40ceea808266dea7d21bdc5da1c072da0c6ed444570621452103608f2b435497cfd26ca6b804b1d122e0898c48ab4ca969e20aed3a6b67756d72210312d3bb0b132f1e41aad18c741d61487c87eb69da8b69312e704c0f92ac5277d753aeffffffff85960ee68c2a957cf0676f29973cff076b78dae1093592344b1abe09767674230100000023220020c8f2d928ecb534bdf300dda8c7779c545e47d615aa9372fa17b012f7a6bbf949ffffffffc7412ef795f3fda2b7467a55393861fb4891d29a6ab8e1332dacef412de6a2a900000000fc0047304402200156a9e6c1a5712c188552e5e8e52b70f73bf94ab6b7aea0c79a92bfacadb89702206a3e326dfc4cdf7fbc3feb1eb53a664a6168cdb54d2317f19d9ecf98913ca7d90147304402204101e113a828
13a7f6fe043624968899a1f25ff0a769e9fedfcd3a8acf543fef02205da13082bc4c310894ac3b59ef7fbf32a88a34824ded0481cf9e570dcd5798ad014c695221026674760119afcef2dc1eddef46c48ab7c48bcdd7f4ad24ef6b3b3f1a98b7157221033c1ae5a9b237124debac0966483a1b60a5f2b6deb610802719cd3afc77028d16210355895424b5a3cae8ed6c651bd0f0b5cc06c71017fb9945071ab806183b50f9bd53aeffffffff02a08601000000000017a91492bf2cadaa524d997e9b11b16cfc342e5b60742a8759880a000000000017a914bc5e0a92e814675087f25c53c42514e9614f882c8700000000040047304402206a0776e9fdebe186e0c571908e37f3ccd4da114cb90877b72e72600b47ff86ef02206e0f94239fe3af453a7e2745f4b571c85ae8d20ee279187d2d651be463fce81e01483045022100e895ee75161f5eb67f831e49ec8b635f2169b3783338eedd2b0b343202fd590302207be655b874c18b6f6a62a2449983dba1ab522db94a8f58d64048709e834266900169522102b0b8f59535adfb417276774be1951113e3119ede78d3912c633f9f126e1b3d3d2102f8e6a36c3529f70abb30a968cc08c9bfe32987a91433fa3654087790153e2bf2210271f6f8961fb9964e7836e654434eb7f6bdbf55e751d28b426990faa55618d02853ae00282e160001000000000101ac3ca984fa2d8a4e34547cdac1dcd763485d4162ed09462f6f2b8585b61901770000000000ffffffff02a0860100000000001976a9149b4c63166ec69472a62e6b197e0b7306f0c8b0b088acb4560700000000001600147fd1ffb597eb0ba638e58cff1feab40ab1e9a2640247304402200d96826a4f207c4fea0db5304b214ce64f8bdf5b14cb263621917d135f8458b30220040f55e17eae9d5aa6984517ec5c9e3a28f12ee4049426630381cc5bad65f864012103b571de8d687903a3e03bc075288db665c7dca0678df465ee319ff8ab62c22d42000000000200000000010186fa2d4c2aba065aba3d49ae45309644db7ebbfba3e1e9ccd1795235b0d561c70000000000ffffffff02f9850c0000000000160014032930b67fbc04176b152f5b1cd7af9bc7dddbf6079de7000000000017a914e2247d0485c584949114bd4f28d773d005b43a57870400483045022100db415db5370d5cc684910e7d77eb7e13d68bb180b51f0fe7bdc94806296f08c70220699d68bce8c715a5b47a4c9802223c44e28127e1acd14e721e1bfe61a00cf05d01483045022100d9aae6cf7889a92c89aa812e27af7b3ac9d2a8a12d25e58f59f0c1d4adf816230220664d98b2bd26bc37a4404341304c2fd9764e73b8ceb6dec21eda3daf652eb23d01475221027adf381ba54824f16f2d8b74117ced7ab2f25e09d34ece74b1d739d404bb801721028e24143fc8c02c8ef8b82bde13bab2ddf41f6fe4fa3b73f5baeeb88223f8281952ae000000000100000001a56b243a960b0b5cc61d401465b035d01aade58a1e21b4b4525d0da3f951488a000000006b483045022100ab340ef34bef61fcfc02e755d899c01f5da5e6dc9188a52e2d44fec5bbbcd44402200a194a40fd5290587e2b61307abd28b63a1a2a9a5449e7de26e965ca858f189c012103f35ce23074de2ed32b68b49b82edb3a3d14331c7b4b034b346dacf08b9786a48feffffff02a0860100000000001976a914a091bbb34a14e85f0ee455ef4d109ed1b1b7d75d88acdc150600000000001976a914de81c1be516867b0f58e582d48d17f0d499bb6a088ac272e1600010000000001019f574d1350753959f3936df8c20b4d715f457e08dd4eb8730603635c470e7e8c0700000000ffffffff080000000000000000426a401f6da2800fc7bfbb83907c0ed39b311f1408237d03cfe5fece31f4b1008b5f73a40bcc1fb108cb85f43ee4934973677db88085d0a7dac655bf6f59ba21a6e76d1027000000000000160014c3473bece8220457c59e59bb44e9334d732726ddb9420f000000000016001462c4448b73016645ef52093720afbf3c3dc214ccb9420f0000000000160014680e3d391cdfb1e118d8181d1c33b4e52bf3628bb9420f00000000001600148f53ce68c39e81825c82d73f98aa7938d225fe38b9420f0000000000160014a870c28dac8e1655dcbc07a548e2c6aa9ac7a90db9420f0000000000160014e26918a48e026c9b1d1604c7e5370d88f7b4a71d7ebe0f010000000016001466c426ecfca004ed819193cf9ba918f05a91a73102483045022100f5bdc9e2a7faa9d0ab75d8d7adbd1e788613caf0d1db6556dce17d5d78d2f8bf02203b7f55c309e1909aed46aa0ab9532390cf76f7adcc73c917c5edf35ca510d56501210254f191cb77dda35b452d11adbd9fb42f74484a28e95373e87810cf8db05575f70000000001000000000101dc23a8ec68404e861dc99cfb5e0a02eface4f44038e6fc673ff6e3565dcec77
f0700000000ffffffff080000000000000000426a40188778e3f04d1a4bcbe303493e67821b22542b8a872b3b70f6fc7b484dd9ebc1bd72b3fe1112e7f77f9372b067cbfb19753683b6540651d95e0854a0571fa9a4102700000000000016001489c46baf3235da1e1dcdc8f443a1b864ea21818cb9420f0000000000160014800e92e430730f1c5b35004bcc1d0a0d4a2c8a6cb9420f00000000001600148fa4216befeb4f0048ebcfb58e7dc5061f53807bb9420f0000000000160014d84150e05d34b711376301802df7c83a84864b45b9420f0000000000160014e0260def62efcdbb6e07270f3fa95be12a192176b9420f0000000000160014e0b238625578ca7cac627fab6faa9a12da850eb5b7f1e90000000000160014bc9e2b6bbeb5603224e67a00bb585df73c0dd4340247304402204458de1209fce1467f37626542d8f11d26267fd5eb107a8690de4a8b86676f21022074f62983984a62fb3e97abdc86b5117bb86a6588b842d3ef28320c8162408216012102c337bf44b938ed698a0a4b3b3f77473628a6d52a71c7b5e9e9e4634b167198440000000001000000000101e5b037a6b63a6ec6f1728cf5d5d9aeb5faa65fc6091d94de055724149404071a0700000000ffffffff080000000000000000426a40f8891dd3370f764eef99116a8af7690807b42655595dadaf040e3680a866969b4681c67a65f58b59dde484b2cdc931902abd22131e17a2419b792b62d2d3adbd1027000000000000160014165df090d36fb85064a0461ebb3b77e929642167b9420f000000000016001444f244f4fc5b3025d1283b227ae73bc12962fc70b9420f0000000000160014490eb494d20a21739c1564d83f8f251fb1ded76db9420f000000000016001462c43f20a1547929f4244b3feb58fe66e2654bbfb9420f000000000016001478907e6a39783ba9fcedcafbdd3676819c3a957eb9420f0000000000160014c877c8ede58cddb18b0412ebc16d12876e428e592a7b9d0000000000160014f39a7db5ee8a99e486b2f4ef67ee410505ccb9eb0247304402203065349138ca1cc20a1729e9c7ce8960bdf988a97c2d719799dc107dae4b0ef202206c653a0e1f99680ffed1e4698497b94e76d9c38bcb117a883c5c99fe4bb6d4a6012103b2ab86965e1d9c0fd7cf93b2f78b2d42e35c50038b2f314930f0ee0ab95cc0d700000000010000000001015872f80d7c73249673ac5d49079b2f57429dbd28e2f53f708046aeca86894d730700000000ffffffff080000000000000000426a40b5a0691637f1d69f4a731f8c0d269d82ffdfa5582c8794e305846f89435e0db0f8229a9827b021599c5bb22ff92091b2ca080c07c555d6a91104e84c294877b71027000000000000160014cca43260fe68caab35ac15f9271dd2931447003cb9420f00000000001600146285e07d5532ea91cf107ea7160ef5887ee4051eb9420f00000000001600146f7b50fc2161c8d4c86b25bc7f1957ddd08d9f9cb9420f0000000000160014ac4b662ff8d183ecc8d5c1e94eb8d27d746eb6eab9420f0000000000160014c50af2fe86bb97930e3ff392ae4660caf944085db9420f0000000000160014ec91a8ffce08064d3e3f7da3f60838a00f1d8ce69d04510000000000160014ceb96aaa49b36ce9fdd0053accd39eab9d901b2802483045022100c8921e18981cb56df738d4a33faef5a047b7baf833803062e8bb36fad8cc838e022064adc5966f8b20fa55fc07a369477ff8b8fd8bd18bfb1bc213dfe1358aff68e9012102033d8bbec38725a37577854fe7c0ad5551e0c3c90844d58e81e9a30991a72dd8000000000100000000010105b3415e96195a2619d70611297870a9d72e09fff9ac1d65744e7beb2bab2ee60700000000ffffffff080000000000000000426a40a6f583b2c9073c122c43f1ac0b549f9004e77d106a4efee42abf6ec4205413ce8fec2f6dd445f08b6205d0daeb81c9dbdd16b97bb65b7025a0070bf2ab5e174110270000000000001600148df6bb0dd7fb2ddffd8e576d12dcb53116b97716b9420f00000000001600141cfb0f1fd6f0c4313f62054fcb9049cde6d95965b9420f00000000001600142e08cc7a08d609a1cef5c5c4dc51c121acc6e735b9420f00000000001600143efb7eabc4b06558c2446f686539df00ca71e816b9420f00000000001600143f9269fb79fad7bbcc9665b34af65a6b59555918b9420f0000000000160014de7d5007a269f084741c9ea95666002783501c59f147c3000000000016001463f516111359f85bfbf3c2c90674e69e95f8976d02483045022100e46a182506b3d58b9ff40b167cf6a22cbc52d9960a3f42764c09c23469e99c9002201aa8937757e287f92daba1c0c48d6ae8c45fa5ed8395e9afbcef764440c3dd9401210209868fa9569649df756fd64e8a62de7d7dd1f8ae54d13b6c72e521e1397fdc8000000000010000000001
010c4d92257fa34e78a30452c5fceefae7265423264c7309738818d33361c114b30700000000ffffffff080000000000000000426a40778cb22043326693e50594fdfd2f3e3d97d9cf5d8f8b0edb429481e1a807cd5e5a9fb2daf08aac36589b723a80dd7f5d3248127a9aa9ef48df1690b81a3d2b6210270000000000001600145d84aea17910471362e5e1b2ebf24adeade33026b9420f00000000001600142f08770d31f9d6b5a63d3fca6ca8df29bb7bf1f3b9420f00000000001600143d489fd981621dab6cf37b1b2a390e4c2927e310b9420f0000000000160014672946bdf9feaedb1cf06244eeae16227f72a696b9420f0000000000160014b9f8d05b2faa4d54a132295ab2186c2c8609fd19b9420f0000000000160014c95c36a5a21f859261478ce15ec144ffe2fcb2f75b1c9a0200000000160014aacaced13b0c6bfe8320a84ed29eee7f6b8bffbd02483045022100d7a06d2c72313934480c19b1f8ee0287676000e8cbde90d78727db4f37617c0002203b120acbd233cf486343b36e1ccab3e74d3b6e3f249c0337ca1263e2ca0e9fda01210262cbe4070b52e0d1bf2fbde044b1e45814f6004d95f5dbee49d015efaedc3f000000000001000000000101170086bc90a5119475815ea72c9cc100f5e8c35435050a9b8522e47b0ffcb71f0700000000ffffffff080000000000000000426a403b68b8bd86ad92d9babb3d9891c679334e7c09a6c35b49f744a27e2a7b6b83b127fb107270de4d2e92c3726e9be2d8888e86f47f52b9a7ee580151ac6a465e7a1027000000000000160014ebd000531088b2133683eab7f64dd222c763b095108e040000000000160014a049fabc2ac6ff0b4a64fb1d8976c2ee411b035fb9420f0000000000160014000b71c5c1d74490c685bb27e3ff20b83edd34a3b9420f00000000001600144330a8347ab251aed580272d230ab6c132beb0beb9420f00000000001600144f594a1ff6b1938be0e3b019fee595df528b3740b9420f00000000001600145a20c2f4a4eaba95fcbba6e558aae05a6ec876b6b9420f0000000000160014e9c418eeac00a5a457920b6a349a4f6b18d8b722024730440220298a64cc59525970f942636cda2c9387be3153769d77a9a5cad7d1c5c9d083e2022048a72b64c1dd67ef1c4a605211dd4dab48fa3d4c787e16a94a4a36254411f29401210258fabe134f3cb5e8e64a1a2abd595cb81fde5aaadf8b043bc2ea77249077a7ae00000000010000000001012077156ba605d6ed07bac08932d11e7d465bd9ea8c1e7f6ed23b89baac05851b0700000000ffffffff080000000000000000426a4025c861080ae0c61ea25ded51105bf4908156429677e3bbf866b5251fc23872fd6bc1b32a04b256b9e3ab61433d45c52a01eaafe846402b18c7931760d4c3118e10270000000000001600143464c2e2b02c94c1a3551ab3a8d6c707f1c70d25b9420f00000000001600141f8c4b7673982f4538ca18bc7f64c374b52ee5e3b9420f0000000000160014215b4c2fa77168e1fe5b1f96e5cf65dc1eaaa874b9420f0000000000160014b23ad3aa1466e122c63395f0b4ba67ba26a26bb5b9420f0000000000160014fe9251dd5f65baffa91ce1e98aa826d90baf1f3db9420f0000000000160014ffe0a02d60ed9da874ff640813c3351341dabbff64d17600000000001600145cdcb7b392bc4ffeb751c79e757794f91174790402473044022004940d98a716f14b13d795a09de447897348050f8aa72d39754d3bd9ef51aae8022030c78a0c2445b607211a80e635584bfc84c971f3efe548d4012c53c18e6eef17012102c287dfdc6586d7ddab302befe0776a6e8327227f2af9d3716fc587c1534b04810000000001000000000105e5b037a6b63a6ec6f1728cf5d5d9aeb5faa65fc6091d94de055724149404071a0500000000ffffffff5872f80d7c73249673ac5d49079b2f57429dbd28e2f53f708046aeca86894d730200000000ffffffff79110aa1883e14c5cdee4767e7406a1908dcb28ed75c302133e986740cf6307e0400000000ffffffffdc23a8ec68404e861dc99cfb5e0a02eface4f44038e6fc673ff6e3565dcec77f0200000000ffffffff12266e548fb215c8dd897d0f270b2c6d699146ddb44ea760ae2105f8be6983b70400000000ffffffff0540420f0000000000160014210011f488eacc9ab0b672c532565ef69b8c254b40420f00000000001600142d8cb338931568e9fb3824c7953bf873b9034f6040420f00000000001600144e72956727da37c22267f1b8119f5df9c038995a40420f0000000000160014abae590319b172ee14bfbd3abd5865639a83f9f940420f0000000000160014ac9ae83ba9dc0e5b8b768ec98d0ccee17a3722dc0247304402204246c84c9f26b9e07a81c2639e906281b19521134237222ba878fed6cfdc614f02203b25f47ea0349bc0e5adb966dcb8228626f9d
36316c26fbf4d4da1fcce2894af01210286b2c1fa614cd643d6e8fbc554d718d779c3ccaedcddd7605ccab88a7dfe77a00247304402201c8fa3aa73d8a5ab6125b8bf10bc8565a326fea818944c29f88a6af7686597850220086ae0cf8f0892779faf312ef325e3a887947865cc765b0c5cc0a80cf6ec0b770121031446a4e474388622eb1435ccc15cffe80aa6402150dffc761bc2501a2b893f5f02473044022063c603bf27f4ec2a7bc84e9133b0cd1ef73847352384e84258349c1f3bf2117a022023a05e3b35f18ff1a550202bcabbc977525a0fa556d1a2c294a1d9558befc476012103ac258f6a610102145e29910f20e6d7f237dcf04f3f8751c1c39499d124078b050247304402207b147ca1230aa1b94143dc6c798fc30b2b0e1ff52d82ea5a514b92faeca915f2022038c7b6d05cfe5285bcf7a8bcfb5822895a3a5429fcc2b77a1cb757a3f17e39920121020b49512be6b34bd41ba74511f0fcc18cca93b797446cc88ba93875022d33497d0248304502210085ec97c93eac9a70cee0a0f3b866c260d55fa6fa8a55055813f06c7dfd52c0c202207d2a0c67c39edf7c72041a063184c50d528e1a52681aa721b8ca4632f68d92bb012102f3a43d7901d63e70d7f90dd372db0687de6c490ed443bcb24b0cf53a029d34310000000001000000000105170086bc90a5119475815ea72c9cc100f5e8c35435050a9b8522e47b0ffcb71f0200000000ffffffff37d3cc2f52284854ea68d2b79f0103cd03ee13c27fe334514df46b55adb1f6430400000000ffffffff0c4d92257fa34e78a30452c5fceefae7265423264c7309738818d33361c114b30200000000ffffffff50b95164469b0ea83bd903e180d3252f950a044839de6c8702d6f02c2673a7d60500000000ffffffff526db28c24715302d79249907767789975dd34aade5bf79a5fb82022d331c5e70600000000ffffffff0540420f000000000016001407fa1c6ba0baab63f85f75923b9d7e101bdb148140420f0000000000160014a27bff562ab39e89b849f79418183b311875d18e40420f0000000000160014b261f794e40cfbcf0e206271c7dce700c35b9e6840420f0000000000160014bae0039fd24cf28e880b56dfa78365db01a4b34240420f0000000000160014d59d8f4ff62c0165283d1d29f7e7bd7ef3901b620247304402206b61501c72282d790c1bdfaf727d603b5d71c3edb7f2c7109e9c3ed534c86f2702201dfcec6424ce3ad16fb399ba82030ff707bcf36bce0f892ea038c4b466f9f8e3012102578f300df5151a23998b270eabf270bb40d7597bc93dc2497092763bec8b3f5d0247304402205fcd259f9dc8dcdee700ba92e61fbe0c9074be5690910d84c66115000c67c1b402203aaec7cbe424a8a9963ce9d1a9cd939bf97bbaa82734aa6a7aac5339f134c600012102cf457a4e184a70e78ca863fa5d67800dd17a318b5f84e726f3b07c9e2706320602483045022100eb8ccd123e4320dbd6de69bfb9f961ec6e8051989e3a97775fe0a6c418b0aace02204eb10eca584297ec6a975e291d53ebc47dd316873d2c7a0c188c04fe4a3420e7012103c4ebeecec523a09a38d838a0d343fc947a6b9278bc754c4d8015e72a1eaf112a02483045022100ed1ea63252588bd12ef20f6f75c3b6eec89704ea80db4200cb390f04f3e9d8a8022060a9031d70b6bc4fc231600010c5ef740442f5da42ebbf1e8610a0e8c9131dcc012103e822abda440292083d39168cc1b0890c051498595646b4a0b06ac7afc57e04ff024830450221008c3f4c32cf3a1a6443090acb7490d7bf701ad24c5a65280cdc2fefef944671c902200f87b0ac708f0e8cb9ce2f30abcab2869a90ec7b3856c9b76109811e34f5effd012103f24a62693bb8583a980d7d434db5b34ccf65b1522b5ea5975a72f0bb8f8ca61600000000010000000001050a6ba13ed05fe8dbe404a2b6494cca724ebbb322e8aa7a69fa302b2cdd702f830400000000ffffffff9f574d1350753959f3936df8c20b4d715f457e08dd4eb8730603635c470e7e8c0300000000ffffffff0c4d92257fa34e78a30452c5fceefae7265423264c7309738818d33361c114b30600000000ffffffff1aeb89601951cf2de2f507d067a75d2653fd32b998bcdf18984efde659b39cc90400000000ffffffff526db28c24715302d79249907767789975dd34aade5bf79a5fb82022d331c5e70400000000ffffffff0540420f000000000016001437d35f7488a9c092db66a77e278f717bcf4448d740420f00000000001600144b5da2b632708ae7cb70e7c31055cc5dd9bc37e140420f0000000000160014a7d15fda51d40ee3c18206836cb7640d4fc2cc9e40420f0000000000160014c8a8ec38b3000e13d726d187298756ad8007c54840420f0000000000160014f90584a6c4f31e63ccac5feafd90f841a21790620247304402200e8c44efe7181a69ef583a0e
41623514428d68033fbc8f97ef88bed284178e3302207b1027d52a7324820b8757c274dfed89ca6b7a57f599333075f61387ab127a0401210245ba97af77bddcdc1a0e5994fdebf5d3bf06105bd94adad4bef1fb82c2877c830247304402207379bf9b68428246342ee1e81060059fbd8943bad2bcd744cd6bb09f26aea0a302201712e9345387d04539873b6f401bd539d8f77c8d984b1d585b30552a72fb56130121031881807ecde9de561b15fbee33241d6daac53ede1a52f61d4cf65f877cb5059d024730440220760c5ad5159e5bcde4cc6174698015a5a3b09a3ec36f9d8f84e3b7665bbb50a802207258386b9691ed0aa64180b5b66a3166874452fb73aab13e7ae2738f850cc726012103a0b664adca4f8fc74ce25840674ee7ff0a9bde6f739e17ad2f48ddf58673414302483045022100fcd436430db2f2dd740b0574e423466a97749c565bb3a3d8eb18af37720a2c15022021f59169245f755475fef6d8c0fcd2aab774d6629416f5cea74cc4c76f6fa23e012103180d76919af5aebb2aece8ed272a5697e44ddbd19d8c130235c4903ad102aefb02483045022100e6bb941fe1f234e389931ac1e17aa5f1d7c809cb79171cb7e5ec86a676080a630220783df9466b6557e81f2edecb5fbfe2a326e074479e4e429b345a12c7a7c5491e012102fd8a1bda47fe804c9f6a6cad40b2a7860e5883c39ed5af5f4cb047482ed38fec0000000001000000000105d2fe5910ed97e9aff993387b5cacf51afeb353540f91774673de5121f2410c130400000000ffffffff2077156ba605d6ed07bac08932d11e7d465bd9ea8c1e7f6ed23b89baac05851b0200000000fffffffff0a27be97952ffd9f86fe67372af5b24e8f37778b0b04c7fa77b8684d00cf05b0400000000ffffffffec17a38907da5600d8d89f8eff6cd1e005697162162bf4afc9d1bf35180a6b750200000000ffffffff79110aa1883e14c5cdee4767e7406a1908dcb28ed75c302133e986740cf6307e0300000000ffffffff0540420f00000000001600141f77fe8176f0e9d0452ce337e62657a26c37b6a340420f00000000001600142360025c3b7ee8a9e3e5b68323d21d9efcc942a940420f0000000000160014688161e78aeecdc76da57c20147620b7b444ed9340420f00000000001600147ddc6de2c630eba772ba473ea352d1df0405c6ea40420f0000000000160014cf8239db79c955fdf9d1d70ad36833f1bbec7ca902473044022058dbdfe9731f5b4c8f6a85e2e2d01c3f343a875206ae4b964c5c9c07102f05af022014df70a63d920c43809dfd4897461f3ea4a6013ae6f8ffafe43aa44456032721012103fdee88713ad971ee2cb73f58706b6f57fe4d0840a9dcbcc43cc96489b3095fdb02483045022100f882e6d3592606a629e9e92e64376a64b5ce85130e44b39ee31123d6dae4557d0220099f5b675750f11a1f0259d6435f9a964d6b7ab659242f8fcf27642707b963ce0121031148df73cecfb9b05375e478fc6ad0f97f3a04216275c550fbab97f83a0ac7c602483045022100f3e47a24ff4a0a86b7f667c3338002169148aeb3a95c74f926360b85faafc06e0220795cf68b0a24f620c5d35c61fb0ad3751e68442065a091d63d66cd9b08657085012102f61d8aac49b8bcd7c920c890e07f40c7e649bfea78c1e5697b88d851761faf820248304502210081eff63bafbb276fccc8a691f76b8e56b03bcc20652d74159a84d24e5aedbd5702207edcab86486f9d234f4e10539e27cf642c5e01dc0e2d6037f0c96edfabc3f9f50121038ba0ebcb767c5f0c620f7355ff3a52332cd76af25fd87b3f28db04c0f2cdfdfb02473044022066aafcd58ede5049a1d3ad991f27fcba8d80ffdb59c021d87cdafecc44907f69022059bb229f58ab263ec8f6e33c8eac946b229486b9467d743361a5f9365b92edca012102e54465c6b8d22a158da914b186dabc77a8fd7b9ecd3c36747b93b22e3b58bce40000000001000000000105d2fe5910ed97e9aff993387b5cacf51afeb353540f91774673de5121f2410c130500000000ffffffff170086bc90a5119475815ea72c9cc100f5e8c35435050a9b8522e47b0ffcb71f0300000000ffffffff59e924baeac24cced25f4ebcfb86b514646a889699c78b82c8ed07cf5f564f510400000000ffffffffec17a38907da5600d8d89f8eff6cd1e005697162162bf4afc9d1bf35180a6b750600000000ffffffff50b95164469b0ea83bd903e180d3252f950a044839de6c8702d6f02c2673a7d60300000000ffffffff0540420f000000000016001418409789372c867e6592c28a8c5230b69b39233040420f000000000016001432244447fd866001c587d75b2dcec7660b08aae340420f000000000016001437e9d2bdb830e51a02b1ea7ae10c495e9ae471c040420f0000000000160014412a8f7827d5ecc3cfb721632391ee1ad20710a940420f000000000
0160014fd5b1bb123ca72faeb7aa2881a9c3d45d65b3a5b0248304502210098c37ab84c35fd0526e092b68005adb31013f8d4da1d26c83ebc9a1c2aea4a4202202317859d0a1aaedfbffaa158b2b8a771eb1bc386f22ca004ef48c0250f7be21f0121021ce0417843c482fb59ad8acd3d56f17b7b47f2c32f976619cf36788357d3a3b902483045022100a590fdbf93b9c56e1fab713309841f602be343e302a662de1004eac8b928c81d02200dd0b851e5ed31fcadfaaee462b12b2ffce4e2f28e4963909e4f1a2edecafd87012102b5a17a524356b4cada0b86845a531612a2bee1c986f1959b12b7f4a43b1de1b20247304402202bc461ad4b81c95ca13b6fd72f3f49241041f120b9d6d3afe15e1530e34ddd3b02202e60ca7590ba48597a3df2aef1fdb49edef2078abfd8f4da7e1081511531468f0121024ec4fddde6d6775aa18ef72d48d2cb0cfbc4f09f13fcfbe99ee8f14b2e37881c0247304402205020fdbf31c9d042a43c861782c4833dc1b6bcc9579d7c0354f6d45f869c563a0220788bdfe7763336a60886d106e9a7551aeff1df1201f10c275fb9785b91561e190121031dd0986142726e97b15bbbc1727f49afbf2802d1cac2b14f54aa283a125b695b024730440220287b2958597d0bdb520f4160e4da7eaf5da960f99c0994e97e8576a9cec9e1ec022019ed06e2a72c98b52be6ef47bff4e194dae3a3fb0d5a8ff15450b27a54c0625d012103eef661841d9bf436ed0cd704560d603b2417d1a28eacf5c9f3adc16145708414000000000100000000010506d74d44dccd2798343b17613e73d4b13f1263de261ec47dae93d01fdda4592c0200000000ffffffffec17a38907da5600d8d89f8eff6cd1e005697162162bf4afc9d1bf35180a6b750300000000ffffffff4ee1d7ebe10657631a1200b885d19f535dd45801cbd2fb70a6f8d5f6c38cf8890500000000ffffffff12266e548fb215c8dd897d0f270b2c6d699146ddb44ea760ae2105f8be6983b70600000000ffffffff02775dd9ad160bee1fa2130d30d528bd9205c37db1cab8bc2220756f734726f50400000000ffffffff0540420f00000000001600140152237541cdbc01abc0de3a82adb8dc1785819f40420f00000000001600141328a3e29519ee651707adc9926b5cbf571104f740420f0000000000160014a315b3021bc5bf23cf25ea77dc66dc1674bd28e140420f0000000000160014ac12d06ed6a18c2f3ff8c8158b63e2aaf45bc8ff40420f0000000000160014bbcc5deea9a7e200a2ed563dc5265a56269b637002483045022100a250e319a6e8f6ba1616e9ea58c66033bc5a40fd84806a5a1467a87f74b0cf9a02205c602ffeedcad47c2ce181ce8f0536bae00cad0260fd285045607f4dd81863d70121023233ce7e34b63a3985ac3e65ab3f8ec3df566535fbe0f941665e6ded7f8423a102483045022100aec036ed8099ef8136003533f895af34702af87b984f80231bd33fb0abd3e3510220723f6bad802e8b2626a4a508a2eeebaf6ed2f8d2f3bfdf4122d718200581436801210255b2f768f6fa97ba1db9ad8a0eb992148392404732989412a1184088a23d12eb02483045022100f86ddb24c5f667e5dae45485577db43ff21d68178bffffbe6ef4bea46e3d87d402206e7b251bfdb380cf0f37464540abe478c56e0a72c642d1ed6133139d26c57f4c0121032e0bcfa9d8d36b9bc0a7a34cf6152ec8dcb4a0064da0ad0fce62aeab0e850958024830450221008e76978a8804ecbdb3d36d87b2ba3f5efee72b44ab3a315d60f347e76a97070d0220765ed68fceb7bffd647f4e22ea9c240fe93210dd16c7bdfe97ff30f4fe3fc9810121023bf73b07ce50c28d0a20a4ed28c3acc74fd933606e1dc6ab649f3f480937b18602473044022004e20d0cc39be4e25c9f8e2cc320038eb9b9b24fa4c85debb13bdfd1258f637c02202d695942d2fa6e2fac5ce472ec6ab0d43296b0ebc0d2bc3e2e96c0f531829a74012102ad8a9062bc706ee37ea3eb867f4076549b99d6baec2a44a32c1c77229a03671c0000000001000000000101ec34b3aef3fa8ca7e5a60d57987cc296fa47b48fac8e0012f8c10662b5ea457e0100000000ffffffff02a87f000000000000160014725583e973c7fe4149ab4ac5a440f3bb5caa4395b03600000000000017a914d9fa1d36e6785f7e61dcc4fe94e47cf6925f4bc88702483045022100e69b000702723f44a01f2484e386ae70fe535746f2d10d3675028e58ba726cfb02200c28f63ee221c606b9450cde04bddf91905a8ccf2f677f3d8e35f389be0f304901210249770ce7a746ccddd4838f810394295508952da6958c46d21378d1c40fdb908400000000010000000001015eafda04a45c16f459829fbb441a4db81b57a3e975f0994d3ac6cd0425aa9b370100000000ffffffff02b510000000000000160014fd0c01d23e961b11bd645cedcc2d1829ddf2
d4cfe23600000000000017a914d9fa1d36e6785f7e61dcc4fe94e47cf6925f4bc88702483045022100874a787f2f1d66e197a71b3e1a2ba80367becb99deeb0f33fc8d507d72c824250220366bc8d76ac0cf75ee3b902bc9123f6bd9b4f6307297eb95f9004e7bb490e91a012102382d1ab4570f4616add0407cdc30012b1cd5b083ec2b9628ef0ada9c3be0348400000000010000000001010e18ebaab040812eceffe8a56c5d03ad6feec9baf1a75adb3e590e55eeee10ed0000000000ffffffff02102700000000000017a914d9fa1d36e6785f7e61dcc4fe94e47cf6925f4bc8876508000000000000160014d469b4510ef11a72421c4712bf8cc73ed5ee4d5402473044022077525667e92c5d155422313ec4546a2cb16e57492410fb5e1779293e2ddcfbc402201a63cf1d894741a33154cee939737f5adf9c82202631e42de46e6d0f3e5c03f0012103e29fe41442c4f88a08e4cb6c0788207d551b1cb3f8b3707ae90c21cdf12d0dc6000000000100000000010123de7a28fdb63494674ffeab8f66aa847f727f03af147f5f071b7f652044cb670000000017160014a3107d2251f1cedabfff6a027e1a9a7ebe939fbdfdffffff021e7479000000000017a914d858cc193e77b7558dec5b04e0c9e0da780ad09a870000000000000000226a20a517806067bd023d479bd91838e112656ed274db0cd2f4c799fe0a6eb69289ed024730440220153c04ad4338f128d2d0427bc6e95629c8358c092fb27695a8aef1c3043f37bf02200f47b330758a3be5c4afbd1b2fcd7aef7dc76135c32c7a3027fd6d0e68d0eeb001210287da90db67cdecd4c59277fe5399f3362d61531ad4ab2500e7a6db7f74d39d78272e160001000000000101f68c9571aee1eba0821abdc976c0b992eb0b9d9800f6e9743520e7d88d6f48700000000017160014bc4ba930bfa944b6acdc93a15f3086fdd501cb55fdffffff02f46891000000000017a9149cdac32b184e32f56f9ca6c248455cac0a8d347c870000000000000000226a20d4723e942f45490b5db02076af578c29a9b35a5ea6c0e923fb7c9fd858018ae9024730440220357e783e7c06f502fba19c58441c2db3442b3e6d1fb51b86b44753630e28f5c20220769ac833815cf31566f69d14c9cc522e980602186aa46f1675dd283839add31a012102b4ec0ce937ff5fb7d9569c590a20210229f89499b6e051e6cd88c68d55ab6bdb272e1600010000000001012581e82e36682de73401031f5b1b8bfc62da81107da6cc5143a1f12daeedea270000000000ffffffff0237410f00000000001600147b283009e67a6c7ff7c95b9eef4992aeb06825fe40420f0000000000220020a8b906b50a0306580981c8530a02cdef8ec1595f0559532de971e629103ae65b02483045022100fbbd2dcd491a3bf06d36ba42db02fe80a2de25eca75cb6b51a72880ead721ee9022078e75bb3f715326ffd1dfb50837554b2b5f3a19a2ca077311502a55145a399b4012102d2c738fe222ddb8ef85a552847fcccae562c06120bcdefd5d0349d62bc7e16af0000000002000000fd1b010084a80bae8df308757df40919d809894283d8f46131311b5ea9481c72aae189010000006b48304502210090409e9e61e58c9f966b3db7d340f636a41b97a3edb198d810149b6ddd587bda02203e2ab75fb1be0a8c5156d0b19d8c725ec9d55db443606d17da417c83931baeb50121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff028a16f18c152fe0bae74ca06bc3b022f14beb505a265ee524bf3b20961cedec000000006b483045022100a3d7793fcc3e8d9b9da8212def5effe49e9a7029e9238bcf0d33791056907b3502202448f7fb12f993741df4fdae3742b25f5e601caf1036816ab8dd82cc99f841560121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff02c0b49b80694b276457c8cbf2c8af8e2befde44d64d7c98d1274d5f6024e278000000006a4730440220537cf1fa732e7a9cfdb20f9dbbc10820075d6fa096515f7dd69a4f6816f8225b0220790dc00e5be14a49891c601409f37e440fadcb70574e4b237994524cbfc3dc540121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff02c7b6881f6d70a8bce9a4c06cb15d25b345e244fc6960a73978881863cc331f000000006a47304402203807f8f4d98173dfad242141139b0725d5955bb136ebb4ef022782771e6b58fe02201879b63b29b1bcf3bc26dc544a873d78aee32197b122f3f921d7c7d560266b010121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff02d1ba7e985ddf146500c7b41cd02f21b07ea48589bf43db21fefda67225ebc6010000006a473044022011e4f478b30982e620e17ab2dadd49e94a220
72008f28e69a9f45467d2a3acf902207b92bc4048b9f56957484a3fa1cfb670ac972e80a07a7f8ef2dd0add267f804f0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff031a7e889676aa9fafa815c38975db59210b0dd14af6487675f5b065ed2e8801000000006a473044022022118a9540d62ba1a97978fb2d98e2c5801396d99afa28251d0fbc6b6b06baf9022027a1aa74cbae4b5c6e5285aa78175bb2d2b1d11199ea66914ffe93fda9c388f20121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff03fa0b3a85b0fee72029f030f4ba8a05a4913ac911d2eac3deeee25c1a83cc50000000006b483045022100c86ae8d5fe69f557e4e2704f852d45819a9210f7011ba6205601dc21ba4edcd00220479f79ca4a2c2af2a88ca8234ceebc024613a1598ceea2bb6beb30780ae7b5270121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff058714dfa96fddc38cbe99f90600220af2839f47f1b6aaacc38f444addd1adbf000000006a473044022007332560d80662ead42c258d2de62ea685559a51f777975a0da73664358e588802207b75ee031ef00d231d4576d40e1488cdd1a4039a8a5ffb40c7070efa43a23cf90121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff05df336346ca5acf8c5ef0b7bd9df1c41dbc2a8f8eb8fdf022d09b0882d22710000000006a4730440220533aa56cf1156fec280cc5e5019ace36a7b3c1fcd770a609823a053c6668123f022021131d988e0f3afbd497c715807816b2c72022a23ab07bf41c184783d50305730121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff05e6d9c48e2768c3ca67c3685f07318512555f3dfd962be7db8098d9ae09403b000000006a47304402207f599d6856a7166bb23155321d38f098a8b2aa88105de3f14a504ed890201e7502204a70aa727321247ddb65f50ef35bc59cf793a69a299048ee836248c28e3db4d40121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff0678271bb3ae7f0feee09c23edd71bdd8fd7029a65c20e0f5c78b2a9c76cd0b2000000006a47304402206768056595616cded4ff8f2876ba5f21214c93a7e020d5f9958643949e946b3502207483500f2469bf9196232313f125df9036e247d1b2a0116837e9af2786b573850121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff06e6a532f9d1c943f30d4083837be462b4e763035fb8827d67f4db191004e39c000000006b483045022100d9dfe5de4d325a84c57e857b2e1ec447ea0ba7de783f1220c49a40350f3c74c80220363f7f9cd8691f13838dfcd6f09c2a9a04337e1de76b02c9eb9f85170a502e210121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff07a6f0b87f7d1a38c6ec36771c65ec5533fe075a4fcb7777b2ba60aaa9bbe3f9000000006b483045022100e884bf992c7ea890a0dc92a4426bef9e6197bd8021e22cfb7cd3e50919046598022022b7487afe90ac936cb72fc5c7bdae497519f1888b6b202cf51ed6288dfc72cb0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff087303adc5e657d8ef9df68959d2cf209c4370052c1a0e28a05b2818d892c8ca000000006b483045022100ae5851c3a3001df89cdb432f9df71e3efaf22c8f391c960692e1bfa31c528e0d02205a201051ff6023333f80be2f59e9aa05641242c9a1e0593fd76fd0652e8db7680121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff0a071e9bcbd86155e42fa8f754e34580f589f955da3af5891ab34f24d7713f10000000006b483045022100c59866c958ab8df90e482fe4b1ac97703fdd04591409c92352b22c9d5d39c4d80220412a805cf754d470ae8460399a2f450c10ec750ad63caaf077eb9fa9017a74ec0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff0a3f82dfbbfc9d889cb7273d99d4e229a90adb34bbe258d50058b32494ae7648000000006a473044022049853a918c602cfb13fe48fd0d7dbbf0359b8e06198611c5129dd0af05e530af02203c10d0743138f826b9c46720e66a480618e962047ee02c12fbdae92c44f098190121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff0a7a89437c96efedb935edb6608b9074bbf124031b53ba03bf67ecbd9b1acc43010000006a473044022001e7225c634b8651d1924c9e1aeaa90c2e33b55bf02616074a5b7e
ff84178991022050ea2f0a5a9d3bbf888cb937448f61a8743a1f1158770529e6d5e1a48c0ca31f0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff0af77ff75661935213bd0892d16db7264d20be47a9199a2569d150ac5225baab010000006a473044022003245acafc7c492afda1022f15d5e6314f2459c5620b529151c4f1cc8bbefdfd02200ef972a3aee6d4d114171dacecffb311aa0a2671ba93dd8d1813bab5ebca66230121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff0b7a6d5e6314dff9f4c3ca182496a95c231cbd15e280d6b65f6f03dcf7325a87000000006a47304402204e5ba58b0f40166afe79aa4f805024a1ea91f91daf5890146fe62e92951c142802206683f0df0ef46a1816ee36d07d80a5e9b1212b8923d875286e3cd5f2636b7bcc0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff0c7ca90d414a6fc0f5a075240bd31c5f13712512564182adb7ccef21f05b48c7000000006a47304402200d5d59b37cc12c7e5e6083c8fe309675e625f54e3a609ab110c9113571da8a700220192a01da86c1e60e9aca9ac07ca5926a68d5cea68c13133804352b8cce57508e0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff0d93720602be16c9e024509b2684de33c425f8a0acb30098b41d88b751fbaa9f000000006a4730440220285f84e9b91497b90a5ca8a22ce29150e3a9994b780f385965518140325dd3af022017065e03a5bde3d1aa0a7b01ac8cd8c0f85846f12f6c2470a731f61446eb58190121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff0e335606cfb595681f0a89ec1efcd658642231d03363626e34b66af262289fb8000000006b483045022100faf1eb2704274aeee6d4f063a17f3fa0a61dbeefadec5b7a3851b57bd66a622e022037ee7ef8ff9f0c6333704e22b72efc3d4cab5fec115d4dad32c5dc8762118b620121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff0ea1cfb649f86c91a163478d06f5e9277e15223d37bf20c185e9d3be097cf5aa000000006a4730440220448d807b32c899ec9cc0a9aca664e303468923ae051c32f3e891fd1413b1fd1402200fcdfe0ddb4362182ce326b01cc70d0ba365ef306a262bbacf440038e8d2829f0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff1144e611da95a5c963a0267d82dcb6d65c6f3f8b0fd05037ef28b5722142a7e9000000006a4730440220432a77782aefdda3ca3cce3593125e8ec2b187ee6effa451706073ba48384698022017d3980f60e619ffd1f015b769aeb157a8968e360fd19c8ac67d8d0a78d436640121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff11b9ba24b8983303d192fd6b1314d6c1dc60350af08c06320df436dcddb9032e000000006a4730440220617a31673d0f4a02f3d8a67b0c67ee2924bc332ef09c9869b658e84c82150b7302204eb4b6a507038178c006a71c5149a5f3eb28ae3741bd70a1ca47e1b5eb90ae9c0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff11dcd081700477fca8f5c92eaa6c69ec5ce6109b2e049b5fd94b1c7b641c81bd000000006a47304402205f5be173c9498d799c49dd339a70f74a540ec4c739639765b36ca12134db4eda022051872a55f7c5f4cfd8cbda546eb15f8253bbe19001c34d40eff56014be9cac1a0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff121c23d29c3d6c6391d3f63d96e57448b24ca036f1984b98440c416444b55e09000000006a473044022030017a06105de48a28c7f4b1edc6a530bd9904f7dcc29584b0d81abf75b6d2020220657501c1a36cbf0575ef40410bd648a27e804c5b71627e8c0ba66a4cc78bdd060121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff132987deaebcfabb2f23359d2b3db3337a7883c41b5d18c29d6a5200f42f8cbc010000006a47304402204688c3aa0a57688645ddcb80e99a4bef579a45f917289faa71dc281767065d0402206368e805a17f2548d044d3d30935a609cccb372594f743a0123668d0f929bcd00121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff13da76c0ed49ef8cd88ff2ca52fdc3e9f6a12c4fbc0161d4b480e4f146046fcd000000006a4730440220543a4ac965e31dbd3477b412e9dd96be5f152cad178d485584c27be00c77056f02206b10d39d2db
f4ae06cfcd583bb8caef205ebc434e63970ab4e512d3ddc34c9de0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff140224a4e586982c7215590f371e0892764c1054fc7455e92187abdbcb7691c1000000006b483045022100c518ec2569e99cc29cde8fe46776f67c8b979c102be2193eb70909f8a74a9d9e02207401b9b6604e14c8e303d18ad9d368ea9c30119d1e560a84ffac9646ff17976e0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff14061b212628b8136b2cfc135b3f28e3bccdb9fba453a996f75b9f2574e2d21a010000006b483045022100bbfc647d0918bdd59a4f9ce4b0cd65e314f9a3d790bd505ee93d8da1e2ce420902204bf63f05731259b656f94466e1b755deae2e2f9ac958613dca270921a3628bd50121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff1534e8361588a7b188b0a5e5c42362f4fed1ee9b397bd4e9a36139e5549a1bc0000000006b483045022100a2cead800c5c0261f6ba534574ee35283b177cb9e223e20d09df96d361ea844302205ec52d9d67092d76a3246b6a82b69ae411434aaef01eb502b0a84bb9d1bf3ca80121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff165af779aaa4817216d31e781f8dcc9e75a795f1b0c5a4a6c3d3689563082214000000006b483045022100b03e32f57f622c2d3001bfdc94d5bf0490a54289adbab7e8b2a137058d1b7b3902203c4bdf449680c5268139bab2605e0679592c70febe0dd73a411d184e7e1dbbca0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff1711c634bc904bc76e71182b4a6559e5a8f42173265be9420897bc98fd50ec14000000006b483045022100c36b41325553e23043ced286c0fd0bdcdc62c98b8d925b24360b9ea272d8911d02206590f84acd379ee108f2b5e0b923a35a46952b1f66e7ffa22b1fd991d0749db80121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff17ffba4037ea0638e16f22afd6dc965c6a7dab00319900bb813bae1d516cb29b000000006a47304402207d191ee6680ad4329b62f83b42fd19bcf9f2e6508f966be09894be97784664e102207f08e7a744a05c700b57fc15fb139ade4c77164b1928f3882d8964d8f0feb65b0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff19dbe0a70544118ae44a8d53eace13eee56e1b4f3cd30abeb5c443436eb5a0c5000000006b483045022100d724563e08e387c680b94df416f7b7ae020b1c18e335d0c40dc7c3fafa36b15402203c55e6e8fdba129e6e1f0b365f79b93b2973b9c8b41335916f83c1f275a568820121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff19fa2c07fc4edaa2e6fcff068fe42e6369057523143e1cb169f10de3d5f00c1a000000006b48304502210082cddf905dc4f4cc443855513fd9118b2f0aed5b036175e9d74936c6bdd5981602204c3721c25be2b811acbb971086231d1d19585b2e323b62d891fe9adf454c19b20121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff1bb61e6a1403980af02c3d0ff81e3a83fba198c0aca95d66cc1c60836a126815000000006b483045022100d3e7eff5910cdf83f0f478e77a0165bd8ec68f8d59ca3dda60ab2e6fb1a6b0f3022066bae662721dbbba18ffc87513c62ff168a88fd9e9c92589ef78203b81f4ca770121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff1bd2b929c4952d87f8fa2b04f0694f5dee580e95fc4b078f6a0409463be3d172000000006a473044022026c585409ce7d31bac7982997d4a29185cc7db18994587a1f33cf367747f9d7102207425d8ee07603f4eb9fcad8fdb7c6b332a9bbfd6029faf09487046cbff4d771d0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff1c277114382154457f825dc7497d886819d4f76b7e8d636f51135059ce2e8b06000000006a47304402200ea2c820e206caa095602fa15f3287d2e33dd72fa39c24946455dc729090454202207ad1867b96287d0d7828b16e650c30ba1a46f9b9df0e305c28273c384826f4220121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff1dfc43b6e6f96d5441136c5011a59856e64e1201288e8b8074de359b2a249aee000000006b483045022100ca4a205362e049ec568d09c80136b765be33c3b473e606e62d587f4bc6131f1402204e4969be217232030943
a5eea395cf07addf9e53a940a242947f500336ba86370121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff1e7e7d9e0f56ae1b8885c4793afa41e149540d92b9d3e9311d913859fb86c9a5000000006b483045022100e525a14a68165ba7ad63e1bb317f79482e2c18c16757121ffa204c391ea9e9cd02201bb3572a82ec5335eaf3c0143985094aa60be16ce57cf2417b07bb02a2747dfa0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff20453b178e05bc0157f57838fcd510fc89e0a34449a229a73ef3855b2243889c000000006b483045022100d565ae3050d8ffb0537a13fe31a80af56f3570c5c914a0e5840bef25457a368b02206afabff90b6dfeac3d087dc9436ae98d50a4efd18e7a094319f432ef37452bb80121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff21627df51690a9c14660e2ad543cb8ba8dc99df82036bcef0b9c374902b5293b000000006b483045022100f0ea18558ab213cf614a649a482c36ea9d86913439aac7cce28714ca025ca0800220177ecaf3c56563597998941fa2937f718b0101e277fe60be544202ec23cb1f7f0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff22aee164242ba8936caa785e82c07476b53e12e68ddfe5f1dd9be39ce6d2f2d4000000006b483045022100e1bb637b3f65c47a68c237d8837d178594d639edbd8f435520a45e2718fbd469022079779487ad9baed2962533d21095fc9f49242ad2705d8f9162db444494e876690121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff22d538859f2961012b397ae47d0f94cbd14cee3e266ed2b25a7e93e1b3d4c2e3000000006b483045022100875db173464ccf17325f51d3a8e9b031166b9c5bdbcb8d9868e896796ea7bc4a022071aff61cc96dc7ab820840347abaee8147fd6f2ab9410ad862a29e315c0a36000121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff23c644b9e7f5b08c21e669517613fd6fb525737d9e67a5a3cf716583b3f8390e000000006b483045022100ada89666417ffb0ce0d9e037c2cf17bf061cab040e6bac5c792007480702bd92022075d431369a086cdb4ac6a99898d52aaeeb17c5f80bb4b97df1911c3fdd415f420121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff2496c166e7adff13ceea3097813720669fc547d24b77a6e736c14a0e7bd06112000000006b483045022100f10ac392bfe05474be10c36bd09403f289bc0079b7ca73a774d2a4fde63e7dc40220402c0505186d9d90f609e89b570f1a1fa70edbb09cfdabf355dee5e88efee7c10121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff24fed706d186203f9dd560342716f1eb93361c34a22175d5c2ab1e6432f6759a000000006a473044022038b99bd30308dbed7616593e989603d473edbb1007f8734696247bdf1346e8a802205f1b174eb63868ceceb02034731299d52182c5eb6f6e73ab4d822c25ea75ab800121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff26e7fc225ad3bde04a68b5e7aee304f81e671c2969034bb86e7a2d8b84e692bf000000006b4830450221009fd1f1d2f74898eb87e7844bb2da643590b19d5f246ea2a2d705d208740e60f60220636e5aff5c0198855d7db9fcd00e125fb848c3b644091eed64092e83ca6e2b7e0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff29698a45dff5895fa405a25952dc51f48eb90c1202652ecc85382d62770390e4000000006b483045022100968c03cdf4e215d0a0d62b8c0da9d31264c5573119e7843a2c0bb8d9b08b4de20220561d320bdff848688e0da53c28a635c403a38a77e5ff5b5661bb9fa3af200c270121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff2ae7dc104d6e8dde0757f3d91d4079995347609f7ed7ff1539ca19448b683a82000000006b483045022100d40afba7c972b2188b50e83bc9b954d6321d1cab74461ce50e2a53f1e317f88d022048b5c2c4518ba281effeea5efacef0b9e2f8ddde92f2864ef794bad11673794e0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff2cecd10312c3d68b7f5a3e6d01ec39a67cacd328649a1c749002893f1e9e86e9010000006a473044022022de1e8a3232dcccefee9f00c6a5eb80b640159ab5c2a22ff291a892dd59fdf202200ce18396f9f8570546a962a8aa4
e90dcb077d2801604ef64d6febb7d2a29c0af0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff2d4babbcc970b619f55209b17a8c1fb80dd486b6a41647aeeb7a1e99f737b0ec000000006a473044022003f7bd6da592c02de155a45afae3fa766c72f71390fe91401d443bd297635eea0220520fa1462e5349bb7711d3e569b05afe687a1ce07ea4b7186b2f5df6c616a2ab0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff2df87457cb2c8cdf4e61423d8f3e87a6dadda6a596497c2abc1aa78f1361e347000000006b48304502210086f67985fcfa308b03dce8f45b0edae8d5c19dfd801f0c1781bde96a9f8b700202207266c75d625016b6744313ddf232fd3e56841f31fa97105212d2d3af23fe3b620121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff2e1ea9147fdc2aeee4e05b1419d9e2c0f430205169c96960064f2aff9e78259c000000006b483045022100b6100bc8d9fc7404cd75276192a2024586588a346c9636620c1c524ea8465a3d022040e7a62e9e1408b203a17da560f8c51004c25f953de578764a7cd06f8bfcafd50121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff30712fd58c36a3b3399ce0902820cc921534071b3ae889716f5f8a107ff23eea000000006b48304502210092406df143eb117fe7bebfcf36f29547551e323b7214f3bb91a236e1c81da6b902201bfb23cd93474c75849436d84c63770d6fa3b2d54b589cb4e21a6baeca7ebc9e0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff31a4c4ecd80325785186df9cbbe8d86b2856c1d1916042c36bf74b84ac08030e000000006a473044022065cbdb760628acb789db43fc451baa0cc5a5997553c0ec1add80d0f79e4e17e402206cfd0ce83fe9447264952cea06c18b4417a264b33e7e6cd0d9a45480ecd7a9ee0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff321fac7134f011439bcca1d04c62da395a53708c5b45428a64995c3604dd8d0a000000006b483045022100bde99dbe7173feb3c7db6e60a1f6943f6161685aeccef936200d35824d9bf80c02204e2cd7a96732903659910a7920bfdf5f6301c57c1efe11b4567058a670dac5cb0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff3495e3f3261e01aaa8b1e2517c21f195f3f96a7953879dd9646e90ef485cc5dc000000006b4830450221008d3d5743c48ebb3f4ed9b45940c47740617eba2177090206ba8912b88a3eb3b002207f62368301552e1cce73d7a3a8671471c00e4d770a4424393c11b58fbf81683f0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff34f0f075c1912e90d3e1d5918c595d3b8426929904400c78d9b4a640fc6c3b05000000006a473044022010c31856a7b867c5dcb18af2441ba21364fd0338d80463a389774372c18fabc902207a968c05a8fd1751aeb9bd71de1d008c7f9e14d66c6651df67d8b53f2753985c0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff35847298ac78f82a508b0a47431093f582591e5410e07bd689d236de02440f1b000000006a4730440220406e6fe51356f2144f898a2673e4a86a1ce9f1b98503b609bc5f0012c2eb61f602205f4590235b0082cb9e316c054e70c178f8ad8d0204621cb36dae583e4ea3d5b70121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff36802b86fb5024986d8433ddae016908bfa1ea468f47346ece051959d3c558d2000000006b483045022100f2cfabc9571743d356d9d866ed8267a3ab40df705106d112506d2c75790dde2302205dab87c6aec30f7d9760e36847cb80c16390975ae1615318e313564b945f56250121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff36e1be99e226feb8cb89204b960ccf64744e15a6fc272c74e5f5b9d13cd60d59010000006a47304402202f806d987e31b09aa51b3b3339c50a7ca7033291bc839502bd089a08b3c56f0102205025e519723f4a578786fe000ef48e4a7d516ecb22a1b4a758121134f28737c00121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff37db5a9de63d5a1927e371eed20f8c25f12a83fde2c64e0d95b2041fc2223088000000006a47304402206c538e8255e186991e7b979cc56bb775aaa723dea3735abcacae16d55b92b6ef02201db4047198ccee178c5dc67ae9f1745c38d28bafae
0817217565b4c9294243080121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff387e2bf2d27f25476a9710ba2bb7c0a05c28afe294fd07db8242e04b569760e1000000006a47304402206f9fc0347dae54d89886ae78fadd25d054eac0e27db37d8b6ddcb9c662d543c302204d2b4b072a7c57ac67a848cafee3550b98d9e648b325e0280d2155d44cdeb9d40121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff3ab1adb8f86dc087bb6a3c5c55aea4bd55da8cf656caebc0392386816832ab1f000000006a47304402204b509370bace257a81c63a9347afa4ef5762e7b2c6abed1eaeb9a2022a92200002204ca2a4ab5842e705772846531335ccbb93919b60eb121dbfa8fbf4a5bc59c7500121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff3b5b8f1f2bf400992a678ee80f345a7e7edbe13e2a5ce56f9d950e2d16befc6a000000006b483045022100f368f71c52a07c8042598389da93fc0527d9f83b2b79ae18e86da030835af8c20220169cd0144828f1f6ccd7339c5e59cf15a19c3b568dd0fa08d18e9dc6527e00b80121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff3b86b31ec02ae7923d70db309a11127cc690108c9f653a58e3f63d78c187d640000000006a47304402204a7389252af318697434aca9b1b7ccf86a610ccf3e4a3d0f5feec9e8f0110eb8022008167da28fd5ea03eae3f461bb4e8f3e522c0b800ea4be9a3eb9acf720ab2cb50121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff3b9a55453f244b173b78c483b2591ee52d502ded44f9c565c093d33c99cf4353000000006a473044022020733b41147465c53b7df8f4a10138b6d405b992224c62e05eb38fdf7365efaa022065d371eb9d9953493442bb37795bd7560646d0fe8d48959b2d625fad8c3a7a140121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff3bce43c6dea6627c29527aaef7641d9e5854147008bf165ac0002a68058a3927000000006a473044022064e9f5f04be98982db5f96126638367b4a9258a0e912d52a48fe4aafccf098b102207b2eef00a3138e4761bcb46796d4ccf580cf4d73978df64956fee64a74e227980121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff3caf7ffdf0d49e015c40030e1255c374bb73cbd4d0ec80f92a751a0a85002e8e000000006a47304402200a9a3ad7da2411201a8b981cbcc10fe43e2889b734250a6f7ff4069dfe072fa102204efa4023ebe399681238a88c64ab2ca67c9ea941b5c02e716cb07548e0b129760121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff3cbcc4402729356e568e67755ffc4ca1ff90d136b31d51adb9a2d67e4be4b2c0000000006b4830450221008abdf550ff008eb76111f23352dd808fe494d056f7399561cfcbeef9aed969fa02205b68e04ba5cc87558f8c59fa0fc9775d53f1cfb94a6026b3f4fcd66f2d0505eb0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff3cf5c39b737bd4f736d9417a0b33204b8e5e7d037ee5ad52b8c1690f4b04d31c010000006a473044022041d2605098460450d1a540ec9a1650c43f3fd519dea08176b566f5822cbc5c48022032cfec201f43076263df6c61e1a5814e267bf30f515f88c445838d12e5422ec20121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff3d290218a363d74e6a0ebc12f8750a1b6b7233c7f1845cba8336a67295449067000000006a47304402205b9dc1198514815d9eb15c570530bad872a3d4d3ca52060e6c7a64b723e9749202204045911bb8c647f6e2bd1ac21507133ccbfd66c1986cb5971dbfa8c942b90a220121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff3d5847da44efcf64b2b390a3ef02dba3adf9890268d1caa0bec39ec01a1c9750000000006a473044022054188ab156bd4a77e3d31d58a44d6aec9e894b9c1ebee33043241958c2671d9302204705630fd2672af7f4f5cb5630f6fd08d0023edd8b70eb74bacb66cf80361a4e0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff3d7f584095ecf1e804997bad0537e275c0be8956981c16eb5014554c5db55ed2000000006a4730440220190dad90c7f9ef3dae7ddb3505146a2191fafbc11d8e5ec7b92da5e7fde2a26802203eaf7bba4c44eeed89c81f5104a74552859047844560aba0c862d9907b2c8b980
121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff3df696233681c5d7b13024e4853c188d6fe20dd372b9a3473d2f736c325624aa000000006a47304402206a3280365041ef72ad6a0e14ea45e9ae7b60e02b9897fb23a7e98766f0f2082e02206e9871004a181f480e546bf63b3633f6d4681d3029941a6d14324383c47e8a1d0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff3f61be6a7848dd634b5a4ebf1af802727f2c98fe187ac0f8f8c08e27dfe9d4d0000000006b483045022100d9fe42ab02f15ccc016e9cbe0b307a085d956a18850e9e19b0baa0f7bb84ca72022027c2b58f5adf479e9c164a30fc1b7fbff75f4b9fb78b3d25bc7fbb666f3380ad0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff3f81e0d2f666190e29ec8f76ac25f54fef6c2898d15345a4b060173d8e6fe808000000006b483045022100cc395175ec8564c669cc6f4bd9a68ae66b134232ba870e3e0f05a4f13a5f769e022049536ad4502c7468951aa58cabad92e063ab9bbd7b6f0f30805f8721d4527dcd0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff3ff392bda2570a7e0965e7ab6d2a30ec72894f9e12e3d8e7bf72b8db7eab2134000000006b483045022100c849dc7bc84fe3d632f47ff282683974b3f6a3b701defa4b57b2abf3043dc0500220122b171ee711e3172ab8b90a16b948887624c674e010b89705eb40b7a10d60210121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff41d5da6f6df1260048d00d0a57e1fedb016a710a2f2bddfa30db4da9e4de0395000000006a4730440220275fbd0c721e296cd43c1742c51bd3471a19833c20e2f6459e02910c273d8c88022045e4947dd8e834a4fcde18b7f93932a6e7079e99e7e8298f281ec12c2470c1460121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff46342a9415d4ac18d2a6622a028d0690afd6a4403016ad17e1b3501de68b31ec000000006a47304402202ea6f48204e4c268062eb2a55eba3c0c0500378a9f6fb533a1b77e5fd10d2e9e02200f22ed542498da5a0b90aa75c0c318e5d4e9d64a78aaf516a6a1103b580af6dd0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff464f7670c5b48b6a19386db577cab682e92beb157cd65b964746b4b3d263d552000000006b483045022100d7ffb5ecca3971f83288ddcb13bb1b1a2a85096de4cf18cd1a7ec199e4fb151a02207dbf346fe4395f88f98a851fd425ec9a5c5dafae9d854726193c1ba22778bc4f0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff489070d169220da6b10583f34f923bc423e8e8b67269284f41e2e1b989dfb726000000006a47304402202518270d2143a4542d529fc41e514273d724750a749b345d0192218af63eae1502206e744eb0be02dc54e1388abee6fecdcf7171f5bcaf40190c332980b62dd2159e0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff491abfa895dbaf240e703fcfd5a61838c2279381d68edd4b4cc84ca301deb800000000006a47304402205c6772a33722b5282fd9f510c8337cbfc619718546343f3487e9500ba140d03b02204ff229df9106e660522ecea15bcc91ef7d1a06d5222befa069b179a58d5b2a5a0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff4a03845de787f4b18efd0700f9c1eec02e9925a3de4b47e1b3147d3bc93dd6f8000000006a47304402205c0a2b7feacdeb4b35f9951cc1aa449c8339b254c2ccaee090a2ed52f76224530220344444b6c8d49e866929b05bfaee16415b46e730d7a4befb887c2c307317249d0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff4a304e554f61b5ac7296ee5c101d5e85a2fba9e3c2e2db90ffb8f427c4263281000000006a47304402202e97c56ac00b9b80ef1d4dff7c0afe0f2a1aca3d93e24fa84ca2f7201e7154df02203960f99500a4c4398685e8496f63f32efbe00d3b627f5cee4b3f00a87f0e3ba90121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff4a6d366bf75e83602e5894f05dc240f26b08fe8c4815bed60bacf24ca66a635e000000006b483045022100cd2f11218e111fce33cce33af1a77ae24bfe5a7bd7aaec26028ea174e14ad3a30220422c52d3c9248ac54fe3ffefd0bcfea4c22991fa2f556ce2beed669f1dde1c7e0121025f2e83c0a102
d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff4b677492927075dd74b791bcfa3ba3c29afe096c182268bcd0e5c3dcb8e2093a010000006a473044022005e7a6b24dc791aeb7b39285f0da5a64fdf02b34947b7c905fe9e92c5454578a02200a83d8aef248e75bc19da1611fc617f485fa6bc091b33570a3ff88dbc09e7db80121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff4c767e6514696602f65ff639548e0e45a01b4b5ea0a5b34a5d7a0f8944594747000000006a4730440220643b8f96022af15ac3f02868f00f198fa443e80c41f707c0ab200ef57edd44bc02206d1356317de110d67921858cd253bb1388e70ee23493d4a1f338f0fae6050c290121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff4d14755baa405c66d7ab60dec12d0ab00902d25bd0c0f861efe452644ae2cd82000000006a4730440220129733c14fcdfc5b8ad79f5b0ef5c19d89b68189e08e4ea0211d4951fa2bf0bf02206bfab3c268e3782fb1fc02c30b3454da499f5592336cafa0a758923359bce57b0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff4e0399d0edc01ce0947624606ab03aa404658a83e0856338782bfdcd42a3d8fb000000006b483045022100ae7b2649c747bc9ad0e63a0989e44101b31da666d06a3d153ff6c577b548828c02201b126c2746e4266a14803e97d57e82f74e31cd232a30011cc0f4147b427500a70121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff4ebd82bcc5922a02fe8d1af639bdc47ef328f44a0d90837726ff933fa01bd91e000000006b483045022100faa88f39d43a5fdbd7d9115888624bcfb16bdb28b947ed9efaac85184cc2638302207cf863846249fbaf1b4f99ed6e22760be44fa8789805273bbbacefc67aaf52e50121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff4f1d6f448e3470311b1db52687e4a143fb5053fb8136f33dfd4697fa855fcbbc000000006a47304402204bb5e3e33ca254a88e9bdee6abdd7e5060e0bae1b01353caeceffdcc37c251aa022067d4eebc90632af8fe96c05cbbd63656d5baace8cc0de42fbb904dca4eb10a970121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff4fb633db6d569eb6541ea9988e7d7caef9eaaeb8c4d44ebe29d069908cbbe0ee000000006a473044022050853135a4d7bc3367f1c2541a2a0daa650b1e5dbbc0844232aac207be988a9102206e9acde4872712e1a055d56611c33aeb763dda618ef3aa5416382412eda212ba0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff50c781bf7db9d9104f55d0e9f2797184e637f4fd374d66f7f5362223a5d6cc72000000006b483045022100b41c10160d2a1a3f25968a027396606484207a0f37ae54c086fcb89e72ac45410220494a912f55f88cd44e9e1b25fb8064b22467c27b4519609aa8b8c04f996ff1390121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff5255c2da9f8cbf83e51b098274b3fe12ea28f6faee738a3f84554100ab6a1220000000006b483045022100bb2b823328531b6115ee60db0839b80c0a6bf16da985032ab3f2e184a61b36f702206374b8f9d9c64a8bbf3c77f107c3cc5ebcf74bfc53be3a42d95ddc33f91670ed0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff54a45113bdfce9cb41c82bb9c5768de9fa9a07e9654000bb19f81e85ba36d634000000006a473044022059f8dbbfe0981badd334e3cb1eb7ddce6160c6cb32b1ebb50d9a905d521a679b02201b6739de34a866f65a8557c78679c45266c0c712fd8e5e587361c2535c65ac910121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff54ac5e0600d738edaf762d1c0b1ca7e7038cdb3e1c1b3aa5f09da5db9c6a8831000000006b4830450221009a1465a45dd82b0c9500f99f5d48fee07e0aa678b6e774021811daf535849a5a02200de2512010ebe6e219c073d34d30289661defbea7c166a539dafabfaf69ce47b0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff552b331a02cbf56812a3146ed83f3cbe0530ce79283b3cb6ba27c6cc03b5baf6000000006a47304402200e2b8a3a184723626ddbc2ac1d8070eb88969db1953268059246f5555377c0aa02204cb8fe45403ac98aeaa6ad384f2c2f91c19d5a5e3ca623fa44ad5733a257e23e0121025f2e83c0a102d247a6c4b93960cc0
c82f6f5ac7e18e64c85059daca0e45ecc13feffffff556c759f8f89347b7d81852e856fc3d3d39e37b18c0efa5c056acfd7ce8c7fed000000006b483045022100b87069bc069d6e6412037fb239b42c0c538a84f6c25a8a191a30c0f2c4891d0602205d6b24a8da8a42c88f4669cf86c5c92260fce35e82242c7e2ce249c5613d42390121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff55e57d77840977165a934b5cbd21edd27bebf524cd99898ea83d8398c388c0e4000000006b48304502210080baed436cfcba7cba167e3172a7f8047298d9bb00aa13061c6b2056bffdc423022015d7315f37843a4b026a6911d838c937a81189e05ffc64fb23511330994ceb060121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff564b90b39e8f114b3ff55a225f6ff8d03f26a8520d6d32a4e35331a472e0cd3e000000006a47304402200f5305143fb9d8aa7203f08eccb78086a95131da9163b6f832284c799779bf2002201bb1b4787bca4cbe8a27f74e5f0abeedb8108e29367ad8ea5a50101bf1ffccea0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff5b79ce3097be6f305a3449f76fe4f8288e90d60238e305aebf0c763db527a219000000006a4730440220465b8359087917e1e36a81c69d9f539bda9d6e191cac43baebd2e3ff791208d502207777621d147022bee1bdcea0cdaa12ab87efe7c7cef61d38a3a44502b94f53dc0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff5e967548c1e73bbe0ab2f4eed4a21b6de2a561eb58efbbb0ed6f6e58b9221207000000006a4730440220698228e2e5594ddedcfa4d81a4f2fce7182cc8b064a1411b0c67cc9819161ec302206c30b6de9b68fc67f3a3faef2bd99976fb22b4df045e8ac80b8504d831b7b1000121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff60e7fd457367701d86110f6676503c1a162a429817437f1a319d4f9defab9d4b000000006b4830450221008382e166ee8a53231f54d39df8adcbbc346be5a3912afd14a1e0927a64fe36d10220758f2282d1684cdf18d3fcef298569091183d9391be130ab8e6e4d712ba67ce70121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff61af2435668c59e576023da02717d5fea7126773921517b6026534a8b1f7d101000000006b483045022100a058b60d0e591fbe917236606699f9f781e2be234bb64dfb46b301ac9a80d58802207544dea7ef970b4bd9cc779a944263d24369b7cde6c55fd8d7d1d965e01714cc0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff6225b14555cd588e58b9f712ffb775f96517cebd42cdd2c96f7c7841f5a0c63b010000006b48304502210082c8e59eb55ac9e6bfe044f2ed99ec06baaf7405ced4f2370c344cb027f8f540022072acb30571545d3834b76bede21593de0b2b69337ed9d3677a8448ce70912ea10121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff6318b6b2c2172c90fbcb8345fc8c924a78da1a23fcdc98481ed77fc42b98defa010000006b483045022100dae6cb5b4baeaa012e9da47fffb91be8636bb20f0373012b6ff2cf97e3e99b5c02203f5cbfc3e3f6df01c9b044439b078589a582b148073d674a77a85263f9bd2af90121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff640353dd651b1466c5747ac2ad11fdf7fe76ce6fdcda96df4146787f621ad6f0000000006b483045022100bb786a7fab06250e5542b3c3b3862daf34b665f39e6d501ec00a36e5ecf3f9590220260160de386f2f12c7266f7f4274e3e11e6f53deb0b85705ec965ae623e4d6ea0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff64281aac73fb9cd399c0376847ccee418864fd1e75d3eea3889e09a51f6705b5000000006b483045022100ce4b03d8992f5171beb496c0547b0c29f5be6db5b9058812b41104c3113dc254022028918cb222df385692d16f90d685237e127febab17ab6b0b14919efef8920fd10121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff64b2d4a0af86ae2f8a7a7827559de9b11d5e44d1a6e98b69fb1c5dd59c8ed0dd000000006b483045022100e33fb71d10b9d736cb613f322e9b362ce3a83c291eefe07546e7ccaae75d459a02201b3cf44ed4069ac6d34bde7e7d21a28d8c0cdd81bce9a7b1803c2cf851e5e3180121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac
7e18e64c85059daca0e45ecc13feffffff64c87a3cb6922b581eef454527b4c7bc8d2eb194caf91221e8472eb2c851e578000000006a47304402206fbe0263e0c6713bb5b2e4c89f96eb1bc9b989caa3d75f73ffa25011673a9b4d02201b9b170fe28f7f9e87bcaaeb9d44de0187168d5f1004b96026ec5c4d7b8456080121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff64f9b148882f43abd5961b880b782927c72db2ee1477de0fa67edab163e19eb1000000006b483045022100fb781b6d8170bb3695f3e8653eb1a83c64669888e339c915ea4edcd2b230cf5402205a5b6055930956b454875b2286777437b09d15123f30b6efbf32d614338c1faa0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff666c83631b9cd89d3b2afdcfa0f7945eec9713e26e874d9464dbda04db0f6f47000000006a47304402201128e9708bd01b91991e9fb0fbeab699a06e696889a54711d4b66b4bb73554ef02207f5dbf77652cb4c1724642d3052ce5d97a65f69e22af7a74e1e0e4fbad65e6ee0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff67a3d0f12343177d793fa47d8d3578632647f5ab785754f9fb808e6c0964204d000000006b483045022100bd51f91ba0c09c9df22f39608b500a02471e9efb8918d21b34bf45e9025373db02207d5c012abab7d24b766748632ed16f8b472e2ee7ad2c2ae71332fc2fd65ae0e70121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff67c628f20d85589df43c9183a1113ad899fe1cb16c7c7862e661a4de94646d90000000006a473044022038df25b36a85ef67db53766050d706efd0898ced3b041ea428f55ec0f6115aa0022039075855fb20629717ac05facb5b31f7c1c0198b71b44a161aa817c0f71a89e20121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff69013068458f33210d89b959f0b84c1205f60ae223587795e899c77d3566f151000000006a473044022071ad69a2a847de9d9109d969a361ea031bfb202fbdb082f19f5c3528626d57f602202f44cc6d08509e05140a08521a54a3c86d53d1c4490f2063d30388979799d40e0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff6a2a3b75e260357851204aa333c187e50d28eec62575e294f559e8e925b35c3e000000006b483045022100f48b808408fb35a01cfe97c00b273c864bd72cbe902d5932a0f7aab82fe8295a02204e8be3a05bbbc7fbfe7cfc13ef656ab3b41e31926c4d40241c1d8089527520e10121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff6b58d586065489d747f3ec87d37ba56a8d769ca2d1929b35d267f59461a5964f000000006a4730440220581fc2c176d232f0b5825e8c92888465ba008405e1d202bee2b3e409eccb56070220308d8cbed282effd2496742a148f59a017e32b76d113fd6211904a5f5edffce60121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff6ca5cc6c890b5bdc1a9cc9d8e0e597cb1911b962c3aa78a762234d0949e9fb60000000006b483045022100f9abe72e09384712088f2689fb05a3791d983debcc5d0e040eb65bc23117b60202201ddfa4ba5518188a27e2d4b18dcd0c1b7f1174ff5d135a80243f54a1436754ce0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff6d05f3f04b3b550be8d76d8c460c433ce4c8f25abd7b055dd6182edbccc5d430000000006a473044022043f3b72d1b041a9879482d5cc59aa175d9a932059b9cec3d2784d5a9e06d0c2f022015e199fa1962488ed68df0617c17ea5c9ad11ebdea32c4f7125cb2a32644f6c60121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff7246f629f35099b69fe796449a82e1e184760b36fd28e4440a79e76280752350000000006a47304402202fecd492cdb23794b0169e71746694b61a000d51ca9e9d277e7dff6cc60eec0202206db01df1708440c49790587d8da90b175a3effdf07e203a794b7b03dc70a59390121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff73ce1fe602e452d78a22e9b56b064a8c1d0f0700d9c64e0bce540a420063fc96000000006a47304402207f2651e0570c94f2744158a2f80a2a728f8b787619802a1d5f5b6eb69adf1165022024fa788180e11f5141cb9dfb385dac0daa294c9ed1ab8482120c1742c1df8c6e0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e
45ecc13feffffff75d4e39ebc26f1d70010a56e401ec8e5d901e697bd50af8be2cb2aa91ff43410000000006a47304402202da343f99739886b254458b6aca79deb9360f3f3cb4b9cabc57df7e1434accef022006966f4763f6f2994fd20f324f1d277bac2b90a302aca1d6dc4553c1497fbab90121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff76d86ca0c7cfb62339bb8e8146fee2ed697b4f27f8df11def0673350ded80fd7000000006b483045022100b6d3f03d4403d1e9a6fbb70e941a0965855730c938a7e58932d50ded368e338a022057c1d0d778632ac221521279052fd47090b000978561d7a31304600e13ffe2690121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff76e3759ba3c8217aa00c9245b2985bd661872e754edecebc55ff61b50c0c22b4000000006b483045022100d26f291f14c9b4086e6e15df2389c4dab2f211499ab3f790d5b31c89c248947d02202cb32df891f87017635ad32c7597962278f61b108f9026e4b4b19650a440ae5f0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff7807abedfc28be758a8442a64c0b0035d6b72474c0078106fce089a991e73453000000006a4730440220043a294e1ed03ca72fc88acf8a810709a3a62baba8be8dbd961e9a19a98dff0d02205548d8cdd664018aadf7f22f58cc946bc4aebd8c53756b8f595194fbfd74410e0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff783afd18b72d4094f2f1b72e30449571daea5482341745af5be9d170277ad309000000006a47304402206fbac02aa8cf99d5216efacb777ce13d2bd0b56a77f57d2cc606fbab5778f47202200f2de0e997c04b4c7ef0feb00ea7ba40accd59a09118e70b844341252359a3ad0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff7b443e453f084e7eecbc21245608890aa77cfbf48bcc53c10c8ebc67eaed138d000000006b483045022100f3f61b7fc130fd659184067edbdc5c62e2bce70417af59b6c721170a8f8338e002205d5426322db7441fb00bc2506657e3e6a54f0a1f16c3a5aedfb1aef702957ba90121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff7b4d8797d983b1fd19c3d7b87f22ed664b6af1d5d8230146b4aa0d2b24184d83000000006a473044022045bfb22168c425df94185f7c8f97a1f53cc40162f688a42508dadf61fbae5d1602205bacee7448b74c7fe8f096572b6749910bdd4340fe386c0eb1bcb318a9cf2da60121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff7d342fbfa0a6209f9da59bcfec6725791de1829dc80ff9d5d77cf9ed4519154d010000006a473044022078b1e523bc3d4f7bedc51ea3e54144087d923b9cbc1ed46746592cd8a0d764c6022027914896efa68580b8e52919178fa239664c4b9436a3cb6451b4298f2d15ccdb0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff7d9d27e0d4c95299fef82ffbd0fd19a61dc94b48984e7b300cb78efdc2d6b538010000006a47304402200a5f1028051a65388420061b83fdcfc87f9216f6fafbcffab812def05c17eb7e0220036da2434fcdd0249f11a885f1da5ee812f209aa7cd687262c482887df9758240121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff7daa075297f67fba9b4a80c3a45d961d7bca099c977c06b82cd1d6369a394374000000006a47304402203b7693db93ad2d987f94ba7a7f1b5a1c188a19a6e1d37dfb7d79b1fb0fd45360022003d84f40d71a80e72e799a094ed4b8e1f834ea7b1f513c6f3956e5c29fe4ef720121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff7e94487fb72e7bb972ae5cb61afc949631e06d704fdb23ab6522f061a27af49b000000006b483045022100ea988b3ffbdb09abde8a4710529e4562268258c77a0b2da2ae713b489087923302203044c14cda43891b99ecfa11a9c374740fa87a9b4ac61ea07bcea7e216a88a720121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff7ee766ee8e6ced4f8951056803b8746263e4e59b4d8d0ccf16b8fcf00b8ef1a6000000006b483045022100ae2746e00bf5d29987abd60c56aa6ef061122cbddc3b7bb57ac06e388b34fcee02206b2f35facd9353c7c8f24df1978c3214f5d2f39c8dd2b51f3bb83118618e83b50121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff7f
21a6fc1f33e7e2eff393350b868fb94fae21308c23833a8fb2ee7b61d78629000000006a4730440220626229f2a5f98ba4133716a6d31475e2bdd769ab7f7e8e399a45b4a2cf060ef10220600ea6394b72dccba13fe948eed57af1599425df5ab3ac9089286f44f9e4a35e0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff81c0a7b3b4d522118b895572d735dc1c3e9f8f3c67ded177ee164adf91d4cf85000000006b483045022100f773c998772da0d0449ce395fa5055fe4808fe11afcf28d7fdd90eb5f163e1d70220660bc99e11f143ded61954eb13027f7ae70aa739eb76b60e4a1320325829f1dd0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff870d474810f7ad3bf704d0a1ad87998a2788b56abdb031983d70a8953c2553df000000006b483045022100d6096ac1e54ab364415e1ca43c989570dc2469ae3251e4773392ae36349241a302203b13f7c9d36bc05154c3ae4118db5fcdb50b1d30e96a127385e715b64d9c6bd10121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff8741b266f6672e440347d9dd06bc69dddb9170c3e4566e6bd76874fbfaef3e8e000000006a473044022023ff4b7be3441aa30c825dc8d0adf2bc4d69f6ee0997b99495cfb3f79567f07b0220193ee53be40de24039009e31d1a28b37d0c0a321c111ac5dd18b9a217a42d92d0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff878fb51a1530c087ab76b6bf87d142710393a3ad40bf94e2ffe698b1b0d221d5000000006a47304402206ff1eb701310226087d7c043572527678d18a74b3e62f32ad49645dcdbfb1d1e0220615fc5cdd2a2672bf471ac0bb216d815329420cd4950ea00e63a308f12dfdf6f0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff880c8c94dcec88fbbca3fad28361b642f42818279652f38746e5be1559c69627000000006b483045022100d340f924beeef1efe1b5e13587d52c79721da72eecb6e3211f570d763cff44f702200efd7ef3eb44dc6db5d14045606f381a4c226b2ee3f366e0684aac26955779870121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff884b318994a94ebabedf599d9308d38fc619fd039f8c05f9ed8ceed47b873da7000000006a4730440220267240bfc3ac528f04608540ba919791039ae3ff4ebdacfe7457f8116d0abd3402203c40c9d5cb185de974905f28cfdb92a2ffb281e7e9eb678c568c66c8328d27990121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff8856550305834c72267eead8a00d792bb89c84cf50ef2a2cd7c5065c2afd05f1000000006b483045022100831ff34e5b3ee9e6d0e1b5315e748381fad1fa7c73a46ce6beffbf6e46b98d8902203fe97803c3627ded0ff7948969d983397d344616aa1c6bcaf614f0f4e1696ce60121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff8888a5836dbeb12af6e2dbf85b2c346751afae11a86ce966dadd0450862a1c42000000006a4730440220037e2dae47e1d2161546a96c69164b0b840fc433e32f1db86d12d1a86329153a022067a91a4f99ec1bcb649b64b9b38926f092ea6a7c498607019822b1955f8951e40121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff8981544bf927fbcf89b30bb161074c3552f131b1671c86ff71d7f0c35bbcd16a000000006b483045022100feb0efe8129f3ef26990b4de5241a1dc829577fb0a3948bff1d0eec19f61b28202207bab7820e63c8511d08210e18e32d933db3540bc338defad8c3d471c6c01bdc20121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff8ad2c279c8b75ae9c5b4c52145891ca63260b38d84bfa03f9eee8b44bd5b984f000000006a47304402205a733cbb555544e7dd8e38cb500f2451970f3cd9c03a0acb5e3d7641dcdc93280220321aea8a8df3d8b9f207378ded7bafe300612535c6a81a569ef1000de24fa6ae0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff8bc22a60e709e196545e5fb3f33c2019a689dbfbcd58fb5af1039f42572c7b31000000006a47304402207dc0af02a15597eac63443637e8288ef2014149f271b2efb7ebc50b49251f8fd02202b583f57bb83551032bcd7c63d8cb2a43d389d8f912a998096a3d661a7a125960121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff8c16e73d249eca0ba07
19db799da102db9cccec82082dbb59d5bd9728318f1f1000000006a4730440220665a6671795482ad3bf9fbc50867f4528cfde28af066a7128f25d6b47299f5e80220532e7f48260c99c8eec6db4ccc06baf1be12375238e65f62025f996f798e875f0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff8c1c6f393453251249cc2bbfeb7d77b5b7d4d143221c4e5870dfd761edd5137f000000006b4830450221009c58cd27b2f16e9a0cff11369147f52d801d6261edf6d4cbb121a0ff9903d3d802200cb3c0b2d0d75bc8e995a5dd4a519755ef043b5abb9a76f9485e07b679a815510121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff8c57426347c0f39030877146e80d49ce84c911017f3f44d7b0b7e5dcf350d751000000006b483045022100a53e2fb7a3b9df620434e6bc550f7d512c3be6fcfcc66aab58837e7af262a61002205759a8bffec998730b806ada49b60cf60d91e04e4337637e4e329b8f91219e950121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff8c91327228a160fa37326535cdbb35d8d41f5817e426dedb599305b0e6c36af8000000006b483045022100b30e8407227897c1f6da61e308215442658bd20d2d2a3e57f6ec74437fc150fe022028225a09c718ed86e6320360ca14a86fce90531c863e4ce2fe27639efe78dcca0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff8d82b4db7cd6a27624cadf6099769bb9fbb7a12c8e77d9b564f0628ca00fd7ea000000006b48304502210093d87d5407ed1d7d923e4fdd352d477cdf802f038fd2bb7042ac24d0bfb1dde2022059952f5a755a3d4491b5d5c9405eb211bc5804727f6137c0c6b3577c8c193e0f0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff8f92265b8a2281a0eca530c678763a555d2f9798b3fe60b49d01b5dcbefd61b4000000006b483045022100d37e5962b8d274eb29aafd4cc3511990707f01dca5e425ed817c8d5411096c0c02203928fb735cb4d654200bfb408a2e2b6b4ac47737229c3204cf466abe9c3837500121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff92fe1250807bb3565a974ee45064854cb50c8bf8d136dfecfc134dfb4af53816010000006b483045022100ddb2b093284838dc4b5db91401ac72e4802e1504eecf981f511075828a809e730220394fecd573a5724d0cc63e14d1cd9c02fc8797ab2390ea9d6c10823967bb2c860121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff930481d072c7637e0031b3281365bcddff3417b7e8decd69e60a4998a28128f8000000006b483045022100d3562839c77a35cb9e210b9c4b9bf20db559c03269b29cd5fb20fb8211e97d33022046a7b33205624b4cab8ecad786a74e9ba80ab7b7222b1c0b61f84b49575ceea00121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff938dd167bd43232bac8b71e4cb31950330665d86639497aa879bcb10b7e81d74000000006b483045022100ed5c15d3f3cfb09064063ab88188b28f9a7f2a0fcba2ae737749cd7bec83722b02202064a51422807cf4fe93b870fb06aed7752c72835c7856cd6bafeffaad4a731c0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff93bc631439f6efe44f8a13771395fbc103ca940ea903765a6ba23569de04506b010000006a47304402201fbbe6990e3e9ea3b281c8ac346d226f8010bf0a190686c8a8357364ac461fb10220088dad99f3c6f8333a8642c2a4f4437689e832a8af7ccb309d76d6f30fcd94f80121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff93ddcb60d077910299ac32a12d32cfa719d3ef9bda9b324be6230ee944ef3465000000006b483045022100841a811609895d89468a171e515adf402723090e5a85753296f37c2f4f97ff5a02206581f16350f91a8d57c0340defe9cd324bd9936bac23a65b1cb8bdda608636e60121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff94015fb8a9792971f60c765ddea49a324b55a1e7620ff0fab35db0177b510bf4000000006a473044022032353b41eff05f2accc21415adc303e6a64d7cac41ae117a2b5f0037d028501c02206c152301f758787d4cc151d1bcd7c803888532f70d34ccf334e27470a39896e60121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff94a248fa927ed91b533ce296a93b
a86953691652d47a394b3555b14a0e689776000000006a47304402203b63e11dbbb00a98c932a68d9c5089747b9f6996cbdec2e5b0e62f643dfce66f022056cd9296904b126bf132a84a7bb9e6411969e5cf4a5549633770bcad36d4dc560121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff951f88cc251abb718b7730619f7caf1b9ae76f56a88628963c42f848446a3f63000000006a473044022069a6d69b88a3d1e124bd7e5789712487f884e2b2eb29071ace42e8bc43eaf7b70220695b735363c04eac48f1cf4025acf44f2abba0ea823694c5eeab00feb8f869610121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff953261971121afb8e7fca6736f2dc8bebb20a10b75b67392457f213799d7e38c000000006a4730440220309f99a51c47c472c16f61739a2bf0d63e55ace758fce13601db030b9b6a26c90220554e1483930a474ea285543589866d49d0b5a7fd96c75bd1de174da14915b7550121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff960f712676591c0adeac2ccf43c100e1a3ba69378768b5321396294cdfdd52cb000000006b483045022100f96a813c6e50b65e93b497b3858c1096d539dba30c6f527227efcd8039caa41502201eea43d73fbfca64248e745769cded642915c3c749316efffe563819406db7120121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff98d7d638ae27390d49a3b168ae071208e58e90f56f65ef3e9904fae9522ac24e000000006a47304402205417b042c5f6c485235809e70e6de9c20fc48fd64d887c3aaa8905481e7a53ec022070398c54ced8ce1753ec52648d877cac1da7ba84ea82a557f0d109902d6104cb0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff99e279c2e45e8bcfaf23ca320810bb1dfff2aaa37a3e13e3da3d4c6eeeb22322000000006a473044022079aa99ede566aa7420cf6c584f42a0bf366e2df46c954ae616ca4d48684a0f700220729484ab6e035da727463309d3021eecaacbcf411f20abdb636f30e26cb1a68a0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff9b3a0bcd9acbde4924fc54bdfbda31a744f00e1109062c70d41af89a4d7a5852000000006b483045022100de6bdfa72d6086f9ef9352f73c6144797aa70c673236071a710ba15dd7afb00802205d8201ba1618c67ff75a79a7281345466ebb6fb55c1178f10cd61a262e3a4fcf0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff9c5b101006ddbb2b23601a3c38c1573716daba5b48437ba408cd30ce1baf50d5000000006a473044022050ee6c8b5e8931361f090b1bc582a3fe0d027c4a742f55596a4b7a1f36d58780022031be1d4a22e841a114c55f294e6b25591bd0cc51689d9666a37d283d5b3708ab0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff9cbb4f16cab66bf6184bec5bfd64737fa2a8d1aa81f5cc4d8170c0dfd50bdfae000000006b483045022100d9054e8955d89b255b4b9691dc3ebf06884953d7ed140edf3623aca547f8bc2502200d6225827de12d54374937a2caa0d65712bc126fb0ad1ab17696d2035de49e740121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff9d74732e709107dd101f1ce39cb66d4075166bb733f0150a48f3a5408509458a010000006a47304402203fb5d342f0a6a5a43dc92b818f3f6730c25edc794c8d4ab5f313550e7d5cff7c02207cce1bf73bf48ee99d524e65043e96668f984be9f7e733404392a72ba527ab8a0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff9d92a6d54de45ec676d0667d71b1ac5ef596b16f56736de4f20c0b5d8fc31037010000006b483045022100867340a45da15c8c5b7e04ec88abaff8633c5e6ddcfd4aecd209de14ee1ccf9f022079893d6215893d9fdb63ec6297f560bfbd9d2595244e29e541e1b941ec090d650121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff9e2c1a3b90d7a40ac2a67394d6724001ef863e3ddb698acf79821fc82e8ca7ff010000006b483045022100e998effdbb51f7f144bfe7ecf87bf214211527e04d3d0f4d69dc060b02fd45fa02203c34e1076b77007e6b198ddf6181ad11028a8c077beb01637df2d4a802186b8b0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff9f5326b65079fd90ba03d4c274982c791d35d3288f3a6
0f1d6499e4b8fc5ff54010000006b483045022100d0efeab8353361b23b9a444e0423fb8d233f1fd3ef3098d5ec37ee30ce16de5c022047fd7d1935b112ff265f03d24dffaeef6d355997e5ba13b5bfc29c57734489480121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff9fe7dc77fe8f39735bebabe0ba079003956c59e3f5499dd5eb88483bc75ccc48000000006b483045022100dd209754c229823f4ffe7c61fb9831f397ec243865d254d821c145b9d6cad3b90220624e3062e471fbaf0ebd65ef492e0b5d09f048c491f609dc39d68c19158445cb0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffa041a8ab6b95762510c788bf39da9ca6641404f56883d0b3b4c2699b14e511c6000000006a47304402204ec163f05866e53c346eec4c8da3b4e9cff91b864498d71c9492e3ad952460ca022042aac30fd46012baa62325f46c347c9cf5b512eae8ff66fb7eb9430ade7ccdfd0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffa22f3562ee44f62c95f6694467a15e06037a6506e7298773ccba3cb31f653392000000006a473044022005281648daaebcf84dcaa7fa7bdf6e81f40ecacd8da590515ff1bdc4683e974a022017b49acb8b1e85c2cb691dbb12fe5ad6110ac76a64d5aa42e2d2a017e24ce5cd0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffa3c78a0a721cb68de5ecb549a9f40d86045970bf12e077d9b06af1f5fce365cc000000006a47304402203a887eb15cf545c0aedb8089898aa8510591235d9dc87b19a3b70d8afbf96c7a02200f75943f8195de6e66a4bbf293f49c766dfe8e313bc04d6304fc2633f27f761f0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffa47d43fe6923c76f855957e142681bb26eedf1910ca2461791f4ab5c2c1e59a9000000006a4730440220235593a50511479dd44d721db3b85487f4a716ada57f5d8a6c68cc43f3b71af102203806ecf4582904c330c84c000a0d9903de595557d31f59f7eea4f395a3b6185c0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffa781d1a9ba1f05928041b7176866cbab71ca2e19abf43d22348c3b20261cf5d3010000006a47304402204aa2e51e85b5e63ca132cac6c1a3eb9154a1ce4496d9e975b5a50352351c2cee022044c5b693f737410b694a608b0a3c7e46abfd25126c056c7995e17c022ded51cb0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffa7acacd1ca538a36045bd0bbfdb050c9d73aa070717f897dddb102831411157b000000006b483045022100bbf6ef0363b250eacae23a0408ae7efa03f900bfeecd1f1dd4d17308e2247f89022024412a1655ef2656c70a0c803d27d106396c09cdeb67c6875ec28b20812a43310121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffa8d24b1c948c48a06bdf525d091ad0474df246c8ba728de33a8f523664fdbe18000000006a473044022050877b21695de88b83298bd7e8e3d6cb729ea43800955027670f8f5120c7aece0220581abde2a3aeb2c45e5eeb58b328d4f80185e5612fd194da0e7293dc3a0e63ee0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffa907ee329d49db29a45711066a0fa1e0a9f74db46a2f18f4a3faf1f69d0d808a010000006a473044022008d121f1171ff2b68399e9509a37ce7a98a1ccf843b38c1b02e51cd7fb1387e702205509f004a6c11bac4745f24c672825a1c574504b07865b4b024740e730f7ef850121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffa91863c48ee31bed275af6b32b98723f28909a5862596830aba9e2ea69933686000000006b4830450221008a1a042dc8aeee6f42c7ac396bc90bf6ad2dd3fa62d1a5fbb57fb450384c267a02204182f41932ae3d17a3938a6dabb3d94b61b6948c01c53675fb99d7535049960b0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffa994d6b5eaaf2a627917f4f895b136b3da8c4dc23fe1d1498750455c4e36ac82000000006a47304402200458c25e2e65d08c8c8fec6f7b8d785206faca8e4988e81dbb7b706fc921421902203dc624f7484b951149a24921256b0f801add4b76af8bbfe4f6917a042974c6380121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffa9d42497eeeb81bd04bf03840479155e86b390457a31dfffcf4599b1c8a73a35
000000006a473044022023bb94feb30df847b04500568b14d46dac9e3cbc4b45800588ba83e98ed43c5702205e5d1e3e9c982628508a20232bc151a5b6b000a2a52e704214ef271a56413e4b0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffa9e6189feca69cf5fb597760a305ec86a76d8d8b10bad0c66c18391f88787ade000000006a47304402202ba0f7757ffa6f4d99d8e38650bf5987c06b6647157a217a6ac9e18699501c1102204558eb5b3761ce64c2aa9d0e9185962457b1f4a4714de2ff22b45f063bc5f3ac0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffaa2582ec0dff9e8ac119836446f2d662228b683eb263c9938b641352379996bc000000006b483045022100faa07edd491147f70e80055a59961c64233a9413af2babd3ab84ef3e9464bbc902207412d2878403ff844bc961c1027b39c6d865849dd92875856e2b3bd2679365e00121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffaa519c67c9eaf7838cbf46ab9eacfd6acddc4d7552597b393a09f3b8e8b12145000000006b483045022100ae0359d654aea81bc500a806e62c5a56a2a96b92bf0109e2c39b01d713f4a06d02200bbe2e707c1b56cbabbae128829feb0d7eb9da6dbd23ae57b085a25ca3942d910121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffaaac35ffb106e7cea120441728ea3422aec289e3853c0d06e06e7505c207db7a000000006a47304402203a8d3e5844c1345a7dc49c9719898bac9ad73ebccbb666ba73f6a6fd655bf380022044a3d80745671a6f6ee7f8603b3950aa5fd881669cb36bec4dd19fe04a2c70b00121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffac30ba083c13f3ac8f619b830f55c5771d66acc2bb3288f3da9f8384ab253fa3000000006a47304402203d8ad96e4279e1333fdaea795b153389f80f14725cd401ade32294fe3e2994220220229c1e366e8050a24797d89c32fd1555602d2bf08a602b93e314cae434c9c1950121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffac9a2f95c204216b00422f1a7809910b41a6041fe43eb7644556ffe43d84753c000000006a473044022007c19ad7fe9cc7bb9104efade29efef1fe44936f87460ab5a28b5b56d21e9c6002207fa43dd27ac9cc2f90c3def2fbeee12515584e1d46696d2dce8803eabe2c12640121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffad86683a608cbb55849f6bd1d72ca533b4b0a2421983ad8d59113c388b74c3f1000000006a473044022062ab1854bf94cb32586e3ef3797ec1b7ee3ea1410082ea5671723d73476bdf44022027922c0c34be7aa670e82a296e4fc114ca5b92b14f9d59f4632572870ad5cbd80121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffae23f677ee66b4b62f017e93df298a54ce1035c600649470631054d29be22b01000000006b483045022100e60cbfa15599f66acd7f3d914ed2b6709aa2876b9c7cded6b0ff4e38bc82c4fb022047a6549abab689d61f2eaefc8e04b1226b64d49a67936c27c969934359ae5e060121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffaf04f9c096054d7b5f0dd85dd525a81865efeb2e4675a9a67c84ef22f18d790b000000006a4730440220104150ccd8e5a689d843e555176be5c1692a120696f8d0c6f149fd4fde6592dd022009fb9c07cefad44cd31fd88ddeca54b49a4ac1f00c451d2857345cb281fb297a0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffafebe05182ec72742a7430222ead89d3578cc136b5c611f772b1c78dbd8dac76000000006a4730440220390d30ac506250b940f6a27963da4be56a518c30b72f6296bed8cc67103eda590220643641b1a0deb825d58be995091098ab3dd0657733c6e220c261b936d8d6dd2d0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffb0c548c262ff721f2987449a245599e2bacbc13d76eb367e4f82b50190bed666000000006b4830450221008f4949ec2c9e7d16d24366a4923c8bed231f898a6da9b531cabb514fc946991002204be138a7f1da448e39042ddaaf03294bce13d418f64d424a4af2da9b54c0c7850121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffb0e3c6f1068912e5d01f88060fdfe0a70dcc414eb07a6b834edc3af420e2ccfd000000006a473044022
0127369abc46130b929403341d3fbf9c05e173a8d561f1f0fef5444ed63ac46a702200bd23c874960dad2920f484ad9c3346740fb1922cfccb4e583eb2300143039080121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffb0f20186163ad2345f5762fc4c69bbb0e3b5aa13e27836d15384e7ed747477f4000000006a473044022024a312a0faf913d05413ac6e07e97985cfbc836ca3c1b8a43cee18948f3d7ef40220681e2d17755c08a31112de8aadfe0b1c2ba204bb1810649f3fd0d556c31e83570121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffb25a0ed8ce02582927af3a54d3fba2b52a3e16d7f5d0c7788e0076cc73040a1b010000006a4730440220452e760ff56a8526273169b81e59c8c21bd5f53ec364bbfe732757d07fcb81df0220411f7477391956b2c88671557ba022c1bd16009c7a3a906fa4a28dfed1b857fd0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffb2bc941a152e0469de7234a3fdbb2c61114f7d0964455e510675bba747ea33cd000000006a4730440220500682a03cdd37c81f982fd1862d9962f7fec32c5e8c26d1c9ac54fa2c433e40022012844cc3d3a4179a7c9034ff9cbb85a19f07ae1a4b1dadb5540db0aeeb3d687e0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffb3dbba61710f03fcf6bb808038d4ec4b0ba3389f9e032b9e32d2af60e7b08fbd000000006a47304402205c31da4d4154a2d02c01d23331a3e7e201ee524084b6568ad5c02cea87099a260220713f217a097b005671ca5722bf1b3bb7ff09af1f180588cb218548d5259b96f90121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffb55e800fc21258d468f3083c23fb9bea53e8df1f4bafa70611b8e3962f962c4a000000006a47304402203a81e035fdc369fca3b4754e6f8e0d8e31bf6db24ce9f73a14cca19d96cb2ba3022068e4e2d14bb3d20183501b4e8b94de3d8e034b0cd16ffe3c80b3a6cce5940db20121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffb5b38d9e6c9e7f9be54066aa59711ee0617343bdaf5dbd1678b7c6182fd941ae000000006b483045022100f888a84ab42ac789753f1e99f1bcd8209770e5b4d86143fee7d3bea937f254e802204a43ea0629d74de358fdb0f01d64d1f26607b772b207d428acbdfa33ee2c2d090121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffb6db9493a34df6469dfea3b42ca2f1a25e14fafb997d86f8db2f4af3bd6164fd000000006b483045022100aff4711a2be643f27c9846e608048bd700e8045505bca115db016fac9ae8185002207d513711d8ae82a27edca6b82909a3a290b8becec2eed00241d1220f4ff28f510121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffb6eb859dd5057ee23bfc3f1fc60ff5d04da1c1dc3c54b1cc3230afc03cf16faf000000006a47304402200c8e5ed23a8ea018d03e96c82883098c9d3cb59e9f30b1680081e8863b55be9a0220543b5f70667d181561d3e5b67b139bfd30f480b8ea2e87e133d191436abf32f90121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffb70d0b9165758843ffcaafe2138cb9d4a49172bdc568a1e7783c506374532179000000006a4730440220453516b001177edba5fd195e1e4378c9f81819cd918dc661c4bb16215a5a344302203231cd02e5ce2f303b16181915af4656bd7a4953a9f99ad64696d46f8d1d9acb0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffb77b5c1ab9fe6cefd40562203eb7fb4a05650fe2099364874504c7afd4339f44000000006b4830450221009e8fe143a248aa99204dbd46f554a740d5ef8773f95a6d3822b10f2d97a8d7670220599bf23ed1750697771a0f4a71363082dd58545cc8634a59b97f14f4baef14990121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffb7bb0e4b1fdfc237117b1e967663956bc01284943e842b64b9dca9d8ba015eb6000000006a47304402204734b0c6ae20b54ee43b8702ecffcb6d8346fd2c266ff61c5534d9766f8d452e022070b078441e5d33aaf85d31e4d28a2f6c62d5dc7e27cbf08590580961370b9c170121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffba9e30e05c00c6922779163da0f0a983294770191449201c87d6ff62a86564f8000000006b483045022100d27bc37a60c711c654
c40c48cd6ebeccc8c3ee46e756ee18f967e5cc2cd7335b02202fc1075382576b3366a4adffa6edb63b2b11a48a99f30b2c503271b9a7a016540121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffbb192a43a374efbf4c8a8131c74140867f6174dd576c70146849851eee61177c000000006b483045022100c9ce9b33f4cd4a074380ff9244386ef3613c7e6d96f1e5798ae46807e898622a022064e3ef8efda203930a5038f6528e292fc109468132f5ff98474abc0ea3953ceb0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffbd033eb17852a254e6a3d2b5768f1c2a5bf76dda57249afb34debb8b2d11405b000000006b483045022100f89f7ce37fe0c3b07438a34d0bbc08d02c101a4eda08bc88c0bbea1c40a97be902206f7b048c451aa91c4e04c18ad1efc0cb384c8841979bd43d2fdcf749c9f0bc730121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffbd108201926eee57dde17429cee0d0c39e3ed87d3b17fb135c6fa02ebe4951e6010000006a4730440220108cf7cd5b22ea7301f0cdc92b0f114bbcf0888c575cd20ed47ce1cfaea61b4202206f9f45c76481bebcda40627f18d4b8a33afc9c9807da61972a798f7d94098b730121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffbd5ca8f88d02f0949d5ebd5cb3f55aa29e6fcfe579e22d17aacaf5b326fc190b010000006b4830450221008d62b2e3e95586c7c04f01162eb27ccf84afa3c6584c88de17be2fa449e8d54e0220378eede595dd2dcfa0ea321ad226a6f9f9ba7083ad6dc644dc9447cd187d0da10121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffbda50b579fce635fbd5c221202d4b9053d0b5c4c1892f8acb8dd48fca605cad8000000006a47304402200eab482f47b34d9dec4eafa181bd47fe7ed3a4af4ee6d072ef9caa2a9bd01bbd02200dba2573a77541a671fad34efee98b95dca4bb0d1077475b53e677e75b76a3b20121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffbe635eb9523af40227bf3cdaf0d7f873ea49ad4af84abe8569892eec5e8c98fd000000006a47304402201b931bad9c6bbfae42fe047a7c6f229d91f5e8405814bc0839d17c4143925237022034ce2af3daad37a569e60f7654220b7ab177744ff3b4adbbceecf30ef28d2eb70121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffbf2089806a6cf30943a6126e24dc898d8d1c54927409ffe4366093b87f5e285d000000006a473044022008105ed0c20763d64939c815b8338834a0ac54b57b3d07e397e8a15dbd4883fe0220755c99ebcf6228bcd6c138148e9b1cf81572f46ae675e9d233fcdf53499890860121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffbf99addb3dbb82499590a1ffbf81ddf7f35c0775c1a6b154296cc2aafbacc715010000006b483045022100ff2d4d37db8e0f8017b3f6ce68ec2338c35cadba5f5cab6479ec0801b74efa92022054a95e235a851e84600647002a09e3b8708dccdca8f871f4c533216fc8fbcf350121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffbfee7a63d529933ef9f3d2e46ad9737edd219e995684c225d3325b962ab112dc010000006a4730440220706a08ca4cb305c6fd9d70472263d5f721da33d4f8a60f972add6883f2957b4d022004e1bdbbe5764f55c7448b1bf95aaa5d0ccc66438643d8f88b456a30eed3f8dc0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffc0bb402fd1a215aaea6332d655178e5075e5c9bf6ea10f750d5a8b7018918481000000006a473044022045e8b54352085606b169f69cf7303be2694601fd1e500920919ea253596409700220648549a6feca824b17d4585767bfc568233f227fc82baf3f5847b51cfd63d1c30121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffc11d3a8072f6b4e4d5df29a4dfff8b087aabc1e5ca5f9dfd14556e6bd2ccbf91010000006a47304402207f5fabacb50c5be6bd60a3adffd61e230c08abb1ac075a27ea58cf2c96bfd12302204d20ba436f6531492750ce536cc132ea20f2d04ab9eece00eb9bda4544c10e140121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffc259de25effc3f923a85e3dd26385bb6cca994914e0821e2b77da6ae38527456000000006a47304402207bf11a929a26808fd0748c7ba1c596786d905
d2747959e625cedc0fc1bbd0deb0220431155c9421d1a2f603ad88eecff6ec27f106fd80444a6710a5ca2db92282b9f0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffc3e862358b92d43bc6bd63a434ae25c5ab98f0a59a6495b7aa4491edbbbaa8cf000000006a47304402201b499c3fbeab7cbd86ccd5523dcc2cdf53e367d01d8b88a1f9a967f8ba188a6902205833ab47cea14ba0c5d5fe2373480bd949003c7b000b635fe4c6f7a4fc93c7c90121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffc45f6339d6c97baa2edd16a62cd7e491a5db03e923c6202c94376157df16de51000000006a47304402202736bebb5d6a7db8f211e1c233ad743186bc3c3151f5b14b2ed43c10c03bc3f702202c124288c6ec7bae8a4eaa183966ff34bc4132039486649f87edc6871d0a7dd30121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffc6689b667b0e08ed20e74f0fed6dfd0f8f49aec6f928e63f0e1836b63741c8a2000000006a473044022015bd26bdcd224664acf1498985c1eac6d5ba0631949c225319a4a0ac9ff9862102205130be8314961cc9c20719ef164f11e95d0942a54083469b060831e5af7b15020121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffc77b747e967f3bfacb975dbd476935a16f603ab7a6a912c2c42bc7be311d24da000000006b483045022100a5047855f768b20f4ac4e925dea8b206579c769b6c198de9038e01457b3c3843022022b4b6531c017f6ff3993abb174bd49377c2249c552ed33239297fac9c7cf8300121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffc79ce5df8ebe7c27161cb6e8fc1fc41e88826e729f11910769a8b23a9b5ba9d1000000006a473044022067196c4b172d2fb8201bf13c3ba1b411ddd4df676f7c3d2a62afa1d51bfc86420220183e2d9c7d7f80d34859f23404b842b4d619fadea0f6e0ae9889320c88111c780121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffc7de8f45fa7b1352318cfd7f33e8a0a5f2db650512ba1fa2af5fb04721fa4c8b000000006a473044022017dd4400b66ccd099e398d73dada95948d07affb906baf8d6bb9cebac6bf403702205581479b9e56a2aefbf8ca0dc51432b355dfefa58fbdffd9a0e652baf90cc6520121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffc82243ef0be0a420765e68cbbe6016392964183a3c8dd86b1ff376f710e86fa7000000006b483045022100f36548d481f6e291325c4891b2a99798141124db0f0eeffd51ada175bbe1141e02205336966c409dc66eca7b976fa53bf5add6fa438f4744b9fe87aa6187b5a03f860121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffc930baf44528052f6433f02f3b4d67d7a59f5c737f84751b3f242f2debee45c5000000006b48304502210080f3262f8964f9210f190f2d0ef04965b513d7ff3408ec9947d9665424bc531102203495ee60fec1ec0487566d4aea1c11b515238dbac9f4527cbd41d1fc134500cf0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffcac26af3732d34aa5738db083fdd42ddeb44349c0135463a501746caeeacb9ab000000006b483045022100887e17cac882ee8dbf9b003cac28be34b42fae50003661791b25a7b7874901ee02206767a28e4bb1575b83711275afe32c5a87ecd281ed1a35e8b8c57fb870e6a4b10121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffcc95a02c701be990284ce8dcabf905bb0e0bbdab0fc67172806d6718819f77b3000000006a47304402200ed8ebf853104ad54088a32ac1757cab221f27c67e3e08d3a708767a6f3d3b54022073e9fbdfe2733247a5d5b6f67ca0b583d85d185a81274ed0ad374dd4a5851f2c0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffccc059f13e57c2f8ed0eb6818537e55890a43a02d41560e04e0fbe38cdaae00a000000006b483045022100ac6ea3856bf3a156bc7d5a648df5ac1846ffdc75c68a5c866c8ab48659fcca2c0220400cc9f99a0e5e518edc5c8b97a93d151699afbaf6c1321f29842ccd4190e8e00121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffce1dbb2868ecf22a4b1fbf7f140ec284cebfd008eecf1c846f7f8eed2e30c0eb000000006b4830450221008a848f17579dec5a57fcdbb69e38ddc13ef13612e152e6363d10
b5203ff222d502204d0d58efe619c23fa61b84266e7be0074372b8713e8e9a723a5bd65bb357bcf90121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffce282ce5b81e68803c317883c7bc7e047148739912e90f648e53d516ee6935cb000000006b483045022100ca5f02e740c4edeb5d2c05a7fe35314be672f96758a88f20796491a8d28f991302200fec9eeebc50e0d268086db841cab0a833bf2ee92744e929f7d147626139f56a0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffcf0be33ffaffcec97c0b8ce7984f409adb6633ae2b4b000b57f74d9f5c675c62010000006a47304402206bddd6a58f5e4ce68bf81a35af323adab54ca420207f010e8cceb1fe395474d802203d3bdc008ee39edd9c5aa51ea2da255c26f186cdfc84b28b93b556bdcd83bfef0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffcf641d2f9dded5bca42d26e4adaa014ee59918f4fa97f56b05e086b54eab750a000000006b483045022100d1a1b73ed669b896afdca0ba1b498cb247f5d7472d182a6778fe640852ac8e9d022028063470cfd3bd6fbed7f91a26a066d55a782455e6cd26979d618de1ed0c406b0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffd25662a40bba3b15f65f045e895ac73cafc03e0659bd0f109f24348feb050738000000006a473044022048ac49ca916fde0251d089c56effff46b0aa83381e45f58c54f00ac140e6e07f0220052b8b544231fe12dd59bc59cffacc69779a33603f1ecbeec7cee5bec3f41d3e0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffd71f6f137e6019b4b44d52050f96f5146d2bea07138d46cc3e0092af2a02fb19000000006a4730440220554d1f01312aefad2a8fb1ab193eea71817142c1ee4c2f8a9f61cf0a5af9d708022002786374d0de92c667fc21aab1c3e9dceb750a396262e8356e387b60aaf606a40121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffd7f6698ef060752bcbc51fad9edd3cc0ab678b50cc240a317f4b2e8b17653309000000006a47304402201e5bf034f9f5c4d7a29ab179b02216f02e55989aacd26a4b3bccdc76cb0d149c02204c5d6320c0851c2188be9d21dda93bfa5bd635f5bcecbc0107623b9938ade3410121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffd9c95aab74a5d7b3e484a7143127d4f6d3f8ad990ff26da82764463b10f66842010000006b483045022100afaa0896c1334e7f2e2c68fc2f5175311a597ab2a7c00bee918f3c8fc95841a302202778c7d41c4179f49449ccf1e2dc4df3fc8067759befb9382374231773b333c90121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffdd74c27ba795279d7e191bb067fb615f1a3b04553baf410c3f442bb9b3dc809f010000006a47304402201605547d28538418bb940fee20b0f660901f38a3347502771ee271316503b4b60220789b0e98575ae56ddb1c32a6a0d3fec0a11fa4dc551a114870a55186a37e5db00121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffdd9821077b65c95d6f93c7a39553b2602ce57fc1781f90ddae04aff0f3bc5f61000000006b483045022100dafac6ff0b47c344f6d1434121a76792e832c13ce05d72f7f91965ed5ccacee3022019f0545c4cda79de05591592bc0bc5ec53ca0424f1b2b9d129259c22b59aa3b50121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffde0ecb6db067c2ef47d94c4afa9ba8ce65f0d07e853b03ca06e0a940f166ff29000000006a47304402207cba2c16b0a3342b0c7e541f9a8c3fe6f57db992a8e67c82e9cb1e7472e7bcef022013207556063c1449d71c7bd2aa296c63f9e90dc447e3ee9d3ffc98ede64a2ed90121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffe0d9484309fe694cf94384457c7351014f6a68ed2e6e1a131917de4231277465010000006b483045022100c56c703cc24247847c3f23f10493970cd14be9a6df3c77eb4bca2778f8c13c12022030674b89acf9d78ae8245fa59064bad44a401352de2752697fe089e55a816e270121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffe1d2352d8a9a57c60d193f2f0f0a5528a846c9ed5cb85f7bcd2bf133d9038180000000006a47304402206fff304cc1539707ab33c4cc132dc42aff05d4fcb2e8eebdba42688915949d3f02205
db261120f15c7825d926363d703fe86522ba01902ef84bfb7ee6754ca15ab130121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffe234b4b8f618613466b89bae8fac4f38f79585b8f640cedd25252c1eb66594df000000006a47304402203190dc24dd3c9833336b7060737e8f8e83014cec727b2a7b8e7be106adb88e540220233aa64f36b0a0908072bdc6a5a2ef5bfdbc90c26bd9741e98fe68ea711f03310121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffe30521f04d98cb6dbde485b0d737ad1c013a937150d5fc5c90da4eb3823e7dda000000006b483045022100c0e4234deb51620056bdaee7f315e39e94218c2ed7ffdb997c6536003834ec3602205b3160ce7a6344ebc2d4cef600115cbb112ac59b2cab542912b85e0b8bfdc4e30121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffe3ce3acbf8f52e34d784e0006141d425bdf64e52d9ea4ba6387bf658c2667adb010000006b483045022100f4d5523e696d9f25e9ccef52ff623f95f100678e8e9939f9de44ae1272f077d702207f38da16592a3fbdcda8bf25bd3de34dbc14ef3f8ee7a9e58110d1d4b53c193d0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffe66d7e4c9166e784fae5a614a12a38d25085e982289c948d405cbd42cd120733000000006a473044022079ad5738f41fb8583ad8ff42d972c8344e007ccb0f37fd40c8d197e5806075fa02200b008a31d8ad1561e570952991e2931475664258cb23129f410e5ea963463eb40121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffe704ac601aae73f19603f1572912e8807edd57fb44034d9b212fd32cd53af429000000006b483045022100c6f77756a1ddbab7053035a84e8640edcc0c5e7656a40a752249c1b7207e43690220789900bee0e8f7c6af4fef1533367aa466be9f693e2af45a44609172fca2084b0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffe7e4c487cd709d05565474536e69a87e81c4e64995163b3359fc4ab5bf174add000000006a4730440220696086f5358f0af697ac0d54203ac27ed28008f5d7afb1892b2371499d1da72702205dab0f963a19c1079f62259f0e3654d44714903f4c909c43fab4885de50a1ea10121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffe8591bd9c6c7ad5043f6c477408931286246f2200456e987c96bd35c470ee913000000006a47304402205de653442a9903900e8e043e01ccbe87a183401b13f14aca10e05f26156e84a802207230de43b29a5b28a3cda65e8f34f1fcd58324ccd3af9869f3923b0a9d5676830121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffe8fcfb61f7498545af326a30c3c44695a521d2cc91dc02b8a52ddefe5af1195d010000006b4830450221008ca534a11c3862bf7f5e84b59247e2662110a79c7220bf8aa81599da5bd73caa0220187eb77f9c3d1531e17b2f591a1e3d634c8a8498c0a766ffeae099be06e933250121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffe927957d22a3ffa39e3bb8e7792786c20089e281a0d7e023907fcbb3390e9e32000000006b483045022100990c7104331956db7cd8fd1d77dedf36f3195055f69ab37f86f3c7099310872a022030109ba5419fc09f8a5b502721c560fc6140c5a038fd5a03dd7fdd229d07a8a90121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffea371b075fa7e598ba231d515de9830bd5214d357f0deaefb9571ba4591da5cb000000006a473044022031f1dd3c9a155c6c7f9b0b68ecfdfbe91e8bd74479082c6ce92b49d678a53cc802204178bcc728499d48b472043a20b508ee8e0312f17b813efa6a77e8f474407a640121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffea4fca994d1ef3b93f7b965be034cac10a72db74b0fdbc8dc36a93e3360b1c17000000006b483045022100cfbf496ddf62e8c93312f018f1ddf0613745739b58e5bbf8672559e762297d9a02201208c70ce45659dfd092e618ae94c5f4f705981fa2879212e9cdf6220ff3f4390121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffeaa428f44c8b351de03a2c683f698fb97fc4b6f25ec1d72f88dd678ecde65f1d000000006a4730440220762f9bc2f889fff0aeaa2c1278a7a24a00e055711809fd8f7f4a0fe2e16d70a2022008b92950483db299
3ddca07bbeba1b0f3a08f75d00d28edbd7a59a2829668cab0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffeaa66d86626167c1c0b1ae3c5fc96a66d5900e376ac4faafac579ce15292b8d8000000006b483045022100ba932b6204b00810402310c9a1eec20c13b99b0097e0b9698406717aa2d4a67c02207033fd8cbba0e24496649d463a0ee3bae4b5226ae6f931a20d665d9b0e31a9910121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffeaf28830f302f40eac1183ac1d041423573298f4c640f2e69ecf50762f20c094000000006a4730440220284c9048420afcdf09e27906049cbc819a0d5f6a324565c49fbb2173e546f732022074fbe11fcac088442fb9c97c1164234a0326283bd0982ac261a2f42c5853c22e0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffeb0e8dc21c0e3b97ea3d7b120e0145bae9b39572957d5c709f8b2e55e14cc932010000006a47304402204c51f64acb844524c81b79ae07ed08fc4c6f1cd347f05c56f46a48ce41bd48f202207150d317f9a71a6489e69e9869a8dc75c1918f349ab462f5b6e917fe956059150121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffeb39b5fc1752c61fb0d632f73e42f00a4a6491858a067c13153647108144b26a000000006a473044022024ba4338a9ae5fe6fdf8ee866f4cc003803fe2b2f43802c4efe147248362d195022053e816d4dc72f41dc78671bd78c6a63a8f0a4809bc46337122eb972b2ebd772a0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffeb64cc6215a86228b2d4df67740389de4e321994d39655da89e130711c954268000000006b48304502210090b50982a505ab0de9d7045c82e822f2aafd08c35e7bfcfc2b235a2b52bdca1c02200bb3ad31a3c090de8fce89d8eb885328a53a20bf14efd41c28e0bd074b01e0aa0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffeba23e0c498dc3ea07430f9dad72890b05458ee0a3e3d8480d1060441808cfff000000006a4730440220452d2ff9489d2c7962e3d0b20097d9ae239f2f0b7ab80c8856ebcd79de00e3430220300bed07dc6736eb8d60c7f1501b67206cd0a7a4641011b611d51632b3bb51c30121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffebecd74e1e432534e35ef041f624dae3c836ff95dcdae495f9abfae367982404000000006a473044022018d9dc45aa0dcb5e5cf9931b4afd4e77e2a2ecdf0348f5fa7da1cea51cf2cc6c022059007a6281a38fd83addb5280fbd314940819003f25db36ed194901848e43aaf0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffedcdf9c428b19aaecaff8fe19cbe0b1e1b5989dca25f5f9a88508130049bed4a000000006b483045022100f9d6965f14cd18319550200fbdea7d1d2712b4d3f6687ea519409d08f6700281022009555a880a0c79c6dab4c93c0f37a860a680aeabd85c4db25a7d10f243ae830e0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffee452d4dda3f3568f0ecbecd3cdb5430085a22dfb0c40d370d85794b65df3e8e000000006a473044022009c24fe724924f44aa2f40802870f9f2974a589e60901d993f895185af898c5102205f1de0a98e5bef06991f4a0ecbbf78769c76eee60ef784afc31f5b6ab33585360121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffef2f16af18893848feeae7861dc68bd12ed794c46b534a191698c2e9742d7e66010000006a4730440220399b636e9d98f0126c1b5941d010bb20756aa844164437f3412397df1d34b88d022040484d93303a170666408ef024351e8f6ce7de7eb59bcc6e3de17c7943f8e3c30121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13fefffffff07aef29877108cd676fe709090190949c6df8a53422084e2114689cca0ae1dc000000006a47304402204238b44564987fde4668ae6145e7e1ab556834c96103813cffd9a042cc572d1f022051eb54fa1d213c10193640c70899b7dfa9b4a2b377dc5c5c3bf9c2bcf406e96f0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13fefffffff1c26b96131878d28775850a53691a82bd25f30352ff1e37ddb47b3011269f3e000000006b483045022100afa3f1caa5b5b3e9364eb03da68385b8a750788684afe1d0c79c783cb65e51f80220036903e5966add38f5eff86ac06c4fcc442
49fdb475c55ffce61ab845ebe2f4e0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13fefffffff41b3fbd4efd92419f5b06c2d2e5c0df71dfead9d14b824d45046d75d69cc63c000000006a47304402207328eb181f4020e37f5037556ddc0cddd851bd8bbae1d05955fe3f1aaaaf4256022061f003533d5d440933a3ac82551a01d3f153c1775d446ea14cf2aceee40917be0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13fefffffff4c7406bf59fc57c2c9be3466b1475d57551b6c35a361b933db0c2533c1957d5000000006a47304402201718a84f87b3e257aef763414620cfc4662f9bb2e5a8fcbdc390ed1aa46afd2902205098a6f1f53e20b4b2a1afeb3a35ce942775c7b2e66b20ede16156cd13791b1d0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13fefffffff5b590ccd5c38556fbf35daa1557a82d1c9f4aac07497658b84c54f215cc3e00010000006b4830450221009601180a48f20b718b2c4172a13ab758aa91258540a875575a8e1d1e1a039d290220608af9b9b28a5c38b4c60ffac508136a8c3fee94f8c64e99d178d4eede6824770121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13fefffffff72a3230db3e78f21393ece76b8f9f41e3a1a2739290b3f18ce4e94b81e468c4000000006a4730440220184aeeb212a7b5b487e33cf513fa16fd53f6c09254bc1ee60145c95d0071370e02200ae59368df71c1ef8caf1a0f71339e6b8af5b531d7b955b4dc2984c08aeefd620121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13fefffffff8086db0fb0eed0df150f9e43995f1b7a0295b59e04bf466c7c18ddcea26d483000000006a473044022064e33f86810006a8a8e249cc88ab2a2d47e688ffed12446ad9249799e09e3a5102204a283be835f8b79c19fc423f0e9220fb56a483ddbf576e1eae25fcc0fdf55b710121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13fefffffff94d664a357c197ec89bf5e91e573d3926d6aac881d44cf10baef75b3d5cc6da000000006b483045022100e611387e956cac63daa9c3e116a73abdff1da852d259096b3cdc1f3f9b94aeda02207285c6dbb7d0d13311977c38da9180dd62f3415520faaa614c24c6195053af300121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13fefffffffa4a510ab2dc230c26f84cd252c7fce66d36d66654e2b634794242d6b5ba6404000000006a47304402206bbcc0d9d5f949757cef6415e6b3e9252d58830994d2f374e6d584bbc7904d2002204df280f1f4319275b67edc25ff61b87d0acd745ce88064ddb5f9690f1352ab670121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13fefffffffa7f7c0120824cbb885291f7c9c24c5facc7677f87d97fbfeaf81b930c5cbe14000000006a473044022063ba9496a37eb81bde78feb531187567fa957fdc45933092c173186a4cb2de670220757934dbe159e9a6411b7e597b11b6eeef7a38ff57d54a4891303d3fe7b58d150121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13fefffffffab1a7f322d18aab5c19e26a46e89cbb028c7de9751495102553195072f34cfe000000006a473044022024ddc6dc25a41ecab466f2bf912b9469ddbdcda344ef82b48af651e3f9291a590220037f641457612e77c364b47926291b73deb1f6ba2b47e0176de65354de0169af0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13fefffffffbd01e443b38afe9584b6f3661efd6f248bdb07989f652b4656edbe32586e53b000000006a4730440220277bd2ed2c8c60ddcf4064cf995636e4b403d451fee0f7c8ad6d8bf5689953c002202039a568733b1ea4316284a8bf0ff68b634b7e7a86d86d5160ca9ed0f6a778740121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13fefffffffd4e7a1146cc34603da4daeb52c895e94a55337adc634c313285c1bfad1bc56c000000006a47304402203cabce70168071c56c7fcc8fdf67f57ba7e3b35b15afa1a848b72b34a1ee71d902204f59c01111c03fa0ec1a0ac435330d2ecb26f9ecafd269adbfdbf7cab6a0c21b0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffff37097e5155ebe5d9afa500d471aaa2d3530081bdbdbba015aa97ebe7f89d0b000000006b483045022100f9dd302552fb316a6f9c181517eeac15e1c49826a0d774daf391e75efe1717b602203d43934b1a87e9877f99c2ee520f3e3b1ef4f318adee7027f7f4848e
49c262760121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffffff68d82ccfaeef761c54f9e05d4bc88beb203414ee8d19bc631ed6901667186e000000006a47304402206319d9d9970ea82c9b7107da05c87811f77d9073169fe31b546f3f1d3cb5192a02201dd1a2434c2b7bb14fa15a754a8014261cf5e6eadef8f110bf3bb656c80e137b0121025f2e83c0a102d247a6c4b93960cc0c82f6f5ac7e18e64c85059daca0e45ecc13feffffff01ad8e64aa040000001976a91456511d88bb5037afd0142c2e5d1de0785141ce1088ac272e1600020000000001013efef40f59e8d4344a5f88533c1b7a3837f4baecf714309fde39b7f97117c4d201000000171600143e73b902e26f6f983eb83b9130332a67b0544405feffffff024c936e000000000017a9142e154b7137d2656d71b5f4f554f0424f086e048f8720f40e000000000017a91464df97e85f298dcb76d60766cc3fc448cd284efd87024730440220118f74a9d82b42ac4707256350f696eae7a7a12497ae7cf24aeb2ea7802be8bd02201c32bda2c0dc07ccdc4ee08c33cf7fa3d6a19eb9eb3ad042ad58388f3db676c4012102b2362f1c01d06aafdee1b940b3ad0a3e40f9e90e6ccd82409e311b6314e64a56272e1600"
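# Walk the serialized block: start past the message-header offset
# (BTC_HDR_COMMON_OFF) plus the fixed-size block header (BTC_BLOCK_HDR_SIZE),
# read the transaction-count varint, then step over each transaction in turn;
# a correct parse leaves the offset exactly at the end of the buffer.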
block_bytes = bytearray(convert.hex_to_bytes(segwit_block_hex))
off = BTC_HDR_COMMON_OFF + BTC_BLOCK_HDR_SIZE
tx_count, size = btc_varint_to_int(block_bytes, off)
self.assertEqual(65, tx_count)
off += size
for _ in range(tx_count):
tx_size = get_next_tx_size(block_bytes, off)
off += tx_size
self.assertEqual(off, len(block_bytes))
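# For reference, a minimal sketch of Bitcoin's variable-length integer
# ("CompactSize") encoding that btc_varint_to_int is assumed to implement.
# The name and the (value, bytes_consumed) return shape mirror the call
# above and are assumptions for illustration, not the library's actual code.
def _btc_varint_sketch(buf, off):
    prefix = buf[off]
    if prefix < 0xfd:
        return prefix, 1  # value encoded directly in a single byte
    if prefix == 0xfd:
        return int.from_bytes(buf[off + 1:off + 3], "little"), 3  # uint16
    if prefix == 0xfe:
        return int.from_bytes(buf[off + 1:off + 5], "little"), 5  # uint32
    return int.from_bytes(buf[off + 1:off + 9], "little"), 9  # uint64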
| 5,142.785714 | 143,161 | 0.9985 | 126 | 143,998 | 1,140.706349 | 0.333333 | 0.000209 | 0.000188 | 0.000271 | 0.000417 | 0.000417 | 0.000417 | 0.000417 | 0 | 0 | 0 | 0.649993 | 0.001118 | 143,998 | 27 | 143,162 | 5,333.259259 | 0.349256 | 0 | 0 | 0 | 0 | 0 | 0.993993 | 0.993993 | 0 | 1 | 0 | 0 | 0.117647 | 1 | 0.058824 | false | 0 | 0.294118 | 0 | 0.411765 | 0 | 0 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | null | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
36c5bea6a1b5bbcab0632e0a182b5e57a08c7945 | 11,177 | py | Python | src/model.py | lsheiba/Image-Denoising-with-Deep-CNNs | 8396c5b6db8b6ed0103f873cae67fc07ab760a49 | ["MIT"] | null | null | null | src/model.py | lsheiba/Image-Denoising-with-Deep-CNNs | 8396c5b6db8b6ed0103f873cae67fc07ab760a49 | ["MIT"] | null | null | null | src/model.py | lsheiba/Image-Denoising-with-Deep-CNNs | 8396c5b6db8b6ed0103f873cae67fc07ab760a49 | ["MIT"] | null | null | null |
import os
import numpy as np
import torch
from torch import nn
from torch.nn import functional as F
import torch.utils.data as td
import torchvision as tv
import pandas as pd
from PIL import Image
from matplotlib import pyplot as plt
from utils import imshow, NNRegressor
class DnCNN(NNRegressor):
def __init__(self, D, C=64):
super(DnCNN, self).__init__()
self.D = D
# convolution layers
self.conv = nn.ModuleList()
self.conv.append(nn.Conv2d(3, C, 3, padding=1))
self.conv.extend([nn.Conv2d(C, C, 3, padding=1) for _ in range(D)])
self.conv.append(nn.Conv2d(C, 3, 3, padding=1))
# apply He's initialization
for i in range(len(self.conv[:-1])):
nn.init.kaiming_normal_(
self.conv[i].weight.data, nonlinearity='relu')
# batch normalization
self.bn = nn.ModuleList()
        self.bn.extend([nn.BatchNorm2d(C) for _ in range(D)])  # BatchNorm2d takes num_features; a second positional arg would be read as eps
# initialize the weights of the Batch normalization layers
for i in range(D):
nn.init.constant_(self.bn[i].weight.data, 1.25 * np.sqrt(C))
def forward(self, x):
D = self.D
h = F.relu(self.conv[0](x))
for i in range(D):
h = F.relu(self.bn[i](self.conv[i+1](h)))
y = self.conv[D+1](h) + x
return y
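# A minimal shape-check sketch for the residual design above: DnCNN predicts
# the noise component and adds the input back, so the output shape equals the
# input shape. Tensor sizes here are arbitrary assumptions for illustration.
def _dncnn_shape_check(D=6):
    model = DnCNN(D)
    noisy = torch.randn(1, 3, 64, 64)  # N x C x H x W RGB batch
    with torch.no_grad():
        denoised = model(noisy)
    assert denoised.shape == noisy.shape
    return denoised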
class QDnCNN(NNRegressor):
def __init__(self, D, C=64):
super(QDnCNN, self).__init__()
self.D = D
self.quant = torch.quantization.QuantStub()
self.dequant = torch.quantization.DeQuantStub()
# convolution layers
self.conv = nn.ModuleList()
self.conv.append(nn.Conv2d(3, C, 3, padding=1))
self.conv.extend([nn.Conv2d(C, C, 3, padding=1) for _ in range(D)])
self.conv.append(nn.Conv2d(C, 3, 3, padding=1))
# apply He's initialization
for i in range(len(self.conv[:-1])):
nn.init.kaiming_normal_(
self.conv[i].weight.data, nonlinearity='relu')
# batch normalization
self.bn = nn.ModuleList()
        self.bn.extend([nn.BatchNorm2d(C) for _ in range(D)])  # num_features only
# initialize the weights of the Batch normalization layers
for i in range(D):
nn.init.constant_(self.bn[i].weight.data, 1.25 * np.sqrt(C))
def forward(self, x):
x = self.quant(x)
D = self.D
h = F.relu(self.conv[0](x))
for i in range(D):
h = F.relu(self.bn[i](self.conv[i+1](h)))
y_pre = self.conv[D+1](h)
y_pre, x = self.dequant(y_pre), self.dequant(x)
y = y_pre + x
return y
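# The QuantStub/DeQuantStub pair above is a no-op in fp32; it only takes
# effect after PyTorch's eager post-training quantization flow. A hedged
# sketch of that flow (calibration_batches is a stand-in iterable of inputs):
def _post_training_quantize(model, calibration_batches):
    model.eval()
    model.qconfig = torch.quantization.get_default_qconfig("fbgemm")
    prepared = torch.quantization.prepare(model)
    for batch in calibration_batches:
        prepared(batch)  # observers record activation ranges
    return torch.quantization.convert(prepared)  # swap in int8 modules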
class UDnCNN(NNRegressor):
def __init__(self, D, C=64):
super(UDnCNN, self).__init__()
self.D = D
# convolution layers
self.conv = nn.ModuleList()
self.conv.append(nn.Conv2d(3, C, 3, padding=1))
self.conv.extend([nn.Conv2d(C, C, 3, padding=1) for _ in range(D)])
self.conv.append(nn.Conv2d(C, 3, 3, padding=1))
# apply He's initialization
for i in range(len(self.conv[:-1])):
nn.init.kaiming_normal_(
self.conv[i].weight.data, nonlinearity='relu')
# batch normalization
self.bn = nn.ModuleList()
        self.bn.extend([nn.BatchNorm2d(C) for _ in range(D)])  # num_features only
# initialize the weights of the Batch normalization layers
for i in range(D):
nn.init.constant_(self.bn[i].weight.data, 1.25 * np.sqrt(C))
def forward(self, x):
D = self.D
h = F.relu(self.conv[0](x))
h_buff = []
idx_buff = []
shape_buff = []
for i in range(D//2-1):
shape_buff.append(h.shape)
h, idx = F.max_pool2d(F.relu(self.bn[i](self.conv[i+1](h))),
kernel_size=(2, 2), return_indices=True)
h_buff.append(h)
idx_buff.append(idx)
for i in range(D//2-1, D//2+1):
h = F.relu(self.bn[i](self.conv[i+1](h)))
for i in range(D//2+1, D):
j = i - (D // 2 + 1) + 1
h = F.max_unpool2d(F.relu(self.bn[i](self.conv[i+1]((h+h_buff[-j])/np.sqrt(2)))),
idx_buff[-j], kernel_size=(2, 2), output_size=shape_buff[-j])
y = self.conv[D+1](h) + x
return y
class QUDnCNN(NNRegressor):
def __init__(self, D, C=64):
super(QUDnCNN, self).__init__()
self.D = D
self.quant = torch.quantization.QuantStub()
self.dequant = torch.quantization.DeQuantStub()
# convolution layers
self.conv = nn.ModuleList()
self.conv.append(nn.Conv2d(3, C, 3, padding=1))
self.conv.extend([nn.Conv2d(C, C, 3, padding=1) for _ in range(D)])
self.conv.append(nn.Conv2d(C, 3, 3, padding=1))
# apply He's initialization
for i in range(len(self.conv[:-1])):
nn.init.kaiming_normal_(
self.conv[i].weight.data, nonlinearity='relu')
# batch normalization
self.bn = nn.ModuleList()
        self.bn.extend([nn.BatchNorm2d(C) for _ in range(D)])  # num_features only
# initialize the weights of the Batch normalization layers
for i in range(D):
nn.init.constant_(self.bn[i].weight.data, 1.25 * np.sqrt(C))
    def forward(self, x):
        x = self.quant(x)  # was missing: the stub pair needs a matching quant before the dequant calls below
        D = self.D
h = F.relu(self.conv[0](x))
h_buff = []
idx_buff = []
shape_buff = []
for i in range(D//2-1):
shape_buff.append(h.shape)
h, idx = F.max_pool2d(F.relu(self.bn[i](self.conv[i+1](h))),
kernel_size=(2, 2), return_indices=True)
h_buff.append(h)
idx_buff.append(idx)
for i in range(D//2-1, D//2+1):
h = F.relu(self.bn[i](self.conv[i+1](h)))
for i in range(D//2+1, D):
j = i - (D // 2 + 1) + 1
h = F.max_unpool2d(F.relu(self.bn[i](self.conv[i+1]((h+h_buff[-j])/np.sqrt(2)))),
idx_buff[-j], kernel_size=(2, 2), output_size=shape_buff[-j])
y_pre = self.conv[D+1](h)
y_pre, x = self.dequant(y_pre), self.dequant(x)
y = y_pre + x
return y
class QDUDnCNN(NNRegressor):
def __init__(self, D, C=64):
super(QDUDnCNN, self).__init__()
self.D = D
self.quant = torch.quantization.QuantStub()
self.dequant = torch.quantization.DeQuantStub()
        # compute k (running count of max-pools entered) and l (running count of max-unpools) per layer
k = [0]
k.extend([i for i in range(D//2)])
k.extend([k[-1] for _ in range(D//2, D+1)])
l = [0 for _ in range(D//2+1)]
l.extend([i for i in range(D+1-(D//2+1))])
l.append(l[-1])
# holes and dilations for convolution layers
holes = [2**(kl[0]-kl[1])-1 for kl in zip(k, l)]
dilations = [i+1 for i in holes]
# convolution layers
self.conv = nn.ModuleList()
self.conv.append(
nn.Conv2d(3, C, 3, padding=dilations[0], dilation=dilations[0]))
self.conv.extend([nn.Conv2d(C, C, 3, padding=dilations[i+1],
dilation=dilations[i+1]) for i in range(D)])
self.conv.append(
nn.Conv2d(C, 3, 3, padding=dilations[-1], dilation=dilations[-1]))
# apply He's initialization
for i in range(len(self.conv[:-1])):
nn.init.kaiming_normal_(
self.conv[i].weight.data, nonlinearity='relu')
# batch normalization
self.bn = nn.ModuleList()
        self.bn.extend([nn.BatchNorm2d(C) for _ in range(D)])  # num_features only
# initialize the weights of the Batch normalization layers
for i in range(D):
nn.init.constant_(self.bn[i].weight.data, 1.25 * np.sqrt(C))
def forward(self, x):
x = self.quant(x)
D = self.D
h = F.relu(self.conv[0](x))
h_buff = []
for i in range(D//2 - 1):
torch.backends.cudnn.benchmark = True
h = self.conv[i+1](h)
torch.backends.cudnn.benchmark = False
h = F.relu(self.bn[i](h))
h_buff.append(h)
for i in range(D//2 - 1, D//2 + 1):
torch.backends.cudnn.benchmark = True
h = self.conv[i+1](h)
torch.backends.cudnn.benchmark = False
h = F.relu(self.bn[i](h))
for i in range(D//2 + 1, D):
j = i - (D//2 + 1) + 1
torch.backends.cudnn.benchmark = True
h = self.dequant(h)
h_buff_j = self.dequant(h_buff[-j])
h_sum = (h + h_buff_j) / np.sqrt(2)
h_sum = self.quant(h_sum)
h = self.conv[i+1](h_sum)
torch.backends.cudnn.benchmark = False
h = F.relu(self.bn[i](h))
y_pre = self.conv[D+1](h)
y_pre, x = self.dequant(y_pre), self.dequant(x)
y = y_pre + x
# y = self.dequant(y)
return y
class DUDnCNN(NNRegressor):
def __init__(self, D, C=64):
super(DUDnCNN, self).__init__()
self.D = D
        # compute k (running count of max-pools entered) and l (running count of max-unpools) per layer
k = [0]
k.extend([i for i in range(D//2)])
k.extend([k[-1] for _ in range(D//2, D+1)])
l = [0 for _ in range(D//2+1)]
l.extend([i for i in range(D+1-(D//2+1))])
l.append(l[-1])
# holes and dilations for convolution layers
holes = [2**(kl[0]-kl[1])-1 for kl in zip(k, l)]
dilations = [i+1 for i in holes]
# convolution layers
self.conv = nn.ModuleList()
self.conv.append(
nn.Conv2d(3, C, 3, padding=dilations[0], dilation=dilations[0]))
self.conv.extend([nn.Conv2d(C, C, 3, padding=dilations[i+1],
dilation=dilations[i+1]) for i in range(D)])
self.conv.append(
nn.Conv2d(C, 3, 3, padding=dilations[-1], dilation=dilations[-1]))
# apply He's initialization
for i in range(len(self.conv[:-1])):
nn.init.kaiming_normal_(
self.conv[i].weight.data, nonlinearity='relu')
# batch normalization
self.bn = nn.ModuleList()
        self.bn.extend([nn.BatchNorm2d(C) for _ in range(D)])  # num_features only
# initialize the weights of the Batch normalization layers
for i in range(D):
nn.init.constant_(self.bn[i].weight.data, 1.25 * np.sqrt(C))
def forward(self, x):
D = self.D
h = F.relu(self.conv[0](x))
h_buff = []
for i in range(D//2 - 1):
torch.backends.cudnn.benchmark = True
h = self.conv[i+1](h)
torch.backends.cudnn.benchmark = False
h = F.relu(self.bn[i](h))
h_buff.append(h)
for i in range(D//2 - 1, D//2 + 1):
torch.backends.cudnn.benchmark = True
h = self.conv[i+1](h)
torch.backends.cudnn.benchmark = False
h = F.relu(self.bn[i](h))
for i in range(D//2 + 1, D):
j = i - (D//2 + 1) + 1
torch.backends.cudnn.benchmark = True
h = self.conv[i+1]((h + h_buff[-j]) / np.sqrt(2))
torch.backends.cudnn.benchmark = False
h = F.relu(self.bn[i](h))
y = self.conv[D+1](h) + x
return y
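# Worked example of the k/l bookkeeping in DUDnCNN/QDUDnCNN above: k counts
# pooling levels entered, l counts unpooling levels, and each convolution
# dilates by 2**(k - l), reproducing UDnCNN's receptive-field growth without
# shrinking the feature maps. For D=6 this evaluates to
# dilations == [1, 1, 2, 4, 4, 2, 1, 1] across the D+2 convolutions.
def _dudncnn_dilations(D=6):
    k = [0]
    k.extend(range(D // 2))
    k.extend([k[-1]] * (D + 1 - D // 2))
    l = [0] * (D // 2 + 1)
    l.extend(range(D + 1 - (D // 2 + 1)))
    l.append(l[-1])
    return [2 ** (ki - li) for ki, li in zip(k, l)]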
| 34.496914 | 93 | 0.537264 | 1,700 | 11,177 | 3.45 | 0.067059 | 0.084569 | 0.054561 | 0.060017 | 0.934186 | 0.931799 | 0.929753 | 0.927366 | 0.895652 | 0.895652 | 0 | 0.030331 | 0.309743 | 11,177 | 323 | 94 | 34.603715 | 0.729877 | 0.081686 | 0 | 0.879668 | 0 | 0 | 0.002345 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.049793 | false | 0 | 0.045643 | 0 | 0.145228 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
36cd4ddcc333ba3d1a90edf94d7ff0e3fe27aa7b | 9,177 | py | Python | fly/migrations/0001_initial.py | chunjie-sam-liu/LNCediting | 24c1bbe5f03117da06d3d2fda492d4d5ad45c473 | ["MIT"] | null | null | null | fly/migrations/0001_initial.py | chunjie-sam-liu/LNCediting | 24c1bbe5f03117da06d3d2fda492d4d5ad45c473 | ["MIT"] | 1 | 2020-04-14T11:33:29.000Z | 2020-04-14T11:33:29.000Z | fly/migrations/0001_initial.py | chunjie-sam-liu/LNCediting | 24c1bbe5f03117da06d3d2fda492d4d5ad45c473 | ["MIT"] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-06-16 03:54
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
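# Initial migration for the "fly" app: it creates the lncRNA-editing tables
# declared below. Migrations like this are normally generated with
# `python manage.py makemigrations` and applied with `python manage.py migrate`.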
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='edit_sequence',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('wild_sequence', models.TextField(null=True)),
('wild_energy', models.FloatField(null=True)),
('edit_squence', models.TextField(null=True)),
('edit_energy', models.FloatField(null=True)),
('delta_energy', models.FloatField(null=True)),
],
),
migrations.CreateModel(
name='edit_site_info',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('chromosome', models.CharField(max_length=45, null=True)),
('chr_edit_pos', models.BigIntegerField(null=True)),
('trans_edit_pos', models.BigIntegerField(null=True)),
('lncrna_id', models.CharField(max_length=225, null=True)),
('resource', models.CharField(max_length=225, null=True)),
],
),
migrations.CreateModel(
name='function_gains',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mirna_id', models.CharField(max_length=225, null=True)),
('chromosome', models.CharField(max_length=20, null=True)),
('chr_edit_pos', models.BigIntegerField(null=True)),
('lncrna_id', models.CharField(max_length=225, null=True)),
('resource', models.CharField(max_length=200, null=True)),
('score', models.FloatField(null=True)),
('energy', models.FloatField(null=True)),
('targetscan_start_r', models.BigIntegerField(null=True)),
('targetscan_end_r', models.BigIntegerField(null=True)),
('miranda_start_r', models.BigIntegerField(null=True)),
('miranda_end_r', models.BigIntegerField(null=True)),
('ref_edit_pos', models.IntegerField(null=True)),
('en', models.FloatField(null=True)),
('query_start', models.IntegerField(null=True)),
('query_end', models.IntegerField(null=True)),
('ref_start', models.IntegerField(null=True)),
('ref_end', models.IntegerField(null=True)),
('align_length', models.IntegerField(null=True)),
('query_percentage', models.FloatField(null=True)),
('ref_percentage', models.FloatField(null=True)),
('query_match_sequence', models.CharField(max_length=225, null=True)),
('match_string', models.CharField(max_length=225, null=True)),
('ref_match_sequence', models.CharField(max_length=225, null=True)),
('edit_site_info', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='fly.edit_site_info')),
],
),
migrations.CreateModel(
name='function_losses',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mirna_id', models.CharField(max_length=225, null=True)),
('chromosome', models.CharField(max_length=20, null=True)),
('chr_edit_pos', models.BigIntegerField(null=True)),
('lncrna_id', models.CharField(max_length=225, null=True)),
('resource', models.CharField(max_length=200, null=True)),
('score', models.FloatField(null=True)),
('energy', models.FloatField(null=True)),
('targetscan_start_r', models.BigIntegerField(null=True)),
('targetscan_end_r', models.BigIntegerField(null=True)),
('miranda_start_r', models.BigIntegerField(null=True)),
('miranda_end_r', models.BigIntegerField(null=True)),
('ref_edit_pos', models.IntegerField(null=True)),
('en', models.FloatField(null=True)),
('query_start', models.IntegerField(null=True)),
('query_end', models.IntegerField(null=True)),
('ref_start', models.IntegerField(null=True)),
('ref_end', models.IntegerField(null=True)),
('align_length', models.IntegerField(null=True)),
('query_percentage', models.FloatField(null=True)),
('ref_percentage', models.FloatField(null=True)),
('query_match_sequence', models.CharField(max_length=225, null=True)),
('match_string', models.CharField(max_length=225, null=True)),
('ref_match_sequence', models.CharField(max_length=225, null=True)),
('edit_site_info', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='fly.edit_site_info')),
],
),
migrations.CreateModel(
name='lncrna_info',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('lncrna_id', models.CharField(max_length=225, null=True)),
('combined_resource', models.CharField(max_length=225, null=True)),
('strand', models.CharField(max_length=4, null=True)),
('annotation_by_RADAR', models.CharField(max_length=225, null=True)),
('chromosome', models.CharField(max_length=45, null=True)),
('trans_start', models.BigIntegerField(null=True)),
('trans_end', models.BigIntegerField(null=True)),
('exons_count', models.IntegerField(null=True)),
('exons_start', models.TextField(null=True)),
('exons_end', models.TextField(null=True)),
('wild_sequence', models.TextField(null=True)),
('wild_energy', models.FloatField(null=True)),
('edit_sequence', models.TextField(null=True)),
('edit_energy', models.FloatField(null=True)),
('delta_energy', models.FloatField(null=True)),
('edit_num', models.IntegerField(null=True)),
('gain_num', models.IntegerField(null=True)),
('loss_num', models.IntegerField(null=True)),
],
),
migrations.CreateModel(
name='mirna_info',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('mirna_id', models.CharField(max_length=225)),
('accession', models.CharField(max_length=225)),
('chromosome', models.CharField(max_length=45)),
('strand', models.CharField(max_length=4)),
('start', models.BigIntegerField()),
('end', models.BigIntegerField()),
('mature_sequence', models.CharField(max_length=500)),
('pre_mirna_id', models.CharField(max_length=45)),
('pre_start', models.BigIntegerField()),
('pre_end', models.BigIntegerField()),
('pre_sequence', models.CharField(max_length=10000)),
],
),
migrations.CreateModel(
name='resource',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('lncrna_name', models.CharField(max_length=45, null=True)),
('resource', models.CharField(max_length=45, null=True)),
('description', models.TextField(null=True)),
('link', models.TextField(null=True)),
('lncrna_info', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='fly.lncrna_info')),
],
),
migrations.AddField(
model_name='function_losses',
name='mirna_info',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='fly.mirna_info'),
),
migrations.AddField(
model_name='function_gains',
name='mirna_info',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='fly.mirna_info'),
),
migrations.AddField(
model_name='edit_site_info',
name='lncrna_info',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='fly.lncrna_info'),
),
migrations.AddField(
model_name='edit_sequence',
name='edit_site_info',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='fly.edit_site_info'),
),
]
| 53.354651
| 124
| 0.578947
| 926
| 9,177
| 5.535637
| 0.118791
| 0.121732
| 0.108857
| 0.145142
| 0.855443
| 0.792041
| 0.761412
| 0.746586
| 0.727078
| 0.710691
| 0
| 0.014881
| 0.275035
| 9,177
| 171
| 125
| 53.666667
| 0.755599
| 0.007301
| 0
| 0.693252
| 1
| 0
| 0.145712
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.018405
| 0
| 0.042945
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
36dd1571a89368e5785d988ced6f64c35e490fb1
| 72,845
|
py
|
Python
|
reducer.py
|
vineeths96/QSGD
|
e5b61b1ebfa4288c3c2a9109819b72c13a93b5f7
|
[
"MIT"
] | 1
|
2021-12-06T18:51:26.000Z
|
2021-12-06T18:51:26.000Z
|
reducer.py
|
vineeths96/Gradient-Compression
|
e5b61b1ebfa4288c3c2a9109819b72c13a93b5f7
|
[
"MIT"
] | null | null | null |
reducer.py
|
vineeths96/Gradient-Compression
|
e5b61b1ebfa4288c3c2a9109819b72c13a93b5f7
|
[
"MIT"
] | null | null | null |
import torch
import torch.distributed
from compressors import (
NoneCompressor,
QSGDCompressor,
QSGDWECCompressor,
QSGDWECModCompressor,
TernGradCompressor,
TernGradModCompressor,
QSGDMaxNormCompressor,
# QSGDBPAllReduceCompressor,
# QSGDBPCompressor,
GlobalRandKMaxNormCompressor,
NUQSGDModCompressor,
NUQSGDMaxNormCompressor,
QSGDMaxNormBiasedCompressor,
NUQSGDMaxNormBiasedCompressor,
QSGDMaxNormTwoScaleCompressor,
GlobalRandKMaxNormTwoScaleCompressor,
QSGDMaxNormMultiScaleCompressor,
)
from seed import set_seed
class Reducer:
"""
Base class for Custom Reducers. All reducers derive from this class.
"""
def __init__(self, device, timer):
if torch.distributed.is_available():
self.n_workers = torch.distributed.get_world_size()
self.rank = torch.distributed.get_rank()
else:
self.n_workers = 1
self.rank = 0
self._device = device
self._timer = timer
def reduce(self, grad_in, grad_out):
raise NotImplementedError()
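# A minimal usage sketch of the reduce() contract (illustrative, not part of
# the original file): grad_in is a list of per-parameter gradient tensors,
# grad_out is a same-shaped list that is overwritten in place with the
# averaged result, and the return value is the number of bits communicated.
# `model`, `device` and `timer` below are assumed to exist in the caller.
#
#   reducer = NoneAllReducer(device, timer)        # any concrete Reducer
#   grads = [p.grad for p in model.parameters()]
#   outs = [torch.empty_like(g) for g in grads]
#   bits_sent = reducer.reduce(grads, outs)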
class TensorBuffer:
"""
Class to flatten gradient tensors into a single buffer and unflatten them back.
"""
def __init__(self, tensors):
indices = [0]
for tensor in tensors:
new_end = indices[-1] + tensor.nelement()
indices.append(new_end)
self._start_idx = indices[:-1]
self._end_idx = indices[1:]
self._len_tensors = len(tensors)
self._tensor_shapes = [tensor.size() for tensor in tensors]
self.buffer = torch.cat([tensor.view(-1) for tensor in tensors])
def __getitem__(self, index):
return self.buffer[self._start_idx[index] : self._end_idx[index]].view(self._tensor_shapes[index])
def __len__(self):
return self._len_tensors
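# Usage sketch for TensorBuffer (illustrative): two gradients packed into one
# contiguous buffer, with indexed reads returning reshaped slices.
#
#   grads = [torch.randn(3, 4), torch.randn(5)]
#   buf = TensorBuffer(grads)
#   buf.buffer.nelement()   # 17 == 3*4 + 5
#   buf[0].shape            # torch.Size([3, 4])
#   len(buf)                # 2
#
# __getitem__ slices whatever tensor buf.buffer currently points at, which is
# why the reducers below can assign a decompressed tensor to flat_grad.buffer
# and then read per-layer views back out of it.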
class NoneReducer(Reducer):
"""
All gather reducer without any compression.
"""
def __init__(self, device, timer):
super(NoneReducer, self).__init__(device, timer)
def reduce(self, grad_in, grad_out):
bits_communicated = 0
compressor = NoneCompressor(self._device)
with self._timer("reduce.flat_pack"):
flat_grad = TensorBuffer(grad_in)
with self._timer("reduce.compress", verbosity=2):
compressed_tensor, compressed_tensor_size = compressor.compress(flat_grad.buffer)
with self._timer("reduce.gather", verbosity=2):
if self.n_workers > 1:
collected_tensor_sizes = [torch.empty_like(compressed_tensor_size) for _ in range(self.n_workers)]
size_gather_op = torch.distributed.all_gather(
tensor_list=collected_tensor_sizes,
tensor=compressed_tensor_size,
async_op=True,
)
size_gather_op.wait()
max_size = max(collected_tensor_sizes).item()
padded_compressed_tensors = torch.zeros(max_size, dtype=torch.int64, device=self._device)
padded_compressed_tensors[:compressed_tensor_size] = compressed_tensor
collected_tensors = [
torch.zeros(max_size, dtype=torch.int64, device=self._device) for _ in range(self.n_workers)
]
tensor_gather_op = torch.distributed.all_gather(
tensor_list=collected_tensors,
tensor=padded_compressed_tensors,
async_op=True,
)
tensor_gather_op.wait()
else:
collected_tensors = [compressed_tensor]
collected_tensor_sizes = [compressed_tensor_size]
bits_communicated += self.n_bits(compressed_tensor) + self.n_bits(compressed_tensor_size)
with self._timer("reduce.decompress", verbosity=2):
decompressed_tensors = []
for comp_tensor, comp_tensor_size in zip(collected_tensors, collected_tensor_sizes):
decomp_tensor = compressor.decompress(comp_tensor, comp_tensor_size)
decompressed_tensors.append(decomp_tensor)
with self._timer("reduce.average", verbosity=2):
for out in grad_out:
out[:] = 0.0
for decompressed_tensor in decompressed_tensors:
flat_grad.buffer = decompressed_tensor
for grad, out in zip(flat_grad, grad_out):
grad = grad.to(self._device)
out.add_(other=grad, alpha=1 / self.n_workers)
return bits_communicated
def n_bits(self, tensor):
return 8 * tensor.nelement() * tensor.element_size()
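# n_bits arithmetic, for reference: a float32 tensor with 10 elements has
# element_size() == 4 bytes, so n_bits returns 8 * 10 * 4 = 320 bits.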
class NoneAllReducer(Reducer):
"""
All reduce reducer without any compression.
"""
def __init__(self, device, timer):
super(NoneAllReducer, self).__init__(device, timer)
def reduce(self, grad_in, grad_out):
bits_communicated = 0
with self._timer("reduce.flat_pack"):
flat_grad = TensorBuffer(grad_in)
with self._timer("reduce.allreduce", verbosity=2):
if self.n_workers > 1:
tensor_reduce_op = torch.distributed.all_reduce(tensor=flat_grad.buffer, async_op=True)
tensor_reduce_op.wait()
else:
flat_grad = flat_grad
for out in grad_out:
out[:] = 0.0
for grad, out in zip(flat_grad, grad_out):
grad = grad.to(self._device)
out.add_(other=grad, alpha=1 / self.n_workers)
bits_communicated += self.n_bits(flat_grad.buffer)
return bits_communicated
def n_bits(self, tensor):
return 8 * tensor.nelement() * tensor.element_size()
class QSGDReducer(Reducer):
"""
All gather reducer with QSGD compression and Elias encoding.
"""
def __init__(self, device, timer, quantization_level=8):
super(QSGDReducer, self).__init__(device, timer)
self._quantization_level = quantization_level
def reduce(self, grad_in, grad_out):
bits_communicated = 0
compressor = QSGDCompressor(self._device, self._quantization_level)
with self._timer("reduce.flat_pack"):
flat_grad = TensorBuffer(grad_in)
with self._timer("reduce.compress", verbosity=2):
compressed_tensor, compressed_tensor_size = compressor.compress(flat_grad.buffer)
with self._timer("reduce.gather", verbosity=2):
if self.n_workers > 1:
collected_tensor_sizes = [torch.empty_like(compressed_tensor_size) for _ in range(self.n_workers)]
size_gather_op = torch.distributed.all_gather(
tensor_list=collected_tensor_sizes,
tensor=compressed_tensor_size,
async_op=True,
)
size_gather_op.wait()
max_size = max(collected_tensor_sizes).item()
padded_compressed_tensors = torch.zeros(max_size, dtype=torch.int64, device=self._device)
padded_compressed_tensors[:compressed_tensor_size] = compressed_tensor
collected_tensors = [
torch.zeros(max_size, dtype=torch.int64, device=self._device) for _ in range(self.n_workers)
]
tensor_gather_op = torch.distributed.all_gather(
tensor_list=collected_tensors,
tensor=padded_compressed_tensors,
async_op=True,
)
tensor_gather_op.wait()
else:
collected_tensors = [compressed_tensor]
collected_tensor_sizes = [compressed_tensor_size]
bits_communicated += self.n_bits(compressed_tensor) + self.n_bits(compressed_tensor_size)
with self._timer("reduce.decompress", verbosity=2):
decompressed_tensors = []
for comp_tensor, comp_tensor_size in zip(collected_tensors, collected_tensor_sizes):
decomp_tensor = compressor.decompress(comp_tensor, comp_tensor_size)
decompressed_tensors.append(decomp_tensor)
with self._timer("reduce.average", verbosity=2):
for out in grad_out:
out[:] = 0.0
for decompressed_tensor in decompressed_tensors:
flat_grad.buffer = decompressed_tensor
for grad, out in zip(flat_grad, grad_out):
grad = grad.to(self._device)
out.add_(other=grad, alpha=1 / self.n_workers)
return bits_communicated
def n_bits(self, tensor):
return 8 * tensor.nelement() * tensor.element_size()
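# The quantization behind QSGDCompressor, roughly (a sketch of the scheme from
# the QSGD paper, not the exact implementation in compressors.py): each
# coordinate is stochastically rounded to one of s levels of its magnitude
# relative to the vector norm, so the quantized value is unbiased.
#
#   def qsgd_quantize(v, level):                   # hypothetical helper
#       s = 2 ** level
#       norm = v.norm()                            # assumes norm > 0
#       scaled = v.abs() / norm * s
#       lower = scaled.floor()
#       xi = lower + (torch.rand_like(v) < (scaled - lower)).float()
#       return norm, v.sign(), xi                  # decompress: norm*sign*xi/s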
class QSGDWECReducer(Reducer):
"""
All gather reducer with QSGD compression and without Elias encoding.
All gathers norms, sign array and xi vector.
"""
def __init__(self, device, timer, quantization_level=8):
super(QSGDWECReducer, self).__init__(device, timer)
self._quantization_level = quantization_level
def reduce(self, grad_in, grad_out):
bits_communicated = 0
compressor = QSGDWECCompressor(self._device, self._quantization_level)
with self._timer("reduce.flat_pack"):
flat_grad = TensorBuffer(grad_in)
with self._timer("reduce.compress", verbosity=2):
norm, sign_array, xi_array = compressor.compress(flat_grad.buffer)
with self._timer("reduce.gather", verbosity=2):
if self.n_workers > 1:
collected_norms = [torch.empty_like(norm) for _ in range(self.n_workers)]
norms_gather_op = torch.distributed.all_gather(tensor_list=collected_norms, tensor=norm, async_op=True)
collected_signs = [torch.empty_like(sign_array) for _ in range(self.n_workers)]
signs_gather_op = torch.distributed.all_gather(
tensor_list=collected_signs, tensor=sign_array, async_op=True
)
collected_xis = [torch.empty_like(xi_array) for _ in range(self.n_workers)]
xi_gather_op = torch.distributed.all_gather(tensor_list=collected_xis, tensor=xi_array, async_op=True)
norms_gather_op.wait()
signs_gather_op.wait()
xi_gather_op.wait()
else:
collected_norms = [norm]
collected_signs = [sign_array]
collected_xis = [xi_array]
bits_communicated += self.n_bits(norm) + self.n_bits(sign_array) + self.n_bits(xi_array)
with self._timer("reduce.decompress", verbosity=2):
decompressed_tensors = []
for norm, sign_array, xi_array in zip(collected_norms, collected_signs, collected_xis):
decomp_tensor = compressor.decompress(norm, sign_array, xi_array)
decompressed_tensors.append(decomp_tensor)
with self._timer("reduce.average", verbosity=2):
for out in grad_out:
out[:] = 0.0
for decompressed_tensor in decompressed_tensors:
flat_grad.buffer = decompressed_tensor
for grad, out in zip(flat_grad, grad_out):
grad = grad.to(self._device)
out.add_(other=grad, alpha=1 / self.n_workers)
return bits_communicated
def n_bits(self, tensor):
return 8 * tensor.nelement() * tensor.element_size()
class QSGDWECModReducer(Reducer):
"""
All gather reducer with QSGD compression and without Elias encoding.
All gathers norms, sign array * xi vector.
"""
def __init__(self, device, timer, quantization_level=8):
super(QSGDWECModReducer, self).__init__(device, timer)
self._quantization_level = quantization_level
def reduce(self, grad_in, grad_out):
bits_communicated = 0
compressor = QSGDWECModCompressor(self._device, self._quantization_level)
with self._timer("reduce.flat_pack"):
flat_grad = TensorBuffer(grad_in)
with self._timer("reduce.compress", verbosity=2):
norm, sign_xi_array = compressor.compress(flat_grad.buffer)
with self._timer("reduce.gather", verbosity=2):
if self.n_workers > 1:
collected_norms = [torch.empty_like(norm) for _ in range(self.n_workers)]
norms_gather_op = torch.distributed.all_gather(tensor_list=collected_norms, tensor=norm, async_op=True)
collected_sign_xis = [torch.empty_like(sign_xi_array) for _ in range(self.n_workers)]
sign_xis_gather_op = torch.distributed.all_gather(
tensor_list=collected_sign_xis, tensor=sign_xi_array, async_op=True
)
norms_gather_op.wait()
sign_xis_gather_op.wait()
else:
collected_norms = [norm]
collected_sign_xis = [sign_xi_array]
bits_communicated += self.n_bits(norm) + self.n_bits(sign_xi_array)
with self._timer("reduce.decompress", verbosity=2):
decompressed_tensors = []
for norm, sign_xi_array in zip(collected_norms, collected_sign_xis):
decomp_tensor = compressor.decompress(norm, sign_xi_array)
decompressed_tensors.append(decomp_tensor)
with self._timer("reduce.average", verbosity=2):
for out in grad_out:
out[:] = 0.0
for decompressed_tensor in decompressed_tensors:
flat_grad.buffer = decompressed_tensor
for grad, out in zip(flat_grad, grad_out):
grad = grad.to(self._device)
out.add_(other=grad, alpha=1 / self.n_workers)
return bits_communicated
def n_bits(self, tensor):
return 8 * tensor.nelement() * tensor.element_size()
class TernGradReducer(Reducer):
"""
All gather reducer with TernGrad compression.
All gathers scalers, sign array and b vector.
"""
def __init__(self, device, timer):
super(TernGradReducer, self).__init__(device, timer)
def reduce(self, grad_in, grad_out):
bits_communicated = 0
compressor = TernGradCompressor(self._device)
with self._timer("reduce.flat_pack"):
flat_grad = TensorBuffer(grad_in)
with self._timer("reduce.compress", verbosity=2):
scaler, sign_array, b_array = compressor.compress(flat_grad.buffer)
with self._timer("reduce.gather", verbosity=2):
if self.n_workers > 1:
collected_scalers = [torch.empty_like(scaler) for _ in range(self.n_workers)]
scaler_gather_op = torch.distributed.all_gather(
tensor_list=collected_scalers, tensor=scaler, async_op=True
)
collected_signs = [torch.empty_like(sign_array) for _ in range(self.n_workers)]
signs_gather_op = torch.distributed.all_gather(
tensor_list=collected_signs, tensor=sign_array, async_op=True
)
collected_bs = [torch.empty_like(b_array) for _ in range(self.n_workers)]
b_gather_op = torch.distributed.all_gather(tensor_list=collected_bs, tensor=b_array, async_op=True)
scaler_gather_op.wait()
signs_gather_op.wait()
b_gather_op.wait()
else:
collected_scalers = [scaler]
collected_signs = [sign_array]
collected_bs = [b_array]
bits_communicated += self.n_bits(scaler) + self.n_bits(sign_array) + self.n_bits(b_array)
with self._timer("reduce.decompress", verbosity=2):
decompressed_tensors = []
for scaler, sign_array, b_array in zip(collected_scalers, collected_signs, collected_bs):
decomp_tensor = compressor.decompress(scaler, sign_array, b_array)
decompressed_tensors.append(decomp_tensor)
with self._timer("reduce.average", verbosity=2):
for out in grad_out:
out[:] = 0.0
for decompressed_tensor in decompressed_tensors:
flat_grad.buffer = decompressed_tensor
for grad, out in zip(flat_grad, grad_out):
grad = grad.to(self._device)
out.add_(other=grad, alpha=1 / self.n_workers)
return bits_communicated
def n_bits(self, tensor):
return 8 * tensor.nelement() * tensor.element_size()
class TernGradModReducer(Reducer):
"""
All gather reducer with TernGrad compression.
All gathers scalers, sign array * b vector.
"""
def __init__(self, device, timer):
super(TernGradModReducer, self).__init__(device, timer)
def reduce(self, grad_in, grad_out):
bits_communicated = 0
compressor = TernGradModCompressor(self._device)
with self._timer("reduce.flat_pack"):
flat_grad = TensorBuffer(grad_in)
with self._timer("reduce.compress", verbosity=2):
scaler, sign_b_array = compressor.compress(flat_grad.buffer)
with self._timer("reduce.gather", verbosity=2):
if self.n_workers > 1:
collected_scalers = [torch.empty_like(scaler) for _ in range(self.n_workers)]
scaler_gather_op = torch.distributed.all_gather(
tensor_list=collected_scalers, tensor=scaler, async_op=True
)
collected_sign_bs = [torch.empty_like(sign_b_array) for _ in range(self.n_workers)]
sign_bs_gather_op = torch.distributed.all_gather(
tensor_list=collected_sign_bs, tensor=sign_b_array, async_op=True
)
scaler_gather_op.wait()
sign_bs_gather_op.wait()
else:
collected_scalers = [scaler]
collected_sign_bs = [sign_b_array]
bits_communicated += self.n_bits(scaler) + self.n_bits(sign_b_array)
with self._timer("reduce.decompress", verbosity=2):
decompressed_tensors = []
for scaler, sign_b_array in zip(collected_scalers, collected_sign_bs):
decomp_tensor = compressor.decompress(scaler, sign_b_array)
decompressed_tensors.append(decomp_tensor)
with self._timer("reduce.average", verbosity=2):
for out in grad_out:
out[:] = 0.0
for decompressed_tensor in decompressed_tensors:
flat_grad.buffer = decompressed_tensor
for grad, out in zip(flat_grad, grad_out):
grad = grad.to(self._device)
out.add_(other=grad, alpha=1 / self.n_workers)
return bits_communicated
def n_bits(self, tensor):
return 8 * tensor.nelement() * tensor.element_size()
class QSGDMaxNormReducer(Reducer):
"""
All reduce reducer with QSGD compression and without Elias encoding.
All gathers norms, normalizing with max norm, all reduces sign array * xi vector.
"""
def __init__(self, device, timer, quantization_level=8):
super(QSGDMaxNormReducer, self).__init__(device, timer)
self._quantization_level = quantization_level
def reduce(self, grad_in, grad_out):
bits_communicated = 0
compressor = QSGDMaxNormCompressor(self._device, self._quantization_level)
with self._timer("reduce.flat_pack"):
flat_grad = TensorBuffer(grad_in)
with self._timer("reduce.norm", verbosity=2):
norm = flat_grad.buffer.abs().max()
if self.n_workers > 1:
collected_norms = [torch.empty_like(norm) for _ in range(self.n_workers)]
norms_gather_op = torch.distributed.all_gather(tensor_list=collected_norms, tensor=norm, async_op=True)
norms_gather_op.wait()
max_norm = max(collected_norms)
else:
max_norm = norm
with self._timer("reduce.compress", verbosity=2):
sign_xi_array = compressor.compress(max_norm, flat_grad.buffer)
with self._timer("reduce.reduce.vector", verbosity=2):
if self.n_workers > 1:
sign_xi_reduce_op = torch.distributed.all_reduce(tensor=sign_xi_array, async_op=True)
sign_xi_reduce_op.wait()
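# NOTE: true_divide() is not an in-place op, so the call below discards its
# result and the sum produced by all_reduce is what reaches decompress().
# The actual averaging happens in reduce.setgrad via alpha=1/self.n_workers.
# The same pattern recurs in the other all-reduce reducers below.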
sign_xi_array.true_divide(self.n_workers)
else:
sign_xi_array = sign_xi_array
bits_communicated += self.n_bits(norm) + self.n_bits(sign_xi_array)
with self._timer("reduce.decompress", verbosity=2):
flat_grad.buffer = compressor.decompress(max_norm, sign_xi_array)
with self._timer("reduce.setgrad", verbosity=2):
for out in grad_out:
out[:] = 0.0
for grad, out in zip(flat_grad, grad_out):
grad = grad.to(self._device)
out.add_(other=grad, alpha=1 / self.n_workers)
return bits_communicated
def n_bits(self, tensor):
return 8 * tensor.nelement() * tensor.element_size()
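# Why the max-norm variants can use all_reduce instead of all_gather: every
# worker quantizes against the same shared scale (the max over worker norms),
# so the per-worker sign*xi arrays are directly summable. Assuming
# decompress() is linear in sign_xi_array, decompressing the sum and scaling
# by 1/n_workers (done in reduce.setgrad) equals the average of the workers'
# decompressed gradients.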
# To use the below methods install the custom C++ PyTorch extensions
# class QSGDBPReducer(Reducer):
# """
# All gather reducer with QSGD compression and without Elias encoding.
# All gathers norms, bit packed sign vector, bit packed xi vector.
# """
#
# def __init__(self, device, timer, quantization_level=8):
# super(QSGDBPReducer, self).__init__(device, timer)
# self._quantization_level = quantization_level
#
# def reduce(self, grad_in, grad_out):
# bits_communicated = 0
# compressor = QSGDBPCompressor(self._device, self._quantization_level)
#
# with self._timer("reduce.flat_pack"):
# flat_grad = TensorBuffer(grad_in)
# tensor_size = flat_grad.buffer.shape[0]
#
# with self._timer("reduce.compress", verbosity=2):
# norm, sign_packed, xi_packed, xi_size = compressor.compress(flat_grad.buffer)
#
# with self._timer("reduce.gather", verbosity=2):
# if self.n_workers > 1:
# collected_norms = [torch.empty_like(norm) for _ in range(self.n_workers)]
# norms_gather_op = torch.distributed.all_gather(tensor_list=collected_norms, tensor=norm, async_op=True)
#
# collected_signs = [torch.empty_like(sign_packed) for _ in range(self.n_workers)]
# signs_gather_op = torch.distributed.all_gather(
# tensor_list=collected_signs, tensor=sign_packed, async_op=True
# )
#
# collected_xi_sizes = [torch.empty_like(xi_size) for _ in range(self.n_workers)]
# size_gather_op = torch.distributed.all_gather(
# tensor_list=collected_xi_sizes, tensor=xi_size, async_op=True
# )
# size_gather_op.wait()
#
# max_size = max(collected_xi_sizes).item()
# padded_xi_tensor = torch.zeros(max_size, dtype=torch.int32, device=self._device)
# padded_xi_tensor[:xi_size] = xi_packed
#
# collected_xis = [torch.empty_like(padded_xi_tensor) for _ in range(self.n_workers)]
# xi_gather_op = torch.distributed.all_gather(
# tensor_list=collected_xis, tensor=padded_xi_tensor, async_op=True
# )
#
# norms_gather_op.wait()
# signs_gather_op.wait()
# xi_gather_op.wait()
# else:
# collected_norms = [norm]
# collected_signs = [sign_packed]
# collected_xis = [xi_packed]
#
# bits_communicated += (
# self.n_bits(norm) + self.n_bits(sign_packed) + self.n_bits(xi_packed) + self.n_bits(xi_size)
# )
#
# with self._timer("reduce.decompress", verbosity=2):
# decompressed_tensors = []
# for norm, sign_packed, xi_packed in zip(collected_norms, collected_signs, collected_xis):
# decomp_tensor = compressor.decompress(norm, sign_packed, xi_packed, tensor_size)
# decompressed_tensors.append(decomp_tensor)
#
# with self._timer("reduce.average", verbosity=2):
# for out in grad_out:
# out[:] = 0.0
#
# for decompressed_tensor in decompressed_tensors:
# flat_grad.buffer = decompressed_tensor
# for grad, out in zip(flat_grad, grad_out):
# grad = grad.to(self._device)
# out.add_(other=grad, alpha=1 / self.n_workers)
#
# return bits_communicated
#
# def n_bits(self, tensor):
# return 8 * tensor.nelement() * tensor.element_size()
#
#
# class QSGDBPAllReducer(Reducer):
# """
# All reduce reducer with QSGD compression and without Elias encoding.
# All gathers norms, normalizing with max norm, all reduces packed sign array * xi vector.
# """
#
# def __init__(self, device, timer, quantization_level=8):
# super(QSGDBPAllReducer, self).__init__(device, timer)
# self._quantization_level = quantization_level
#
# def reduce(self, grad_in, grad_out):
# bits_communicated = 0
# compressor = QSGDBPAllReduceCompressor(self._device, self._quantization_level)
#
# with self._timer("reduce.flat_pack"):
# flat_grad = TensorBuffer(grad_in)
#
# with self._timer("reduce.reduce.norm", verbosity=2):
# norm = flat_grad.buffer.abs().max()
#
# if self.n_workers > 1:
# collected_norms = [torch.empty_like(norm) for _ in range(self.n_workers)]
# norms_gather_op = torch.distributed.all_gather(tensor_list=collected_norms, tensor=norm, async_op=True)
#
# norms_gather_op.wait()
# max_norm = max(collected_norms)
# else:
# max_norm = norm
#
# with self._timer("reduce.compress", verbosity=2):
# sign_xi_array = compressor.compress(max_norm, flat_grad.buffer)
#
# with self._timer("reduce.reduce.vector", verbosity=2):
# if self.n_workers > 1:
# sign_xi_reduce_op = torch.distributed.all_reduce(tensor=sign_xi_array, async_op=True)
# sign_xi_reduce_op.wait()
# sign_xi_array.true_divide(self.n_workers)
# else:
# sign_xi_array = sign_xi_array
#
# bits_communicated += self.n_bits(norm) + self.n_bits(sign_xi_array)
#
# with self._timer("reduce.decompress", verbosity=2):
# flat_grad.buffer = compressor.decompress(max_norm, sign_xi_array)
#
# with self._timer("reduce.setgrad", verbosity=2):
# for out in grad_out:
# out[:] = 0.0
#
# for grad, out in zip(flat_grad, grad_out):
# grad = grad.to(self._device)
# out.add_(other=grad, alpha=1 / self.n_workers)
#
# return bits_communicated
#
# def n_bits(self, tensor):
# return 8 * tensor.nelement() * tensor.element_size()
class GlobalRandKMaxNormReducer(Reducer):
"""
All reduce reducer with max norm compression of random K indices.
All gathers norms, normalizing with max norm, all reduces sign array * xi vector.
"""
def __init__(self, device, timer, seed, K=10000, quantization_level=8):
super(GlobalRandKMaxNormReducer, self).__init__(device, timer)
self._quantization_level = quantization_level
self._seed = seed
self._K = K
self._indices_queue = []
def reduce(self, grad_in, grad_out):
bits_communicated = 0
compressor = GlobalRandKMaxNormCompressor(self._device, self._quantization_level)
with self._timer("reduce.flat_pack"):
flat_grad = TensorBuffer(grad_in)
if not self._indices_queue:
set_seed(self._seed)
self._indices_queue = torch.randperm(len(flat_grad.buffer)).split(self._K)
self._indices_queue = list(self._indices_queue)
RandK_indices = self._indices_queue.pop().numpy()
RandK_flat_grad = flat_grad.buffer[RandK_indices]
with self._timer("reduce.norm", verbosity=2):
norm = RandK_flat_grad.abs().max()
if self.n_workers > 1:
collected_norms = [torch.empty_like(norm) for _ in range(self.n_workers)]
norms_gather_op = torch.distributed.all_gather(tensor_list=collected_norms, tensor=norm, async_op=True)
norms_gather_op.wait()
max_norm = max(collected_norms)
else:
max_norm = norm
with self._timer("reduce.compress", verbosity=2):
sign_xi_array = compressor.compress(max_norm, RandK_flat_grad)
with self._timer("reduce.reduce.vector", verbosity=2):
if self.n_workers > 1:
sign_xi_reduce_op = torch.distributed.all_reduce(tensor=sign_xi_array, async_op=True)
sign_xi_reduce_op.wait()
sign_xi_array.true_divide(self.n_workers)
else:
sign_xi_array = sign_xi_array
bits_communicated += self.n_bits(norm) + self.n_bits(sign_xi_array)
with self._timer("reduce.decompress", verbosity=2):
RandK_decompressed = compressor.decompress(max_norm, sign_xi_array)
with self._timer("reduce.setgrad", verbosity=2):
flat_grad.buffer[RandK_indices] = RandK_decompressed
for out in grad_out:
out[:] = 0.0
for grad, out in zip(flat_grad, grad_out):
grad = grad.to(self._device)
out.add_(other=grad, alpha=1 / self.n_workers)
return bits_communicated
def n_bits(self, tensor):
return 8 * tensor.nelement() * tensor.element_size()
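# RandK coordination sketch: every worker calls set_seed(self._seed) before
# torch.randperm, so all workers draw the same permutation and pop identical
# K-sized index blocks from _indices_queue. The chosen indices therefore never
# have to be communicated, unlike TopK, where each worker's indices differ
# and must be gathered.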
class NUQSGDModReducer(Reducer):
"""
All gather reducer with NUQSGD compression and without encoding.
All gathers norms, sign array * xi vector.
"""
def __init__(self, device, timer, quantization_level=8):
super(NUQSGDModReducer, self).__init__(device, timer)
self._quantization_level = quantization_level
def reduce(self, grad_in, grad_out):
bits_communicated = 0
compressor = NUQSGDModCompressor(self._device, self._quantization_level)
with self._timer("reduce.flat_pack"):
flat_grad = TensorBuffer(grad_in)
with self._timer("reduce.compress", verbosity=2):
norm, sign_xi_array = compressor.compress(flat_grad.buffer)
with self._timer("reduce.gather", verbosity=2):
if self.n_workers > 1:
collected_norms = [torch.empty_like(norm) for _ in range(self.n_workers)]
norms_gather_op = torch.distributed.all_gather(tensor_list=collected_norms, tensor=norm, async_op=True)
collected_sign_xis = [torch.empty_like(sign_xi_array) for _ in range(self.n_workers)]
sign_xis_gather_op = torch.distributed.all_gather(
tensor_list=collected_sign_xis, tensor=sign_xi_array, async_op=True
)
norms_gather_op.wait()
sign_xis_gather_op.wait()
else:
collected_norms = [norm]
collected_sign_xis = [sign_xi_array]
bits_communicated += self.n_bits(norm) + self.n_bits(sign_xi_array)
with self._timer("reduce.decompress", verbosity=2):
decompressed_tensors = []
for norm, sign_xi_array in zip(collected_norms, collected_sign_xis):
decomp_tensor = compressor.decompress(norm, sign_xi_array)
decompressed_tensors.append(decomp_tensor)
with self._timer("reduce.average", verbosity=2):
for out in grad_out:
out[:] = 0.0
for decompressed_tensor in decompressed_tensors:
flat_grad.buffer = decompressed_tensor
for grad, out in zip(flat_grad, grad_out):
grad = grad.to(self._device)
out.add_(other=grad, alpha=1 / self.n_workers)
return bits_communicated
def n_bits(self, tensor):
return 8 * tensor.nelement() * tensor.element_size()
class NUQSGDMaxNormReducer(Reducer):
"""
All reduce reducer with NUQSGD compression and without encoding.
All gathers norms, normalizing with max norm, all reduces sign array * xi vector.
"""
def __init__(self, device, timer, quantization_level=8):
super(NUQSGDMaxNormReducer, self).__init__(device, timer)
self._quantization_level = quantization_level
def reduce(self, grad_in, grad_out):
bits_communicated = 0
compressor = NUQSGDMaxNormCompressor(self._device, self._quantization_level)
with self._timer("reduce.flat_pack"):
flat_grad = TensorBuffer(grad_in)
with self._timer("reduce.norm", verbosity=2):
norm = flat_grad.buffer.norm()
if self.n_workers > 1:
collected_norms = [torch.empty_like(norm) for _ in range(self.n_workers)]
norms_gather_op = torch.distributed.all_gather(tensor_list=collected_norms, tensor=norm, async_op=True)
norms_gather_op.wait()
max_norm = max(collected_norms)
else:
max_norm = norm
with self._timer("reduce.compress", verbosity=2):
sign_xi_array = compressor.compress(max_norm, flat_grad.buffer)
with self._timer("reduce.reduce.vector", verbosity=2):
if self.n_workers > 1:
sign_xi_reduce_op = torch.distributed.all_reduce(tensor=sign_xi_array, async_op=True)
sign_xi_reduce_op.wait()
sign_xi_array.true_divide(self.n_workers)
else:
sign_xi_array = sign_xi_array
bits_communicated += self.n_bits(norm) + self.n_bits(sign_xi_array)
with self._timer("reduce.decompress", verbosity=2):
flat_grad.buffer = compressor.decompress(max_norm, sign_xi_array)
with self._timer("reduce.setgrad", verbosity=2):
for out in grad_out:
out[:] = 0.0
for grad, out in zip(flat_grad, grad_out):
grad = grad.to(self._device)
out.add_(other=grad, alpha=1 / self.n_workers)
return bits_communicated
def n_bits(self, tensor):
return 8 * tensor.nelement() * tensor.element_size()
class QSGDMaxNormBiasedReducer(Reducer):
"""
All reduce reducer with Biased QSGD compression and without Elias encoding.
All gathers norms, normalizing with max norm, all reduces floored vector.
"""
def __init__(self, device, timer, quantization_level=8):
super(QSGDMaxNormBiasedReducer, self).__init__(device, timer)
self._quantization_level = quantization_level
def reduce(self, grad_in, grad_out):
bits_communicated = 0
compressor = QSGDMaxNormBiasedCompressor(self._device, self._quantization_level)
with self._timer("reduce.flat_pack"):
flat_grad = TensorBuffer(grad_in)
with self._timer("reduce.norm", verbosity=2):
norm = flat_grad.buffer.abs().max()
if self.n_workers > 1:
collected_norms = [torch.empty_like(norm) for _ in range(self.n_workers)]
norms_gather_op = torch.distributed.all_gather(tensor_list=collected_norms, tensor=norm, async_op=True)
norms_gather_op.wait()
max_norm = max(collected_norms)
else:
max_norm = norm
with self._timer("reduce.compress", verbosity=2):
l_array_floored = compressor.compress(max_norm, flat_grad.buffer)
with self._timer("reduce.reduce.vector", verbosity=2):
if self.n_workers > 1:
l_array_floored_op = torch.distributed.all_reduce(tensor=l_array_floored, async_op=True)
l_array_floored_op.wait()
l_array_floored.true_divide(self.n_workers)
else:
l_array_floored = l_array_floored
bits_communicated += self.n_bits(norm) + self.n_bits(l_array_floored)
with self._timer("reduce.decompress", verbosity=2):
flat_grad.buffer = compressor.decompress(max_norm, l_array_floored)
with self._timer("reduce.setgrad", verbosity=2):
for out in grad_out:
out[:] = 0.0
for grad, out in zip(flat_grad, grad_out):
grad = grad.to(self._device)
out.add_(other=grad, alpha=1 / self.n_workers)
return bits_communicated
def n_bits(self, tensor):
return 8 * tensor.nelement() * tensor.element_size()
class QSGDMaxNormBiasedMemoryReducer(Reducer):
"""
All reduce reducer with Biased QSGD compression with memory and without Elias encoding.
All gathers norms, normalizing with max norm, all reduces floored vector.
"""
def __init__(self, device, timer, quantization_level=8):
super(QSGDMaxNormBiasedMemoryReducer, self).__init__(device, timer)
self._quantization_level = quantization_level
self._memory = []
def reduce(self, grad_in, grad_out):
bits_communicated = 0
compressor = QSGDMaxNormBiasedCompressor(self._device, self._quantization_level)
with self._timer("reduce.flat_pack"):
flat_grad = TensorBuffer(grad_in)
if not self._memory:
self._memory = [torch.zeros_like(grad_tensor) for grad_tensor in grad_in]
self._memory = TensorBuffer(self._memory)
else:
flat_grad.buffer[:] += self._memory.buffer
with self._timer("reduce.norm", verbosity=2):
norm = flat_grad.buffer.abs().max()
if self.n_workers > 1:
collected_norms = [torch.empty_like(norm) for _ in range(self.n_workers)]
norms_gather_op = torch.distributed.all_gather(tensor_list=collected_norms, tensor=norm, async_op=True)
norms_gather_op.wait()
max_norm = max(collected_norms)
else:
max_norm = norm
with self._timer("reduce.compress", verbosity=2):
l_array_floored = compressor.compress(max_norm, flat_grad.buffer)
with self._timer("reduce.set_memory", verbosity=2):
self._memory.buffer[:] = flat_grad.buffer - l_array_floored
with self._timer("reduce.reduce.vector", verbosity=2):
if self.n_workers > 1:
l_array_floored_op = torch.distributed.all_reduce(tensor=l_array_floored, async_op=True)
l_array_floored_op.wait()
l_array_floored.true_divide(self.n_workers)
else:
l_array_floored = l_array_floored
bits_communicated += self.n_bits(norm) + self.n_bits(l_array_floored)
with self._timer("reduce.decompress", verbosity=2):
flat_grad.buffer = compressor.decompress(max_norm, l_array_floored)
with self._timer("reduce.setgrad", verbosity=2):
for out in grad_out:
out[:] = 0.0
for grad, out in zip(flat_grad, grad_out):
grad = grad.to(self._device)
out.add_(other=grad, alpha=1 / self.n_workers)
return bits_communicated
def n_bits(self, tensor):
return 8 * tensor.nelement() * tensor.element_size()
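# Error-feedback sketch for the *Memory* reducers: the residual left by the
# biased compressor is carried into the next step instead of being lost.
#
#   g_t      = grad_t + memory_{t-1}     # flat_grad.buffer[:] += memory
#   c_t      = compress(g_t)             # l_array_floored
#   memory_t = g_t - c_t                 # reduce.set_memory
#   update   = decompress(all_reduce(c_t))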
class NUQSGDMaxNormBiasedReducer(Reducer):
"""
All reduce reducer with Biased NUQSGD compression and without encoding.
All gathers norms, normalizing with max norm, all reduces sign array * xi vector.
"""
def __init__(self, device, timer, quantization_level=8):
super(NUQSGDMaxNormBiasedReducer, self).__init__(device, timer)
self._quantization_level = quantization_level
def reduce(self, grad_in, grad_out):
bits_communicated = 0
compressor = NUQSGDMaxNormBiasedCompressor(self._device, self._quantization_level)
with self._timer("reduce.flat_pack"):
flat_grad = TensorBuffer(grad_in)
with self._timer("reduce.norm", verbosity=2):
norm = flat_grad.buffer.abs().max()
if self.n_workers > 1:
collected_norms = [torch.empty_like(norm) for _ in range(self.n_workers)]
norms_gather_op = torch.distributed.all_gather(tensor_list=collected_norms, tensor=norm, async_op=True)
norms_gather_op.wait()
max_norm = max(collected_norms)
else:
max_norm = norm
with self._timer("reduce.compress", verbosity=2):
l_array_floored = compressor.compress(max_norm, flat_grad.buffer)
with self._timer("reduce.reduce.vector", verbosity=2):
if self.n_workers > 1:
l_array_floored_op = torch.distributed.all_reduce(tensor=l_array_floored, async_op=True)
l_array_floored_op.wait()
l_array_floored.true_divide(self.n_workers)
else:
l_array_floored = l_array_floored
bits_communicated += self.n_bits(norm) + self.n_bits(l_array_floored)
with self._timer("reduce.decompress", verbosity=2):
flat_grad.buffer = compressor.decompress(max_norm, l_array_floored)
with self._timer("reduce.setgrad", verbosity=2):
for out in grad_out:
out[:] = 0.0
for grad, out in zip(flat_grad, grad_out):
grad = grad.to(self._device)
out.add_(other=grad, alpha=1 / self.n_workers)
return bits_communicated
def n_bits(self, tensor):
return 8 * tensor.nelement() * tensor.element_size()
class NUQSGDMaxNormBiasedMemoryReducer(Reducer):
"""
All reduce reducer with Biased NUQSGD compression with memory and without encoding.
All gathers norms, normalizing with max norm, all reduces floored vector.
"""
def __init__(self, device, timer, quantization_level=8):
super(NUQSGDMaxNormBiasedMemoryReducer, self).__init__(device, timer)
self._quantization_level = quantization_level
self._memory = []
def reduce(self, grad_in, grad_out):
bits_communicated = 0
compressor = NUQSGDMaxNormBiasedCompressor(self._device, self._quantization_level)
with self._timer("reduce.flat_pack"):
flat_grad = TensorBuffer(grad_in)
if not self._memory:
self._memory = [torch.zeros_like(grad_tensor) for grad_tensor in grad_in]
self._memory = TensorBuffer(self._memory)
else:
flat_grad.buffer[:] += self._memory.buffer
with self._timer("reduce.norm", verbosity=2):
norm = flat_grad.buffer.abs().max()
if self.n_workers > 1:
collected_norms = [torch.empty_like(norm) for _ in range(self.n_workers)]
norms_gather_op = torch.distributed.all_gather(tensor_list=collected_norms, tensor=norm, async_op=True)
norms_gather_op.wait()
max_norm = max(collected_norms)
else:
max_norm = norm
with self._timer("reduce.compress", verbosity=2):
l_array_floored = compressor.compress(max_norm, flat_grad.buffer)
with self._timer("reduce.set_memory", verbosity=2):
self._memory.buffer[:] = flat_grad.buffer - l_array_floored
with self._timer("reduce.reduce.vector", verbosity=2):
if self.n_workers > 1:
l_array_floored_op = torch.distributed.all_reduce(tensor=l_array_floored, async_op=True)
l_array_floored_op.wait()
l_array_floored.true_divide(self.n_workers)
else:
l_array_floored = l_array_floored
bits_communicated += self.n_bits(norm) + self.n_bits(l_array_floored)
with self._timer("reduce.decompress", verbosity=2):
flat_grad.buffer = compressor.decompress(max_norm, l_array_floored)
with self._timer("reduce.setgrad", verbosity=2):
for out in grad_out:
out[:] = 0.0
for grad, out in zip(flat_grad, grad_out):
grad = grad.to(self._device)
out.add_(other=grad, alpha=1 / self.n_workers)
return bits_communicated
def n_bits(self, tensor):
return 8 * tensor.nelement() * tensor.element_size()
class TopKReducer(Reducer):
"""
TopK reducer with K most important gradient updates layerwise.
All gathers values and indices of top-K from each worker and applies the averaged updates.
"""
def __init__(self, device, timer, K=100):
super(TopKReducer, self).__init__(device, timer)
self._K = K
self._memory = []
def reduce(self, grad_in, grad_out):
bits_communicated = 0
if not self._memory:
self._memory = [torch.zeros_like(grad_tensor) for grad_tensor in grad_in]
else:
for grad, memory in zip(grad_in, self._memory):
grad.add_(other=memory, alpha=1)
with self._timer("reduce.flatpack", verbosity=2):
flat_grad_size = 0
tensor_topK_indices = [0]
for tensor in grad_in:
top_size = min(tensor.nelement(), self._K)
flat_grad_size += top_size
tensor_topK_indices.append(tensor_topK_indices[-1] + top_size)
flat_grad_start_indices = tensor_topK_indices[:-1]
flat_grad_end_indices = tensor_topK_indices[1:]
flat_values = torch.empty(flat_grad_size, device=self._device)
flat_positions = torch.empty(flat_grad_size, device=self._device, dtype=torch.int)
with self._timer("reduce.topk", verbosity=2):
for tensor, start, end in zip(grad_in, flat_grad_start_indices, flat_grad_end_indices):
top_size = min(tensor.nelement(), self._K)
_, positions = torch.topk(tensor.view(-1).abs(), top_size, sorted=False)
values = tensor.view(-1)[positions].contiguous()
flat_values[start:end] = values
flat_positions[start:end] = positions
with self._timer("reduce.memory", verbosity=2):
for tensor, mem, start, end in zip(grad_in, self._memory, flat_grad_start_indices, flat_grad_end_indices):
positions = flat_positions[start:end]
mem[:] = tensor
mem.view(-1)[positions.long()] = 0.0
with self._timer("reduce.gather", verbosity=2):
if self.n_workers > 1:
collected_values = [torch.empty_like(flat_values) for _ in range(self.n_workers)]
values_gather_op = torch.distributed.all_gather(
tensor_list=collected_values, tensor=flat_values, async_op=True
)
collected_positions = [torch.empty_like(flat_positions) for _ in range(self.n_workers)]
positions_gather_op = torch.distributed.all_gather(
tensor_list=collected_positions,
tensor=flat_positions,
async_op=True,
)
values_gather_op.wait()
positions_gather_op.wait()
else:
collected_values = [flat_values]
collected_positions = [flat_positions]
bits_communicated += self.n_bits(flat_values) + self.n_bits(flat_positions)
with self._timer("reduce.combine", verbosity=2):
for out, start, end in zip(grad_out, flat_grad_start_indices, flat_grad_end_indices):
out[:] = 0
for pos, val in zip(collected_positions, collected_values):
positions = pos[start:end]
values = val[start:end]
out.view(-1)[positions.long()] += values / self.n_workers
return bits_communicated
def n_bits(self, tensor):
return 8 * tensor.nelement() * tensor.element_size()
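# TopK communication cost, for intuition: per layer each worker contributes
# top_size float32 values plus top_size int32 positions, i.e. about 64 bits
# per kept entry instead of 32 bits per gradient element, which is why
# bits_communicated counts both flat_values and flat_positions.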
class TopKReducerRatio(Reducer):
"""
TopK reducer with ratio most important gradient updates layerwise.
All gathers values and indices of top-K from each worker and applies the averaged updates.
"""
def __init__(self, device, timer, compression=1 / 100):
super(TopKReducerRatio, self).__init__(device, timer)
self._compression = compression
self._memory = []
def reduce(self, grad_in, grad_out):
bits_communicated = 0
if not self._memory:
self._memory = [torch.zeros_like(grad_tensor) for grad_tensor in grad_in]
else:
for grad, memory in zip(grad_in, self._memory):
grad.add_(other=memory, alpha=1)
with self._timer("reduce.flatpack", verbosity=2):
flat_grad_size = 0
tensor_topK_indices = [0]
for tensor in grad_in:
top_size = max(1, int(self._compression * tensor.nelement()))
flat_grad_size += top_size
tensor_topK_indices.append(tensor_topK_indices[-1] + top_size)
flat_grad_start_indices = tensor_topK_indices[:-1]
flat_grad_end_indices = tensor_topK_indices[1:]
flat_values = torch.empty(flat_grad_size, device=self._device)
flat_positions = torch.empty(flat_grad_size, device=self._device, dtype=torch.int)
with self._timer("reduce.topk", verbosity=2):
for tensor, start, end in zip(grad_in, flat_grad_start_indices, flat_grad_end_indices):
top_size = max(1, int(self._compression * tensor.nelement()))
_, positions = torch.topk(tensor.view(-1).abs(), top_size, sorted=False)
values = tensor.view(-1)[positions].contiguous()
flat_values[start:end] = values
flat_positions[start:end] = positions
with self._timer("reduce.memory", verbosity=2):
for tensor, mem, start, end in zip(grad_in, self._memory, flat_grad_start_indices, flat_grad_end_indices):
positions = flat_positions[start:end]
mem[:] = tensor
mem.view(-1)[positions.long()] = 0.0
with self._timer("reduce.gather", verbosity=2):
if self.n_workers > 1:
collected_values = [torch.empty_like(flat_values) for _ in range(self.n_workers)]
values_gather_op = torch.distributed.all_gather(
tensor_list=collected_values, tensor=flat_values, async_op=True
)
collected_positions = [torch.empty_like(flat_positions) for _ in range(self.n_workers)]
positions_gather_op = torch.distributed.all_gather(
tensor_list=collected_positions,
tensor=flat_positions,
async_op=True,
)
values_gather_op.wait()
positions_gather_op.wait()
else:
collected_values = [flat_values]
collected_positions = [flat_positions]
bits_communicated += self.n_bits(flat_values) + self.n_bits(flat_positions)
with self._timer("reduce.combine", verbosity=2):
for out, start, end in zip(grad_out, flat_grad_start_indices, flat_grad_end_indices):
out[:] = 0
for pos, val in zip(collected_positions, collected_values):
positions = pos[start:end]
values = val[start:end]
out.view(-1)[positions.long()] += values / self.n_workers
return bits_communicated
def n_bits(self, tensor):
return 8 * tensor.nelement() * tensor.element_size()
class GlobalTopKReducer(Reducer):
"""
TopK reducer with K most important gradient updates global.
All gathers values and indices of top-K from each worker and applies the averaged updates.
"""
def __init__(self, device, timer, K=10000):
super(GlobalTopKReducer, self).__init__(device, timer)
self._K = K
self._memory = []
def reduce(self, grad_in, grad_out):
bits_communicated = 0
with self._timer("reduce.flat_pack"):
flat_grad = TensorBuffer(grad_in)
if not self._memory:
self._memory = [torch.zeros_like(grad_tensor) for grad_tensor in grad_in]
self._memory = TensorBuffer(self._memory)
else:
flat_grad.buffer[:] += self._memory.buffer
top_size = min(flat_grad.buffer.nelement(), self._K)
with self._timer("reduce.topk", verbosity=2):
_, positions = torch.topk(flat_grad.buffer.abs(), top_size, sorted=False)
values = flat_grad.buffer[positions].contiguous()
with self._timer("reduce.set_memory", verbosity=2):
self._memory.buffer[:] = flat_grad.buffer
self._memory.buffer[positions] = 0.0
with self._timer("reduce.gather", verbosity=2):
if self.n_workers > 1:
collected_values = [torch.empty_like(values) for _ in range(self.n_workers)]
values_gather_op = torch.distributed.all_gather(
tensor_list=collected_values, tensor=values, async_op=True
)
collected_positions = [torch.empty_like(positions) for _ in range(self.n_workers)]
positions_gather_op = torch.distributed.all_gather(
tensor_list=collected_positions, tensor=positions, async_op=True
)
values_gather_op.wait()
positions_gather_op.wait()
else:
collected_values = [values]
collected_positions = [positions]
bits_communicated += self.n_bits(values) + self.n_bits(positions)
with self._timer("reduce.combine", verbosity=2):
for pos, val in zip(collected_positions, collected_values):
flat_grad.buffer[pos] += val / self.n_workers
with self._timer("reduce.setgrad", verbosity=2):
for out in grad_out:
out[:] = 0.0
for grad, out in zip(flat_grad, grad_out):
grad = grad.to(self._device)
out.add_(other=grad, alpha=1 / self.n_workers)
return bits_communicated
def n_bits(self, tensor):
return 8 * tensor.nelement() * tensor.element_size()
class GlobalTopKReducerRatio(Reducer):
"""
TopK reducer with ratio most important gradient updates global.
All gathers values and indices of top-K from each worker and applies the averaged updates.
"""
def __init__(self, device, timer, compression=1 / 100):
super(GlobalTopKReducerRatio, self).__init__(device, timer)
self._compression = compression
self._memory = []
def reduce(self, grad_in, grad_out):
bits_communicated = 0
with self._timer("reduce.flat_pack"):
flat_grad = TensorBuffer(grad_in)
if not self._memory:
self._memory = [torch.zeros_like(grad_tensor) for grad_tensor in grad_in]
self._memory = TensorBuffer(self._memory)
else:
flat_grad.buffer[:] += self._memory.buffer
top_size = max(1, int(self._compression * flat_grad.buffer.nelement()))
with self._timer("reduce.topk", verbosity=2):
_, positions = torch.topk(flat_grad.buffer.abs(), top_size, sorted=False)
values = flat_grad.buffer[positions].contiguous()
with self._timer("reduce.set_memory", verbosity=2):
self._memory.buffer[:] = flat_grad.buffer
self._memory.buffer[positions] = 0.0
with self._timer("reduce.gather", verbosity=2):
if self.n_workers > 1:
collected_values = [torch.empty_like(values) for _ in range(self.n_workers)]
values_gather_op = torch.distributed.all_gather(
tensor_list=collected_values, tensor=values, async_op=True
)
collected_positions = [torch.empty_like(positions) for _ in range(self.n_workers)]
positions_gather_op = torch.distributed.all_gather(
tensor_list=collected_positions, tensor=positions, async_op=True
)
values_gather_op.wait()
positions_gather_op.wait()
else:
collected_values = [values]
collected_positions = [positions]
bits_communicated += self.n_bits(values) + self.n_bits(positions)
with self._timer("reduce.combine", verbosity=2):
for pos, val in zip(collected_positions, collected_values):
flat_grad.buffer[pos] += val / self.n_workers
with self._timer("reduce.setgrad", verbosity=2):
for out in grad_out:
out[:] = 0.0
for grad, out in zip(flat_grad, grad_out):
grad = grad.to(self._device)
out.add_(other=grad, alpha=1 / self.n_workers)
return bits_communicated
def n_bits(self, tensor):
return 8 * tensor.nelement() * tensor.element_size()
class QSGDMaxNormTwoScaleReducer(Reducer):
"""
All reduce reducer with QSGD MaxNorm Two Level compression.
All gathers norms, normalizes with max norm, finds a common low resolution mask,
and all reduces the two scale sign array * xi vector.
"""
def __init__(self, device, timer, lower_quantization_level=6, higher_quantization_level=10):
super(QSGDMaxNormTwoScaleReducer, self).__init__(device, timer)
self._lower_quantization_level = lower_quantization_level
self._higher_quantization_level = higher_quantization_level
def reduce(self, grad_in, grad_out):
bits_communicated = 0
compressor = QSGDMaxNormTwoScaleCompressor(
self._device,
self._lower_quantization_level,
self._higher_quantization_level,
)
with self._timer("reduce.flat_pack"):
flat_grad = TensorBuffer(grad_in)
with self._timer("reduce.norm", verbosity=2):
norm = flat_grad.buffer.abs().max()
if self.n_workers > 1:
collected_norms = [torch.empty_like(norm) for _ in range(self.n_workers)]
norms_gather_op = torch.distributed.all_gather(tensor_list=collected_norms, tensor=norm, async_op=True)
norms_gather_op.wait()
max_norm = max(collected_norms)
else:
max_norm = norm
with self._timer("reduce.compress", verbosity=2):
sign_xi_array_lower = compressor.compress_lower(max_norm, flat_grad.buffer)
sign_xi_array_higher, higher_resolution_mask = compressor.compress_higher(max_norm, flat_grad.buffer)
if self.n_workers > 1:
high_mask_op = torch.distributed.all_reduce(
tensor=higher_resolution_mask,
op=torch.distributed.ReduceOp.PRODUCT,
async_op=True,
)
high_mask_op.wait()
else:
higher_resolution_mask = higher_resolution_mask
sign_xi_array = (
higher_resolution_mask * sign_xi_array_higher + (1 - higher_resolution_mask) * sign_xi_array_lower
)
with self._timer("reduce.reduce.vector", verbosity=2):
if self.n_workers > 1:
sign_xi_reduce_op = torch.distributed.all_reduce(tensor=sign_xi_array, async_op=True)
sign_xi_reduce_op.wait()
sign_xi_array.true_divide(self.n_workers)
else:
sign_xi_array = sign_xi_array
bits_communicated += self.n_bits(norm) + self.n_bits(higher_resolution_mask) + self.n_bits(sign_xi_array)
with self._timer("reduce.decompress", verbosity=2):
flat_grad.buffer = compressor.decompress(max_norm, sign_xi_array, higher_resolution_mask)
with self._timer("reduce.setgrad", verbosity=2):
for out in grad_out:
out[:] = 0.0
for grad, out in zip(flat_grad, grad_out):
grad = grad.to(self._device)
out.add_(other=grad, alpha=1 / self.n_workers)
return bits_communicated
def n_bits(self, tensor):
return 8 * tensor.nelement() * tensor.element_size()
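# Two-scale mask sketch: compress_higher marks the coordinates it wants at
# high resolution with a {0, 1} mask; all_reduce with ReduceOp.PRODUCT keeps
# a 1 only where every worker chose high resolution, so all workers mix the
# same per-coordinate quantization levels and the summed sign_xi arrays stay
# consistent.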
class GlobalRandKMaxNormTwoScaleReducer(Reducer):
"""
All reduce reducer with QSGD MaxNorm Two Level compression of random K indices.
All gathers norms, normalizes with max norm, finds a common low resolution mask,
and all reduces the two scale sign array * xi vector.
"""
def __init__(
self,
device,
timer,
seed,
K=10000,
lower_quantization_level=6,
higher_quantization_level=10,
):
super(GlobalRandKMaxNormTwoScaleReducer, self).__init__(device, timer)
self._seed = seed
self._lower_quantization_level = lower_quantization_level
self._higher_quantization_level = higher_quantization_level
self._K = K
self._indices_queue = []
def reduce(self, grad_in, grad_out):
bits_communicated = 0
compressor = GlobalRandKMaxNormTwoScaleCompressor(
self._device,
self._lower_quantization_level,
self._higher_quantization_level,
)
with self._timer("reduce.flat_pack"):
flat_grad = TensorBuffer(grad_in)
if not self._indices_queue:
set_seed(self._seed)
self._indices_queue = torch.randperm(len(flat_grad.buffer)).split(self._K)
self._indices_queue = list(self._indices_queue)
RandK_indices = self._indices_queue.pop().numpy()
RandK_flat_grad = flat_grad.buffer[RandK_indices]
with self._timer("reduce.norm", verbosity=2):
norm = RandK_flat_grad.abs().max()
if self.n_workers > 1:
collected_norms = [torch.empty_like(norm) for _ in range(self.n_workers)]
norms_gather_op = torch.distributed.all_gather(tensor_list=collected_norms, tensor=norm, async_op=True)
norms_gather_op.wait()
max_norm = max(collected_norms)
else:
max_norm = norm
with self._timer("reduce.compress", verbosity=2):
sign_xi_array_lower = compressor.compress_lower(max_norm, RandK_flat_grad)
sign_xi_array_higher, higher_resolution_mask = compressor.compress_higher(max_norm, RandK_flat_grad)
if self.n_workers > 1:
high_mask_op = torch.distributed.all_reduce(
tensor=higher_resolution_mask,
op=torch.distributed.ReduceOp.PRODUCT,
async_op=True,
)
high_mask_op.wait()
else:
higher_resolution_mask = higher_resolution_mask
sign_xi_array = (
higher_resolution_mask * sign_xi_array_higher + (1 - higher_resolution_mask) * sign_xi_array_lower
)
with self._timer("reduce.reduce.vector", verbosity=2):
if self.n_workers > 1:
sign_xi_reduce_op = torch.distributed.all_reduce(tensor=sign_xi_array, async_op=True)
sign_xi_reduce_op.wait()
sign_xi_array.true_divide(self.n_workers)
else:
sign_xi_array = sign_xi_array
bits_communicated += self.n_bits(norm) + self.n_bits(higher_resolution_mask) + self.n_bits(sign_xi_array)
with self._timer("reduce.decompress", verbosity=2):
RandK_decompressed = compressor.decompress(max_norm, sign_xi_array, higher_resolution_mask)
with self._timer("reduce.setgrad", verbosity=2):
flat_grad.buffer[RandK_indices] = RandK_decompressed
for out in grad_out:
out[:] = 0.0
for grad, out in zip(flat_grad, grad_out):
grad = grad.to(self._device)
out.add_(other=grad, alpha=1 / self.n_workers)
return bits_communicated
def n_bits(self, tensor):
return 8 * tensor.nelement() * tensor.element_size()
class QSGDMaxNormMultiScaleReducer(Reducer):
"""
All reduce reducer with QSGD MaxNorm Multi Level compression.
All gathers norms, normalizes with max norm, finds a common resolution mask,
and all reduces the multi scale sign array * xi vector.
"""
def __init__(self, device, timer, quantization_levels=None):
super(QSGDMaxNormMultiScaleReducer, self).__init__(device, timer)
if not quantization_levels:
quantization_levels = [6, 10]
quantization_levels.sort()
self._quantization_levels = quantization_levels
def reduce(self, grad_in, grad_out):
bits_communicated = 0
compressor = QSGDMaxNormMultiScaleCompressor(
self._device,
self._quantization_levels,
)
with self._timer("reduce.flat_pack"):
flat_grad = TensorBuffer(grad_in)
with self._timer("reduce.norm", verbosity=2):
norm = flat_grad.buffer.abs().max()
            if self.n_workers > 1:
                # Elementwise MAX all-reduce yields the global max norm.
                norm_op = torch.distributed.all_reduce(
                    tensor=norm,
                    op=torch.distributed.ReduceOp.MAX,
                    async_op=True,
                )
                norm_op.wait()
            max_norm = norm
with self._timer("reduce.compress", verbosity=2):
resolution_mask = compressor.compress_mask(max_norm, flat_grad.buffer)
            if self.n_workers > 1:
                # Workers agree on the coarsest common level per coordinate
                # via an elementwise MIN over the local resolution masks.
                high_mask_op = torch.distributed.all_reduce(
                    tensor=resolution_mask,
                    op=torch.distributed.ReduceOp.MIN,
                    async_op=True,
                )
                high_mask_op.wait()
sign_xi_array = compressor.compress(resolution_mask)
with self._timer("reduce.reduce.vector", verbosity=2):
if self.n_workers > 1:
sign_xi_reduce_op = torch.distributed.all_reduce(tensor=sign_xi_array, async_op=True)
sign_xi_reduce_op.wait()
sign_xi_array.true_divide(self.n_workers)
else:
sign_xi_array = sign_xi_array
bits_communicated += self.n_bits(norm) + self.n_bits(resolution_mask) + self.n_bits(sign_xi_array)
with self._timer("reduce.decompress", verbosity=2):
flat_grad.buffer = compressor.decompress(max_norm, sign_xi_array, resolution_mask)
with self._timer("reduce.setgrad", verbosity=2):
for out in grad_out:
out[:] = 0.0
for grad, out in zip(flat_grad, grad_out):
grad = grad.to(self._device)
out.add_(other=grad, alpha=1 / self.n_workers)
return bits_communicated
def n_bits(self, tensor):
return 8 * tensor.nelement() * tensor.element_size()
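# Standalone sketch (not the actual compressor, which is defined elsewhere in
# this module) of the max-norm QSGD quantization these reducers build on:
# normalize by the global max norm, stochastically round the magnitudes onto
# a uniform grid with 2**levels points, and keep the signs separately.
# Assumes x is not all zeros (max_norm > 0).
def _qsgd_max_norm_quantize_demo(x, levels=6):
    s = 2 ** levels
    max_norm = x.abs().max()
    scaled = x.abs() / max_norm * s
    xi = torch.floor(scaled + torch.rand_like(x))  # unbiased stochastic rounding
    sign_xi = torch.sign(x) * xi
    return sign_xi * max_norm / s  # dequantized approximation of x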
class RankKReducer(Reducer):
def __init__(self, device, timer, n_power_iterations=0, reuse_query=False, rank=1):
super().__init__(device, timer)
assert n_power_iterations == 0
self.rank = rank
self.p_memory = None
self.q_memory = None
self.reuse_query = reuse_query
self._memory = []
def reduce(self, grad_in, grad_out):
bits_communicated = 0
if not self._memory:
self._memory = [torch.zeros_like(grad) for grad in grad_in]
self._memory = TensorBuffer(self._memory)
else:
for grad, mem in zip(grad_in, self._memory):
grad[:] += mem
rank1_tensors = [
(tensor, out, mem) for tensor, out, mem in zip(grad_in, grad_out, self._memory) if tensor.ndimension() <= 1
]
high_rank_tensors = [
(tensor, out, mem) for tensor, out, mem in zip(grad_in, grad_out, self._memory) if tensor.ndimension() > 1
]
memory_is_uninitialized = self.p_memory is None
with self._timer("reduce.allocate_memory", verbosity=2):
p_total_size = 0
q_total_size = 0
for tensor, _, _ in high_rank_tensors:
matrix = tensor.view(tensor.shape[0], -1)
n, m = matrix.shape
rank = min(n, m, self.rank)
p_total_size += n * rank
q_total_size += m * rank
if self.p_memory is None:
self.p_memory = torch.empty(p_total_size, device=self._device)
self.q_memory = torch.empty(q_total_size, device=self._device)
ps = []
qs = []
p_idx = 0
q_idx = 0
for tensor, _, _ in high_rank_tensors:
matrix = tensor.view(tensor.shape[0], -1)
n, m = matrix.shape
rank = min(n, m, self.rank)
ps.append(self.p_memory[p_idx : p_idx + n * rank].view(n, rank))
qs.append(self.q_memory[q_idx : q_idx + m * rank].view(m, rank))
p_idx += n * rank
q_idx += m * rank
with self._timer("reduce.prepare.q", verbosity=2):
for (tensor, _, _), q, p in zip(high_rank_tensors, qs, ps):
matrix = tensor.view(tensor.shape[0], -1)
n, m = matrix.shape
if self.reuse_query and not memory_is_uninitialized:
# orthogonalize(q)
pass
else:
q.normal_()
with self._timer("reduce.compute.p", verbosity=2):
for (tensor, _, _), q, p in zip(high_rank_tensors, qs, ps):
matrix = tensor.view(tensor.shape[0], -1)
torch.matmul(matrix, q, out=p)
with self._timer("reduce.p", verbosity=2):
            if self.n_workers > 1:
                p_memory_reduce_op = torch.distributed.all_reduce(tensor=self.p_memory, async_op=True)
                p_memory_reduce_op.wait()
bits_communicated += self.n_bits(self.p_memory)
with self._timer("reduce.rank1.pack", verbosity=2):
rank1_tensor_list = TensorBuffer([tensor for (tensor, _, _) in rank1_tensors])
with self._timer("reduce.rank1.all_reduce", verbosity=2):
            if self.n_workers > 1:
                # Launched asynchronously; the wait happens in
                # reduce.rank1.unpack, overlapping this transfer with the
                # orthogonalization and q computation below.
                tensor_reduce_op = torch.distributed.all_reduce(tensor=rank1_tensor_list.buffer, async_op=True)
            else:
                tensor_reduce_op = None
bits_communicated += self.n_bits(rank1_tensor_list.buffer)
with self._timer("reduce.normalize.p", verbosity=2):
for p in ps:
self.orthogonalize(p)
with self._timer("reduce.compute.q", verbosity=2):
for p, q, (tensor, _, _) in zip(ps, qs, high_rank_tensors):
matrix = tensor.view(tensor.shape[0], -1)
torch.matmul(matrix.t(), p, out=q)
with self._timer("reduce.q", verbosity=2):
            if self.n_workers > 1:
                q_memory_reduce_op = torch.distributed.all_reduce(tensor=self.q_memory, async_op=True)
                q_memory_reduce_op.wait()
bits_communicated += self.n_bits(self.q_memory)
self.q_memory.data[:] /= self.n_workers
with self._timer("reduce.outerprod", verbosity=2):
for p, q, (tensor, out, mem) in zip(ps, qs, high_rank_tensors):
torch.matmul(p, q.t(), out=out[:])
mem[:] = tensor - out
with self._timer("reduce.rank1.unpack", verbosity=2):
tensor_reduce_op.wait()
rank1_tensor_list.buffer /= self.n_workers
for grad, out in zip(rank1_tensor_list, [out for (_, out, _) in rank1_tensors]):
grad = grad.to(self._device)
out.add_(other=grad, alpha=1 / self.n_workers)
return bits_communicated
    def orthogonalize(self, matrix, eps=1e-8):
        # Modified Gram-Schmidt over the columns of `matrix`; eps guards
        # against division by zero for (near-)zero columns.
n, m = matrix.shape
for i in range(m):
col = matrix[:, i : i + 1]
col /= torch.sqrt(torch.sum(col ** 2)) + eps
if i + 1 < m:
rest = matrix[:, i + 1 :]
rest -= torch.sum(col * rest, dim=0) * col
def n_bits(self, tensor):
return 8 * tensor.nelement() * tensor.element_size()
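# Standalone illustration (not part of the original module) of the rank-k
# scheme implemented above: project the gradient matrix M onto a random
# query Q, orthogonalize P = M @ Q, recompute Q = M.T @ P, and reconstruct
# M ~= P @ Q.T. Workers then only need to all-reduce P and Q instead of M.
def _rank_k_approximation_demo(n=64, m=32, rank=4):
    torch.manual_seed(0)
    M = torch.randn(n, m)
    Q = torch.randn(m, rank)
    P = M @ Q
    # Gram-Schmidt over columns, mirroring RankKReducer.orthogonalize
    for i in range(rank):
        col = P[:, i : i + 1]
        col /= torch.sqrt(torch.sum(col ** 2)) + 1e-8
        if i + 1 < rank:
            P[:, i + 1 :] -= torch.sum(col * P[:, i + 1 :], dim=0) * col
    Q = M.t() @ P
    approx = P @ Q.t()
    return (M - approx).norm() / M.norm()  # well below 1 for rank << min(n, m)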
| 39.185046 | 121 | 0.616391 | 8,603 | 72,845 | 4.906544 | 0.032547 | 0.021085 | 0.044041 | 0.064367 | 0.893795 | 0.875459 | 0.868992 | 0.856317 | 0.845467 | 0.828481 | 0 | 0.008106 | 0.290452 | 72,845 | 1,858 | 122 | 39.206136 | 0.808559 | 0.120804 | 0 | 0.770627 | 0 | 0 | 0.03155 | 0.000709 | 0 | 0 | 0 | 0 | 0.000825 | 1 | 0.061881 | false | 0.000825 | 0.0033 | 0.020627 | 0.125413 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
3d0fdd4d8b1937829d755d7177fb0fa87a748478 | 12,177 | py | Python | testsuite/spline-reg/run.py | luyatshimbalanga/OpenShadingLanguage | 2120647911af732f0d12d70e2f7f4e1ebe8fadcb | ["BSD-3-Clause"] | 1,105 | 2015-01-02T20:47:19.000Z | 2021-01-25T13:20:56.000Z | testsuite/spline-reg/run.py | luyatshimbalanga/OpenShadingLanguage | 2120647911af732f0d12d70e2f7f4e1ebe8fadcb | ["BSD-3-Clause"] | 696 | 2015-01-07T23:42:08.000Z | 2021-01-25T03:55:08.000Z | testsuite/spline-reg/run.py | luyatshimbalanga/OpenShadingLanguage | 2120647911af732f0d12d70e2f7f4e1ebe8fadcb | ["BSD-3-Clause"] | 248 | 2015-01-05T13:41:28.000Z | 2021-01-24T23:29:55.000Z |
#!/usr/bin/env python
# Copyright Contributors to the Open Shading Language project.
# SPDX-License-Identifier: BSD-3-Clause
# https://github.com/AcademySoftwareFoundation/OpenShadingLanguage
command += testshade("-t 1 -g 64 64 -od uint8 -o Fout spline_c_float_v_floatarray.tif test_spline_c_float_v_floatarray")
command += testshade("-t 1 -g 64 64 -od uint8 -o Fout spline_c_float_u_floatarray.tif test_spline_c_float_u_floatarray")
command += testshade("-t 1 -g 64 64 -od uint8 -o Fout spline_c_float_c_floatarray.tif test_spline_c_float_c_floatarray")
outputs.append ("spline_c_float_v_floatarray.tif")
outputs.append ("spline_c_float_u_floatarray.tif")
outputs.append ("spline_c_float_c_floatarray.tif")
command += testshade("-t 1 -g 64 64 -od uint8 -o Fout spline_u_float_v_floatarray.tif test_spline_u_float_v_floatarray")
command += testshade("-t 1 -g 64 64 -od uint8 -o Fout spline_u_float_u_floatarray.tif test_spline_u_float_u_floatarray")
command += testshade("-t 1 -g 64 64 -od uint8 -o Fout spline_u_float_c_floatarray.tif test_spline_u_float_c_floatarray")
outputs.append ("spline_u_float_v_floatarray.tif")
outputs.append ("spline_u_float_u_floatarray.tif")
outputs.append ("spline_u_float_c_floatarray.tif")
command += testshade("-t 1 -g 64 64 -od uint8 -o Fout spline_v_float_v_floatarray.tif test_spline_v_float_v_floatarray")
command += testshade("-t 1 -g 64 64 -od uint8 -o Fout spline_v_float_u_floatarray.tif test_spline_v_float_u_floatarray")
command += testshade("-t 1 -g 64 64 -od uint8 -o Fout spline_v_float_c_floatarray.tif test_spline_v_float_c_floatarray")
outputs.append ("spline_v_float_v_floatarray.tif")
outputs.append ("spline_v_float_u_floatarray.tif")
outputs.append ("spline_v_float_c_floatarray.tif")
command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 -od uint8 -o ValDxDyOut deriv_spline_c_float_v_floatarray.tif test_deriv_spline_c_float_v_floatarray")
command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 -od uint8 -o ValDxDyOut deriv_spline_c_float_u_floatarray.tif test_deriv_spline_c_float_u_floatarray")
command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 -od uint8 -o ValDxDyOut deriv_spline_c_float_c_floatarray.tif test_deriv_spline_c_float_c_floatarray")
outputs.append ("deriv_spline_c_float_v_floatarray.tif")
outputs.append ("deriv_spline_c_float_u_floatarray.tif")
outputs.append ("deriv_spline_c_float_c_floatarray.tif")
command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 -od uint8 -o ValDxDyOut deriv_spline_u_float_v_floatarray.tif test_deriv_spline_u_float_v_floatarray")
command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 -od uint8 -o ValDxDyOut deriv_spline_u_float_u_floatarray.tif test_deriv_spline_u_float_u_floatarray")
command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 -od uint8 -o ValDxDyOut deriv_spline_u_float_c_floatarray.tif test_deriv_spline_u_float_c_floatarray")
outputs.append ("deriv_spline_u_float_v_floatarray.tif")
outputs.append ("deriv_spline_u_float_u_floatarray.tif")
outputs.append ("deriv_spline_u_float_c_floatarray.tif")
command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 -od uint8 -o ValDxDyOut deriv_spline_v_float_v_floatarray.tif test_deriv_spline_v_float_v_floatarray")
command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 -od uint8 -o ValDxDyOut deriv_spline_v_float_u_floatarray.tif test_deriv_spline_v_float_u_floatarray")
command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 -od uint8 -o ValDxDyOut deriv_spline_v_float_c_floatarray.tif test_deriv_spline_v_float_c_floatarray")
outputs.append ("deriv_spline_v_float_v_floatarray.tif")
outputs.append ("deriv_spline_v_float_u_floatarray.tif")
outputs.append ("deriv_spline_v_float_c_floatarray.tif")
command += testshade("-t 1 -g 64 64 -od uint8 -o Cout spline_c_float_v_colorarray.tif test_spline_c_float_v_colorarray")
command += testshade("-t 1 -g 64 64 -od uint8 -o Cout spline_c_float_u_colorarray.tif test_spline_c_float_u_colorarray")
command += testshade("-t 1 -g 64 64 -od uint8 -o Cout spline_c_float_c_colorarray.tif test_spline_c_float_c_colorarray")
outputs.append ("spline_c_float_v_colorarray.tif")
outputs.append ("spline_c_float_u_colorarray.tif")
outputs.append ("spline_c_float_c_colorarray.tif")
command += testshade("-t 1 -g 64 64 -od uint8 -o Cout spline_u_float_v_colorarray.tif test_spline_u_float_v_colorarray")
command += testshade("-t 1 -g 64 64 -od uint8 -o Cout spline_u_float_u_colorarray.tif test_spline_u_float_u_colorarray")
command += testshade("-t 1 -g 64 64 -od uint8 -o Cout spline_u_float_c_colorarray.tif test_spline_u_float_c_colorarray")
outputs.append ("spline_u_float_v_colorarray.tif")
outputs.append ("spline_u_float_u_colorarray.tif")
outputs.append ("spline_u_float_c_colorarray.tif")
command += testshade("-t 1 -g 64 64 -od uint8 -o Cout spline_v_float_v_colorarray.tif test_spline_v_float_v_colorarray")
command += testshade("-t 1 -g 64 64 -od uint8 -o Cout spline_v_float_u_colorarray.tif test_spline_v_float_u_colorarray")
command += testshade("-t 1 -g 64 64 -od uint8 -o Cout spline_v_float_c_colorarray.tif test_spline_v_float_c_colorarray")
outputs.append ("spline_v_float_v_colorarray.tif")
outputs.append ("spline_v_float_u_colorarray.tif")
outputs.append ("spline_v_float_c_colorarray.tif")
command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 -od uint8 -o ValOut deriv_spline_c_float_v_colorarray.tif -o DxOut deriv_spline_c_float_v_colorarrayDx.tif -o DyOut deriv_spline_c_float_v_colorarrayDy.tif test_deriv_spline_c_float_v_colorarray")
command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 -od uint8 -o ValOut deriv_spline_c_float_u_colorarray.tif -o DxOut deriv_spline_c_float_u_colorarrayDx.tif -o DyOut deriv_spline_c_float_u_colorarrayDy.tif test_deriv_spline_c_float_u_colorarray")
command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 -od uint8 -o ValOut deriv_spline_c_float_c_colorarray.tif -o DxOut deriv_spline_c_float_c_colorarrayDx.tif -o DyOut deriv_spline_c_float_c_colorarrayDy.tif test_deriv_spline_c_float_c_colorarray")
outputs.append ("deriv_spline_c_float_v_colorarray.tif")
outputs.append ("deriv_spline_c_float_v_colorarrayDx.tif")
outputs.append ("deriv_spline_c_float_v_colorarrayDy.tif")
outputs.append ("deriv_spline_c_float_u_colorarray.tif")
outputs.append ("deriv_spline_c_float_u_colorarrayDx.tif")
outputs.append ("deriv_spline_c_float_u_colorarrayDy.tif")
outputs.append ("deriv_spline_c_float_c_colorarray.tif")
outputs.append ("deriv_spline_c_float_c_colorarrayDx.tif")
outputs.append ("deriv_spline_c_float_c_colorarrayDy.tif")
command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 -od uint8 -o ValOut deriv_spline_u_float_v_colorarray.tif -o DxOut deriv_spline_u_float_v_colorarrayDx.tif -o DyOut deriv_spline_u_float_v_colorarrayDy.tif test_deriv_spline_u_float_v_colorarray")
command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 -od uint8 -o ValOut deriv_spline_u_float_u_colorarray.tif -o DxOut deriv_spline_u_float_u_colorarrayDx.tif -o DyOut deriv_spline_u_float_u_colorarrayDy.tif test_deriv_spline_u_float_u_colorarray")
command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 -od uint8 -o ValOut deriv_spline_u_float_c_colorarray.tif -o DxOut deriv_spline_u_float_c_colorarrayDx.tif -o DyOut deriv_spline_u_float_c_colorarrayDy.tif test_deriv_spline_u_float_c_colorarray")
outputs.append ("deriv_spline_u_float_v_colorarray.tif")
outputs.append ("deriv_spline_u_float_v_colorarrayDx.tif")
outputs.append ("deriv_spline_u_float_v_colorarrayDy.tif")
outputs.append ("deriv_spline_u_float_u_colorarray.tif")
outputs.append ("deriv_spline_u_float_u_colorarrayDx.tif")
outputs.append ("deriv_spline_u_float_u_colorarrayDy.tif")
outputs.append ("deriv_spline_u_float_c_colorarray.tif")
outputs.append ("deriv_spline_u_float_c_colorarrayDx.tif")
outputs.append ("deriv_spline_u_float_c_colorarrayDy.tif")
command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 -od uint8 -o ValOut deriv_spline_v_float_v_colorarray.tif -o DxOut deriv_spline_v_float_v_colorarrayDx.tif -o DyOut deriv_spline_v_float_v_colorarrayDy.tif test_deriv_spline_v_float_v_colorarray")
command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 -od uint8 -o ValOut deriv_spline_v_float_u_colorarray.tif -o DxOut deriv_spline_v_float_u_colorarrayDx.tif -o DyOut deriv_spline_v_float_u_colorarrayDy.tif test_deriv_spline_v_float_u_colorarray")
command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 -od uint8 -o ValOut deriv_spline_v_float_c_colorarray.tif -o DxOut deriv_spline_v_float_c_colorarrayDx.tif -o DyOut deriv_spline_v_float_c_colorarrayDy.tif test_deriv_spline_v_float_c_colorarray")
outputs.append ("deriv_spline_v_float_v_colorarray.tif")
outputs.append ("deriv_spline_v_float_v_colorarrayDx.tif")
outputs.append ("deriv_spline_v_float_v_colorarrayDy.tif")
outputs.append ("deriv_spline_v_float_u_colorarray.tif")
outputs.append ("deriv_spline_v_float_u_colorarrayDx.tif")
outputs.append ("deriv_spline_v_float_u_colorarrayDy.tif")
outputs.append ("deriv_spline_v_float_c_colorarray.tif")
outputs.append ("deriv_spline_v_float_c_colorarrayDx.tif")
outputs.append ("deriv_spline_v_float_c_colorarrayDy.tif")
command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 -od uint8 -o ValOut deriv_spline_vNoDeriv_float_v_colorarray.tif -o DxOut deriv_spline_vNoDeriv_float_v_colorarrayDx.tif -o DyOut deriv_spline_vNoDeriv_float_v_colorarrayDy.tif test_deriv_spline_vNoDeriv_float_v_colorarray")
command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 -od uint8 -o ValOut deriv_spline_vNoDeriv_float_u_colorarray.tif -o DxOut deriv_spline_vNoDeriv_float_u_colorarrayDx.tif -o DyOut deriv_spline_vNoDeriv_float_u_colorarrayDy.tif test_deriv_spline_vNoDeriv_float_u_colorarray")
command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 -od uint8 -o ValOut deriv_spline_vNoDeriv_float_c_colorarray.tif -o DxOut deriv_spline_vNoDeriv_float_c_colorarrayDx.tif -o DyOut deriv_spline_vNoDeriv_float_c_colorarrayDy.tif test_deriv_spline_vNoDeriv_float_c_colorarray")
outputs.append ("deriv_spline_vNoDeriv_float_v_colorarray.tif")
outputs.append ("deriv_spline_vNoDeriv_float_v_colorarrayDx.tif")
outputs.append ("deriv_spline_vNoDeriv_float_v_colorarrayDy.tif")
outputs.append ("deriv_spline_vNoDeriv_float_u_colorarray.tif")
outputs.append ("deriv_spline_vNoDeriv_float_u_colorarrayDx.tif")
outputs.append ("deriv_spline_vNoDeriv_float_u_colorarrayDy.tif")
outputs.append ("deriv_spline_vNoDeriv_float_c_colorarray.tif")
outputs.append ("deriv_spline_vNoDeriv_float_c_colorarrayDx.tif")
outputs.append ("deriv_spline_vNoDeriv_float_c_colorarrayDy.tif")
command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 -od uint8 -o ValOut deriv_spline_v_float_vNoDeriv_colorarray.tif -o DxOut deriv_spline_v_float_vNoDeriv_colorarrayDx.tif -o DyOut deriv_spline_v_float_vNoDeriv_colorarrayDy.tif test_deriv_spline_v_float_vNoDeriv_colorarray")
outputs.append ("deriv_spline_v_float_vNoDeriv_colorarray.tif")
outputs.append ("deriv_spline_v_float_vNoDeriv_colorarrayDx.tif")
outputs.append ("deriv_spline_v_float_vNoDeriv_colorarrayDy.tif")
command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 -od uint8 -o ValOut deriv_spline_u_float_vNoDeriv_colorarray.tif -o DxOut deriv_spline_u_float_vNoDeriv_colorarrayDx.tif -o DyOut deriv_spline_u_float_vNoDeriv_colorarrayDy.tif test_deriv_spline_u_float_vNoDeriv_colorarray")
outputs.append ("deriv_spline_u_float_vNoDeriv_colorarray.tif")
outputs.append ("deriv_spline_u_float_vNoDeriv_colorarrayDx.tif")
outputs.append ("deriv_spline_u_float_vNoDeriv_colorarrayDy.tif")
command += testshade("--vary_udxdy --vary_vdxdy -t 1 -g 64 64 -od uint8 -o ValOut deriv_spline_c_float_vNoDeriv_colorarray.tif -o DxOut deriv_spline_c_float_vNoDeriv_colorarrayDx.tif -o DyOut deriv_spline_c_float_vNoDeriv_colorarrayDy.tif test_deriv_spline_c_float_vNoDeriv_colorarray")
outputs.append ("deriv_spline_c_float_vNoDeriv_colorarray.tif")
outputs.append ("deriv_spline_c_float_vNoDeriv_colorarrayDx.tif")
outputs.append ("deriv_spline_c_float_vNoDeriv_colorarrayDy.tif")
# expect a few LSB failures
failthresh = 0.008
failpercent = 3
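# The nine spline_*_float_*_floatarray blocks above follow one pattern; a
# compact equivalent (a sketch, kept commented out so the explicit,
# greppable list above stays the test's source of truth) would be:
#
# for interp in ("c", "u", "v"):
#     for knots in ("v", "u", "c"):
#         name = "spline_%s_float_%s_floatarray" % (interp, knots)
#         command += testshade("-t 1 -g 64 64 -od uint8 -o Fout %s.tif test_%s" % (name, name))
#         outputs.append("%s.tif" % name)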
| 83.40411 | 286 | 0.839534 | 2,043 | 12,177 | 4.531082 | 0.031816 | 0.156854 | 0.096792 | 0.140002 | 0.978071 | 0.978071 | 0.95949 | 0.77671 | 0.361024 | 0.361024 | 0 | 0.022838 | 0.072267 | 12,177 | 145 | 287 | 83.97931 | 0.796583 | 0.017246 | 0 | 0 | 0 | 0.206897 | 0.786556 | 0.586406 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
3d639826f680def5da87b001542afbaf2b12d527 | 2,863 | py | Python | open_anafi/lib/solde_factory.py | Cour-des-comptes/open-anafi-backend | 1d3ebcfe7b46315e91618f540ef1c95b4e20d9af | ["MIT"] | 7 | 2020-01-10T09:34:52.000Z | 2020-01-27T13:51:12.000Z | open_anafi/lib/solde_factory.py | Cour-des-comptes/open-anafi-backend | 1d3ebcfe7b46315e91618f540ef1c95b4e20d9af | ["MIT"] | 6 | 2020-01-26T20:38:07.000Z | 2022-02-10T12:12:53.000Z | open_anafi/lib/solde_factory.py | Cour-des-comptes/open-anafi-backend | 1d3ebcfe7b46315e91618f540ef1c95b4e20d9af | ["MIT"] | 4 | 2020-01-27T16:44:31.000Z | 2021-02-11T16:52:26.000Z |
import logging
logger = logging.getLogger(__name__)
class SoldeFactory:
@staticmethod
def factory(type_solde, solde):
        # Each lambda takes c (the credits dictionary) and d (the debits dictionary).
if type_solde == "SX":
            # Debit balance (solde débiteur)
if solde == "C":
return_func = lambda c, d: c[1] + c[2] + c[3] + c[4] - d[7]
elif solde == "D":
return_func = lambda c, d: d[1] + d[2] + d[3] + d[4] - c[7]
elif solde == "SC":
return_func = lambda c, d: c[1] + c[2] + c[3] + c[4] - d[7]
elif solde == "SD":
return_func = lambda c, d: d[1] + d[2] + d[3] + d[4] - c[7]
else:
logger.debug(f'{type_solde} {solde}')
raise ValueError("Wrong type_solde and solde association")
elif type_solde == "SS":
            # Credit balance (solde créditeur)
if solde == "C":
return_func = lambda c, d: c[2] + c[3] + c[4] - d[7]
elif solde == "D":
return_func = lambda c, d: d[2] + d[3] + d[4] - c[7]
elif solde == "SC":
return_func = lambda c, d: c[1] + c[2] + c[3] + c[4] - d[7]
elif solde == "SD":
return_func = lambda c, d: d[1] + d[2] + d[3] + d[4] - c[7]
else:
logger.debug(f'{type_solde} {solde}')
raise ValueError("Wrong type_solde and solde association")
elif type_solde == "BR":
if solde == "C":
return_func = lambda c, d: c[2] - c[6] - d[7]
elif solde == "D":
return_func = lambda c, d: d[2] - d[6] - c[7]
elif solde == "SD":
return_func = lambda c, d: d[2] - d[6] - d[7]
elif solde == "SC":
return_func = lambda c, d: c[2] - c[6] - c[7]
else:
logger.debug(f'{type_solde} {solde}')
raise ValueError("Wrong type_solde and solde association")
elif type_solde == "BO":
if solde == "C":
return_func = lambda c, d: c[6]
elif solde == "D":
return_func = lambda c, d: d[6]
else:
logger.debug(f'{type_solde} {solde}')
raise ValueError("Wrong type_solde and solde association")
elif type_solde == "BX":
if solde == "C" or solde == "SC":
return_func = lambda c, d: c[2] - d[7]
elif solde == "D" or solde == "SD":
return_func = lambda c, d: d[2] - c[7]
else:
logger.debug(f'{type_solde} {solde}')
raise ValueError("Wrong type_solde and solde association")
elif type_solde == "NB":
if solde == "C":
return_func = lambda c, d: c[3] + c[4]
elif solde == "D":
return_func = lambda c, d: d[3] + d[4]
else:
logger.debug(f'{type_solde} {solde}')
raise ValueError("Wrong type_solde and solde association")
elif type_solde == "BE":
if solde == "C" or solde == "SC":
return_func = lambda c, d: c[1]
elif solde == "D" or solde == "SD":
return_func = lambda c, d: d[1]
else:
logger.debug(f'{type_solde} {solde}')
raise ValueError("Wrong type_solde and solde association")
else:
logger.debug(type_solde)
raise ValueError("Wrong type_solde and solde association")
return return_func
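# Usage sketch (not part of the original module): the factory returns a
# function of two dictionaries keyed by account class. The values below are
# made-up illustration data.
if __name__ == "__main__":
    credits = {1: 10.0, 2: 20.0, 3: 5.0, 4: 5.0, 6: 0.0, 7: 2.0}
    debits = {1: 0.0, 2: 8.0, 3: 1.0, 4: 1.0, 6: 0.0, 7: 3.0}
    solde = SoldeFactory.factory("SX", "C")
    print(solde(credits, debits))  # 10 + 20 + 5 + 5 - 3 = 37.0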
| 34.914634 | 77 | 0.586098 | 474 | 2,863 | 3.436709 | 0.109705 | 0.132597 | 0.19644 | 0.208717 | 0.847145 | 0.845304 | 0.845304 | 0.845304 | 0.845304 | 0.731737 | 0 | 0.028466 | 0.23926 | 2,863 | 82 | 78 | 34.914634 | 0.719467 | 0.036326 | 0 | 0.636364 | 0 | 0 | 0.17852 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.012987 | false | 0 | 0.012987 | 0 | 0.051948 | 0 | 0 | 0 | 0 | null | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 8 |
1823a1e36df36e9a33f2b423cc81ea0a7c9c3401 | 125 | py | Python | acousticsim/analysis/formants/__init__.py | JoFrhwld/python-acoustic-similarity | 50f71835532010b2fedf14b0ca3a52d88a9ab380 | ["MIT"] | 5 | 2018-01-15T22:06:20.000Z | 2022-02-21T07:02:40.000Z | acousticsim/analysis/formants/__init__.py | JoFrhwld/python-acoustic-similarity | 50f71835532010b2fedf14b0ca3a52d88a9ab380 | ["MIT"] | null | null | null | acousticsim/analysis/formants/__init__.py | JoFrhwld/python-acoustic-similarity | 50f71835532010b2fedf14b0ca3a52d88a9ab380 | ["MIT"] | 2 | 2019-11-28T17:06:27.000Z | 2019-12-05T22:57:28.000Z |
from .lpc import file_to_formants, signal_to_formants
from .praat import file_to_formants_praat, signal_to_formants_praat
| 20.833333 | 67 | 0.864 | 20 | 125 | 4.9 | 0.4 | 0.408163 | 0.244898 | 0.408163 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.104 | 125 | 5 | 68 | 25 | 0.875 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 8 |
a10aad44bca6f9041a924e5bbfb729ab2af70538 | 40,099 | py | Python | influxdb_client/service/variables_service.py | rhajek/influxdb-client-python | 852e6f1b1161df4d67eabc19cdb6b323a46b88e2 | ["MIT"] | null | null | null | influxdb_client/service/variables_service.py | rhajek/influxdb-client-python | 852e6f1b1161df4d67eabc19cdb6b323a46b88e2 | ["MIT"] | null | null | null | influxdb_client/service/variables_service.py | rhajek/influxdb-client-python | 852e6f1b1161df4d67eabc19cdb6b323a46b88e2 | ["MIT"] | null | null | null |
# coding: utf-8
"""
Influx API Service
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
OpenAPI spec version: 0.1.0
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from influxdb_client.api_client import ApiClient
class VariablesService(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def delete_variables_id(self, variable_id, **kwargs): # noqa: E501
"""delete a variable # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_variables_id(variable_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str variable_id: id of the variable (required)
:param str zap_trace_span: OpenTracing span context
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_variables_id_with_http_info(variable_id, **kwargs) # noqa: E501
else:
(data) = self.delete_variables_id_with_http_info(variable_id, **kwargs) # noqa: E501
return data
def delete_variables_id_with_http_info(self, variable_id, **kwargs): # noqa: E501
"""delete a variable # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_variables_id_with_http_info(variable_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str variable_id: id of the variable (required)
:param str zap_trace_span: OpenTracing span context
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['variable_id', 'zap_trace_span'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_variables_id" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'variable_id' is set
if ('variable_id' not in local_var_params or
local_var_params['variable_id'] is None):
raise ValueError("Missing the required parameter `variable_id` when calling `delete_variables_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'variable_id' in local_var_params:
path_params['variableID'] = local_var_params['variable_id'] # noqa: E501
query_params = []
header_params = {}
if 'zap_trace_span' in local_var_params:
header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v2/variables/{variableID}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_variables_id_labels_id(self, variable_id, label_id, **kwargs): # noqa: E501
"""delete a label from a variable # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_variables_id_labels_id(variable_id, label_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str variable_id: ID of the variable (required)
:param str label_id: the label id to delete (required)
:param str zap_trace_span: OpenTracing span context
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_variables_id_labels_id_with_http_info(variable_id, label_id, **kwargs) # noqa: E501
else:
(data) = self.delete_variables_id_labels_id_with_http_info(variable_id, label_id, **kwargs) # noqa: E501
return data
def delete_variables_id_labels_id_with_http_info(self, variable_id, label_id, **kwargs): # noqa: E501
"""delete a label from a variable # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_variables_id_labels_id_with_http_info(variable_id, label_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str variable_id: ID of the variable (required)
:param str label_id: the label id to delete (required)
:param str zap_trace_span: OpenTracing span context
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['variable_id', 'label_id', 'zap_trace_span'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_variables_id_labels_id" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'variable_id' is set
if ('variable_id' not in local_var_params or
local_var_params['variable_id'] is None):
raise ValueError("Missing the required parameter `variable_id` when calling `delete_variables_id_labels_id`") # noqa: E501
# verify the required parameter 'label_id' is set
if ('label_id' not in local_var_params or
local_var_params['label_id'] is None):
raise ValueError("Missing the required parameter `label_id` when calling `delete_variables_id_labels_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'variable_id' in local_var_params:
path_params['variableID'] = local_var_params['variable_id'] # noqa: E501
if 'label_id' in local_var_params:
path_params['labelID'] = local_var_params['label_id'] # noqa: E501
query_params = []
header_params = {}
if 'zap_trace_span' in local_var_params:
header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v2/variables/{variableID}/labels/{labelID}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_variables(self, **kwargs): # noqa: E501
"""get all variables # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_variables(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str zap_trace_span: OpenTracing span context
:param str org: specifies the organization name of the resource
:param str org_id: specifies the organization id of the resource
:return: Variables
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_variables_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_variables_with_http_info(**kwargs) # noqa: E501
return data
def get_variables_with_http_info(self, **kwargs): # noqa: E501
"""get all variables # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_variables_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param str zap_trace_span: OpenTracing span context
:param str org: specifies the organization name of the resource
:param str org_id: specifies the organization id of the resource
:return: Variables
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['zap_trace_span', 'org', 'org_id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_variables" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'org' in local_var_params:
query_params.append(('org', local_var_params['org'])) # noqa: E501
if 'org_id' in local_var_params:
query_params.append(('orgID', local_var_params['org_id'])) # noqa: E501
header_params = {}
if 'zap_trace_span' in local_var_params:
header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v2/variables', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Variables', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_variables_id(self, variable_id, **kwargs): # noqa: E501
"""get a variable # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_variables_id(variable_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str variable_id: ID of the variable (required)
:param str zap_trace_span: OpenTracing span context
:return: Variable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_variables_id_with_http_info(variable_id, **kwargs) # noqa: E501
else:
(data) = self.get_variables_id_with_http_info(variable_id, **kwargs) # noqa: E501
return data
def get_variables_id_with_http_info(self, variable_id, **kwargs): # noqa: E501
"""get a variable # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_variables_id_with_http_info(variable_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str variable_id: ID of the variable (required)
:param str zap_trace_span: OpenTracing span context
:return: Variable
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['variable_id', 'zap_trace_span'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_variables_id" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'variable_id' is set
if ('variable_id' not in local_var_params or
local_var_params['variable_id'] is None):
raise ValueError("Missing the required parameter `variable_id` when calling `get_variables_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'variable_id' in local_var_params:
path_params['variableID'] = local_var_params['variable_id'] # noqa: E501
query_params = []
header_params = {}
if 'zap_trace_span' in local_var_params:
header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v2/variables/{variableID}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Variable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def get_variables_id_labels(self, variable_id, **kwargs): # noqa: E501
"""list all labels for a variable # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_variables_id_labels(variable_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str variable_id: ID of the variable (required)
:param str zap_trace_span: OpenTracing span context
:return: LabelsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_variables_id_labels_with_http_info(variable_id, **kwargs) # noqa: E501
else:
(data) = self.get_variables_id_labels_with_http_info(variable_id, **kwargs) # noqa: E501
return data
def get_variables_id_labels_with_http_info(self, variable_id, **kwargs): # noqa: E501
"""list all labels for a variable # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_variables_id_labels_with_http_info(variable_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str variable_id: ID of the variable (required)
:param str zap_trace_span: OpenTracing span context
:return: LabelsResponse
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['variable_id', 'zap_trace_span'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_variables_id_labels" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'variable_id' is set
if ('variable_id' not in local_var_params or
local_var_params['variable_id'] is None):
raise ValueError("Missing the required parameter `variable_id` when calling `get_variables_id_labels`") # noqa: E501
collection_formats = {}
path_params = {}
if 'variable_id' in local_var_params:
path_params['variableID'] = local_var_params['variable_id'] # noqa: E501
query_params = []
header_params = {}
if 'zap_trace_span' in local_var_params:
header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v2/variables/{variableID}/labels', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='LabelsResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def patch_variables_id(self, variable_id, variable, **kwargs): # noqa: E501
"""update a variable # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_variables_id(variable_id, variable, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str variable_id: id of the variable (required)
:param Variable variable: variable update to apply (required)
:param str zap_trace_span: OpenTracing span context
:return: Variable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.patch_variables_id_with_http_info(variable_id, variable, **kwargs) # noqa: E501
else:
(data) = self.patch_variables_id_with_http_info(variable_id, variable, **kwargs) # noqa: E501
return data
def patch_variables_id_with_http_info(self, variable_id, variable, **kwargs): # noqa: E501
"""update a variable # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.patch_variables_id_with_http_info(variable_id, variable, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str variable_id: id of the variable (required)
:param Variable variable: variable update to apply (required)
:param str zap_trace_span: OpenTracing span context
:return: Variable
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['variable_id', 'variable', 'zap_trace_span'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method patch_variables_id" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'variable_id' is set
if ('variable_id' not in local_var_params or
local_var_params['variable_id'] is None):
raise ValueError("Missing the required parameter `variable_id` when calling `patch_variables_id`") # noqa: E501
# verify the required parameter 'variable' is set
if ('variable' not in local_var_params or
local_var_params['variable'] is None):
raise ValueError("Missing the required parameter `variable` when calling `patch_variables_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'variable_id' in local_var_params:
path_params['variableID'] = local_var_params['variable_id'] # noqa: E501
query_params = []
header_params = {}
if 'zap_trace_span' in local_var_params:
header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'variable' in local_var_params:
body_params = local_var_params['variable']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v2/variables/{variableID}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Variable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def post_variables(self, variable, **kwargs): # noqa: E501
"""create a variable # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_variables(variable, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Variable variable: variable to create (required)
:param str zap_trace_span: OpenTracing span context
:return: Variable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_variables_with_http_info(variable, **kwargs) # noqa: E501
else:
(data) = self.post_variables_with_http_info(variable, **kwargs) # noqa: E501
return data
def post_variables_with_http_info(self, variable, **kwargs): # noqa: E501
"""create a variable # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_variables_with_http_info(variable, async_req=True)
>>> result = thread.get()
:param async_req bool
:param Variable variable: variable to create (required)
:param str zap_trace_span: OpenTracing span context
:return: Variable
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['variable', 'zap_trace_span'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_variables" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'variable' is set
if ('variable' not in local_var_params or
local_var_params['variable'] is None):
raise ValueError("Missing the required parameter `variable` when calling `post_variables`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
if 'zap_trace_span' in local_var_params:
header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'variable' in local_var_params:
body_params = local_var_params['variable']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v2/variables', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Variable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def post_variables_id_labels(self, variable_id, label_mapping, **kwargs): # noqa: E501
"""add a label to a variable # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_variables_id_labels(variable_id, label_mapping, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str variable_id: ID of the variable (required)
:param LabelMapping label_mapping: label to add (required)
:param str zap_trace_span: OpenTracing span context
:return: LabelResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.post_variables_id_labels_with_http_info(variable_id, label_mapping, **kwargs) # noqa: E501
else:
(data) = self.post_variables_id_labels_with_http_info(variable_id, label_mapping, **kwargs) # noqa: E501
return data
def post_variables_id_labels_with_http_info(self, variable_id, label_mapping, **kwargs): # noqa: E501
"""add a label to a variable # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.post_variables_id_labels_with_http_info(variable_id, label_mapping, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str variable_id: ID of the variable (required)
:param LabelMapping label_mapping: label to add (required)
:param str zap_trace_span: OpenTracing span context
:return: LabelResponse
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['variable_id', 'label_mapping', 'zap_trace_span'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method post_variables_id_labels" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'variable_id' is set
if ('variable_id' not in local_var_params or
local_var_params['variable_id'] is None):
raise ValueError("Missing the required parameter `variable_id` when calling `post_variables_id_labels`") # noqa: E501
# verify the required parameter 'label_mapping' is set
if ('label_mapping' not in local_var_params or
local_var_params['label_mapping'] is None):
raise ValueError("Missing the required parameter `label_mapping` when calling `post_variables_id_labels`") # noqa: E501
collection_formats = {}
path_params = {}
if 'variable_id' in local_var_params:
path_params['variableID'] = local_var_params['variable_id'] # noqa: E501
query_params = []
header_params = {}
if 'zap_trace_span' in local_var_params:
header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'label_mapping' in local_var_params:
body_params = local_var_params['label_mapping']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v2/variables/{variableID}/labels', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='LabelResponse', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def put_variables_id(self, variable_id, variable, **kwargs): # noqa: E501
"""replace a variable # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.put_variables_id(variable_id, variable, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str variable_id: id of the variable (required)
:param Variable variable: variable to replace (required)
:param str zap_trace_span: OpenTracing span context
:return: Variable
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.put_variables_id_with_http_info(variable_id, variable, **kwargs) # noqa: E501
else:
(data) = self.put_variables_id_with_http_info(variable_id, variable, **kwargs) # noqa: E501
return data
def put_variables_id_with_http_info(self, variable_id, variable, **kwargs): # noqa: E501
"""replace a variable # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.put_variables_id_with_http_info(variable_id, variable, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str variable_id: id of the variable (required)
:param Variable variable: variable to replace (required)
:param str zap_trace_span: OpenTracing span context
:return: Variable
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = ['variable_id', 'variable', 'zap_trace_span'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method put_variables_id" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'variable_id' is set
if ('variable_id' not in local_var_params or
local_var_params['variable_id'] is None):
raise ValueError("Missing the required parameter `variable_id` when calling `put_variables_id`") # noqa: E501
# verify the required parameter 'variable' is set
if ('variable' not in local_var_params or
local_var_params['variable'] is None):
raise ValueError("Missing the required parameter `variable` when calling `put_variables_id`") # noqa: E501
collection_formats = {}
path_params = {}
if 'variable_id' in local_var_params:
path_params['variableID'] = local_var_params['variable_id'] # noqa: E501
query_params = []
header_params = {}
if 'zap_trace_span' in local_var_params:
header_params['Zap-Trace-Span'] = local_var_params['zap_trace_span'] # noqa: E501
form_params = []
local_var_files = {}
body_params = None
if 'variable' in local_var_params:
body_params = local_var_params['variable']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = [] # noqa: E501
return self.api_client.call_api(
'/api/v2/variables/{variableID}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Variable', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
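# --- Usage sketch (illustrative; not part of the generated client) ---
# Assuming this service class has been instantiated as `api` with a configured
# api_client, and `variable` is a Variable model instance, the two call styles
# documented above look like this:
#
#   # synchronous: returns the deserialized Variable directly
#   updated = api.put_variables_id("0123456789abcdef", variable)
#
#   # asynchronous: returns a thread-like handle; .get() blocks for the result
#   thread = api.put_variables_id("0123456789abcdef", variable, async_req=True)
#   updated = thread.get()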
| 41.596473
| 135
| 0.629816
| 4,808
| 40,099
| 4.940932
| 0.03619
| 0.053881
| 0.083684
| 0.027277
| 0.967798
| 0.963672
| 0.958116
| 0.950876
| 0.943846
| 0.931849
| 0
| 0.015156
| 0.284246
| 40,099
| 963
| 136
| 41.639668
| 0.81255
| 0.308287
| 0
| 0.802682
| 1
| 0
| 0.194629
| 0.041565
| 0
| 0
| 0
| 0
| 0
| 1
| 0.036398
| false
| 0
| 0.007663
| 0
| 0.097701
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a117fa3da205fffca963ba6aa31e36ecb1a0dad7
| 8,270
|
py
|
Python
|
nlm/tests/test_rpc.py
|
hscspring/NLM
|
d0c5b993e54ad8ff921568bae46403987301de29
|
[
"MIT"
] | 42
|
2019-12-02T10:35:32.000Z
|
2020-04-06T16:02:18.000Z
|
nlm/tests/test_rpc.py
|
hscspring/NLM
|
d0c5b993e54ad8ff921568bae46403987301de29
|
[
"MIT"
] | null | null | null |
nlm/tests/test_rpc.py
|
hscspring/NLM
|
d0c5b993e54ad8ff921568bae46403987301de29
|
[
"MIT"
] | 13
|
2019-12-03T06:09:10.000Z
|
2020-03-18T16:12:10.000Z
|
import os
import sys
import pytest
import json
import grpc
ROOT_PATH = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.append(ROOT_PATH)
import nlm_pb2
import nlm_pb2_grpc
@pytest.fixture(scope='module')
def grpc_add_to_server():
from nlm_pb2_grpc import add_NLMServicer_to_server
return add_NLMServicer_to_server
@pytest.fixture(scope='module')
def grpc_servicer():
from server import NLMService
return NLMService()
@pytest.fixture(scope='module')
def grpc_stub_cls(grpc_channel):
from nlm_pb2_grpc import NLMStub
return NLMStub
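# Note: the `grpc_stub` argument used by the tests below is injected by the
# pytest-grpc plugin (whose fixture-name conventions the three module-scoped
# fixtures above follow): it builds an in-process gRPC channel, registers the
# NLMService servicer on it, and hands each test a ready NLMStub, so no
# external server needs to be running.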
def test_recall_node_exist(grpc_stub):
label = "Person"
name = "AliceFive"
props = {"age": 24}
request = nlm_pb2.GraphNode(
label=label, name=name, props=json.dumps(props))
response = grpc_stub.NodeRecall(request)
assert isinstance(response, nlm_pb2.GraphNode)
assert response.label == label
assert response.name == name
assert isinstance(response.props, str)
assert json.loads(response.props) == {"age": 24,
"occupation": "scientist",
"sex": "female"}
def test_recall_node_not_exist(grpc_stub):
label = "Person"
name = "AliceFive1"
props = {"age": 24}
request = nlm_pb2.GraphNode(
label=label, name=name, props=json.dumps(props))
response = grpc_stub.NodeRecall(request)
assert isinstance(response, nlm_pb2.GraphNode)
assert response.label == label
assert response.name == name
assert isinstance(response.props, str)
assert json.loads(response.props) == props
def test_recall_relation_exist(grpc_stub):
start = nlm_pb2.GraphNode(
label="Person", name="AliceThree", props=json.dumps({}))
end = nlm_pb2.GraphNode(
label="Person", name="AliceOne", props=json.dumps({}))
kind = "LOVES"
props = {"roles": "husband"}
request = nlm_pb2.GraphRelation(
start=start, end=end, kind=kind, props=json.dumps(props))
response = grpc_stub.RelationRecall(request)
assert isinstance(response, nlm_pb2.GraphRelation)
assert response.start != start
assert response.end != end
assert response.start.name == start.name
assert response.end.label == end.label
assert response.kind == kind
assert isinstance(response.props, str)
assert json.loads(response.props) != props
def test_recall_relation_exist_fuzzy_kind(grpc_stub):
start = nlm_pb2.GraphNode(
label="Person", name="AliceThree", props=json.dumps({}))
end = nlm_pb2.GraphNode(
label="Person", name="AliceOne", props=json.dumps({}))
kind = "LOVEING"
props = {"roles": "husband"}
request = nlm_pb2.GraphRelation(
start=start, end=end, kind=kind, props=json.dumps(props))
response = grpc_stub.RelationRecall(request)
assert isinstance(response, nlm_pb2.GraphRelation)
assert response.start != start
assert response.end != end
assert response.start.name == start.name
assert response.end.label == end.label
assert response.kind == "LOVES"
assert isinstance(response.props, str)
assert json.loads(response.props) != props
def test_recall_relation_exist_only_props1(grpc_stub):
start = nlm_pb2.GraphNode(
label="Person", name="AliceThree", props=json.dumps({}))
end = nlm_pb2.GraphNode(
label="Person", name="AliceOne", props=json.dumps({}))
props = {"roles": "husband"}
request = nlm_pb2.GraphRelation(
start=start, end=end, kind=None, props=json.dumps(props))
response = grpc_stub.RelationRecall(request)
assert isinstance(response, nlm_pb2.GraphRelation)
assert response.start != start
assert response.end != end
assert response.start.name == start.name
assert response.end.label == end.label
assert response.kind == "LOVES"
assert isinstance(response.props, str)
assert json.loads(response.props) != props
def test_recall_relation_exist_only_props2(grpc_stub):
start = nlm_pb2.GraphNode(
label="Person", name="AliceThree", props=json.dumps({}))
end = nlm_pb2.GraphNode(
label="Person", name="AliceOne", props=json.dumps({}))
props = {"from": 2009}
request = nlm_pb2.GraphRelation(
start=start, end=end, kind=None, props=json.dumps(props))
response = grpc_stub.RelationRecall(request)
assert isinstance(response, nlm_pb2.GraphRelation)
assert response.start != start
assert response.end != end
assert response.start.name == start.name
assert response.end.label == end.label
assert response.kind == "WORK_WITH"
assert isinstance(response.props, str)
assert json.loads(response.props) != props
def test_recall_relation_start_end_not_exist(grpc_stub):
start = nlm_pb2.GraphNode(
label="Person", name="AliceThreeNotExist", props=json.dumps({}))
end = nlm_pb2.GraphNode(
label="Person", name="AliceOneNotExist", props=json.dumps({}))
kind = "LOVES"
props = {}
request = nlm_pb2.GraphRelation(
start=start, end=end, kind=kind, props=json.dumps(props))
response = grpc_stub.RelationRecall(request)
assert isinstance(response, nlm_pb2.GraphRelation)
assert response.start == start
assert response.end == end
assert response.kind == kind
assert isinstance(response.props, str)
assert json.loads(response.props) == props
def test_recall_relation_start_not_exist1(grpc_stub):
start = nlm_pb2.GraphNode(
label="Person", name="AliceThreeNotExist", props=json.dumps({}))
end = nlm_pb2.GraphNode(
label="Person", name="AliceOne", props=json.dumps({}))
kind = "LOVES"
props = {}
request = nlm_pb2.GraphRelation(
start=start, end=end, kind=kind, props=json.dumps(props))
response = grpc_stub.RelationRecall(request)
assert isinstance(response, nlm_pb2.GraphRelation)
assert response.start.name == "AliceThree"
assert response.end != end
assert response.kind == kind
assert isinstance(response.props, str)
assert json.loads(response.props) != props
def test_recall_relation_start_not_exist2(grpc_stub):
start = nlm_pb2.GraphNode(
label="Person", name="AliceThreeNotExist", props=json.dumps({}))
end = nlm_pb2.GraphNode(
label="Person", name="AliceOne", props=json.dumps({}))
kind = "LIKES"
props = {}
request = nlm_pb2.GraphRelation(
start=start, end=end, kind=kind, props=json.dumps(props))
response = grpc_stub.RelationRecall(request)
assert isinstance(response, nlm_pb2.GraphRelation)
assert response.start.name == "AliceSeven"
assert response.end != end
assert response.kind == kind
assert isinstance(response.props, str)
assert json.loads(response.props) != props
def test_recall_rawstring_exist(grpc_stub):
text = "some text"
request = nlm_pb2.RawString(text=text)
response = grpc_stub.StrRecall(request)
    # only verifies the call does not raise; the response shape is asserted
    # in test_recall_rawstring_not_exist below
    assert response is not None
def test_recall_rawstring_not_exist(grpc_stub):
text = "not exist text"
request = nlm_pb2.RawString(text=text)
response = grpc_stub.StrRecall(request)
assert isinstance(response, nlm_pb2.GraphOutput)
assert isinstance(response.gn, nlm_pb2.GraphNode)
assert response.gn.label == ""
assert response.gn.name == ""
assert response.gn.props == ""
def test_recall_nlu_exist(grpc_stub):
text = "some text"
entity1 = nlm_pb2.Entity(entity="Person", value="Alice")
entity2 = nlm_pb2.Entity(entity="Person", value="Bob")
intent = "Social"
request = nlm_pb2.NLMInput(
text=text, intent=intent, entities=[entity1, entity2])
response = grpc_stub.NLURecall(request)
    # only verifies the call does not raise; the response shape is asserted
    # in test_recall_nlu_not_exist below
    assert response is not None
def test_recall_nlu_not_exist(grpc_stub):
text = "some text"
entity1 = nlm_pb2.Entity(entity="Person", value="AliceNotExist")
entity2 = nlm_pb2.Entity(entity="Person", value="BobNotExist")
intent = "Social"
request = nlm_pb2.NLMInput(
text=text, intent=intent, entities=[entity1, entity2])
response = grpc_stub.NLURecall(request)
assert isinstance(response, nlm_pb2.GraphOutput)
assert isinstance(response.gn, nlm_pb2.GraphNode)
assert response.gn.label == ""
assert response.gn.name == ""
assert response.gn.props == ""
| 33.617886
| 72
| 0.690085
| 1,019
| 8,270
| 5.451423
| 0.101079
| 0.051845
| 0.057966
| 0.057606
| 0.90369
| 0.880468
| 0.849865
| 0.836904
| 0.836904
| 0.836904
| 0
| 0.010567
| 0.187545
| 8,270
| 245
| 73
| 33.755102
| 0.816193
| 0
| 0
| 0.751244
| 0
| 0
| 0.065715
| 0
| 0
| 0
| 0
| 0
| 0.348259
| 1
| 0.079602
| false
| 0.00995
| 0.049751
| 0
| 0.144279
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a1181b5b954c0011ce3eadf190098b8cc0c40b95
| 107
|
py
|
Python
|
getting_started/line.py
|
AoEiuV020/LearningPython
|
aac0f3f99cfd3d03a96a3c0e41da8f82ea0b8c70
|
[
"MIT"
] | null | null | null |
getting_started/line.py
|
AoEiuV020/LearningPython
|
aac0f3f99cfd3d03a96a3c0e41da8f82ea0b8c70
|
[
"MIT"
] | null | null | null |
getting_started/line.py
|
AoEiuV020/LearningPython
|
aac0f3f99cfd3d03a96a3c0e41da8f82ea0b8c70
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
assert True; assert not False
assert True
assert not False
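# A failing assert raises AssertionError; a minimal sketch of observing that
# (note that all asserts are stripped when Python runs with the -O flag):
try:
    assert False, "message attached to the AssertionError"
except AssertionError as e:
    print(e)  # -> message attached to the AssertionError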
| 15.285714
| 29
| 0.691589
| 17
| 107
| 4.352941
| 0.647059
| 0.27027
| 0.432432
| 0.513514
| 0.648649
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022472
| 0.168224
| 107
| 6
| 30
| 17.833333
| 0.808989
| 0.401869
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a126b77cfcb10de3c0a43009c025f97c29906400
| 4,480
|
py
|
Python
|
kornia/morphology/morphology.py
|
gf0507033/kornia
|
2624f40a62d3639e6d946f3ca41fd1ce4b9de82d
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
kornia/morphology/morphology.py
|
gf0507033/kornia
|
2624f40a62d3639e6d946f3ca41fd1ce4b9de82d
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
kornia/morphology/morphology.py
|
gf0507033/kornia
|
2624f40a62d3639e6d946f3ca41fd1ce4b9de82d
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# Libraries
import torch
import torch.nn as nn
import torch.nn.functional as F
from kornia.morphology.basic_operators import dilation, erosion
from kornia.morphology.open_close import open, close
# morphological gradient
def gradient(tensor: torch.Tensor, kernel: torch.Tensor) -> torch.Tensor:
r"""
    Returns the morphological gradient of an image
    (that is, dilation - erosion), applying the same kernel in each channel.
    The kernel must have 2 dimensions, each one defined by an odd number.
    Args:
        tensor (torch.Tensor): Image with shape :math:`(B, C, H, W)`.
        kernel (torch.Tensor): Structuring element with shape :math:`(H, W)`.
    Returns:
        torch.Tensor: Morphological gradient image with shape :math:`(B, C, H, W)`.
Example:
>>> tensor = torch.rand(1, 3, 5, 5)
>>> kernel = torch.ones(3, 3)
>>> gradient_img = gradient(tensor, kernel)
"""
if not isinstance(tensor, torch.Tensor):
raise TypeError("Input type is not a torch.Tensor. Got {}".format(
type(tensor)))
if len(tensor.shape) != 4:
raise ValueError("Input size must have 4 dimensions. Got {}".format(
tensor.dim()))
if not isinstance(kernel, torch.Tensor):
raise TypeError("Kernel type is not a torch.Tensor. Got {}".format(
type(kernel)))
if len(kernel.shape) != 2:
raise ValueError("Kernel size must have 2 dimensions. Got {}".format(
kernel.dim()))
return dilation(tensor, kernel) - erosion(tensor, kernel)
# top_hat
def top_hat(tensor: torch.Tensor, kernel: torch.Tensor) -> torch.Tensor:
r"""
    Returns the top hat transformation of an image
    (that is, image - opened image), applying the same kernel in each channel.
    The kernel must have 2 dimensions, each one defined by an odd number.
    See :class:`~kornia.morphology.open` for details.
    Args:
        tensor (torch.Tensor): Image with shape :math:`(B, C, H, W)`.
        kernel (torch.Tensor): Structuring element with shape :math:`(H, W)`.
    Returns:
        torch.Tensor: Top hat transformed image with shape :math:`(B, C, H, W)`.
Example:
>>> tensor = torch.rand(1, 3, 5, 5)
>>> kernel = torch.ones(3, 3)
>>> top_hat_img = top_hat(tensor, kernel)
"""
if not isinstance(tensor, torch.Tensor):
raise TypeError("Input type is not a torch.Tensor. Got {}".format(
type(tensor)))
if len(tensor.shape) != 4:
raise ValueError("Input size must have 4 dimensions. Got {}".format(
tensor.dim()))
if not isinstance(kernel, torch.Tensor):
raise TypeError("Kernel type is not a torch.Tensor. Got {}".format(
type(kernel)))
if len(kernel.shape) != 2:
raise ValueError("Kernel size must have 2 dimensions. Got {}".format(
kernel.dim()))
return tensor - open(tensor, kernel)
# black_hat
def black_hat(tensor: torch.Tensor, kernel: torch.Tensor) -> torch.Tensor:
r"""
    Returns the black hat transformation of an image
    (that is, closed image - image), applying the same kernel in each channel.
    The kernel must have 2 dimensions, each one defined by an odd number.
    See :class:`~kornia.morphology.close` for details.
    Args:
        tensor (torch.Tensor): Image with shape :math:`(B, C, H, W)`.
        kernel (torch.Tensor): Structuring element with shape :math:`(H, W)`.
    Returns:
        torch.Tensor: Black hat transformed image with shape :math:`(B, C, H, W)`.
Example:
>>> tensor = torch.rand(1, 3, 5, 5)
>>> kernel = torch.ones(3, 3)
>>> black_hat_img = black_hat(tensor, kernel)
"""
if not isinstance(tensor, torch.Tensor):
raise TypeError("Input type is not a torch.Tensor. Got {}".format(
type(tensor)))
if len(tensor.shape) != 4:
raise ValueError("Input size must have 4 dimensions. Got {}".format(
tensor.dim()))
if not isinstance(kernel, torch.Tensor):
raise TypeError("Kernel type is not a torch.Tensor. Got {}".format(
type(kernel)))
if len(kernel.shape) != 2:
raise ValueError("Kernel size must have 2 dimensions. Got {}".format(
kernel.dim()))
return close(tensor, kernel) - tensor
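# A minimal usage sketch (illustrative, not part of the library API): exercises
# the three operators above with shapes matching the docstrings.
if __name__ == "__main__":
    tensor = torch.rand(1, 3, 5, 5)   # (B, C, H, W)
    kernel = torch.ones(3, 3)         # (H, W), odd-sized
    assert gradient(tensor, kernel).shape == tensor.shape   # dilation - erosion
    assert top_hat(tensor, kernel).shape == tensor.shape    # image - opening
    assert black_hat(tensor, kernel).shape == tensor.shape  # closing - image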
| 30.27027
| 85
| 0.602902
| 579
| 4,480
| 4.639033
| 0.153713
| 0.122859
| 0.075949
| 0.042442
| 0.838049
| 0.831348
| 0.831348
| 0.806031
| 0.806031
| 0.806031
| 0
| 0.010195
| 0.277455
| 4,480
| 147
| 86
| 30.47619
| 0.819586
| 0.407589
| 0
| 0.72
| 0
| 0
| 0.214379
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.06
| false
| 0
| 0.1
| 0
| 0.22
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a128c3a3819e7cffb1b0c89f2415e97065855f8e
| 1,443
|
py
|
Python
|
test/test_api.py
|
rviollette/ctypeslib
|
7b2a97477da96bdb91ff3182544650c9331f9d42
|
[
"MIT"
] | null | null | null |
test/test_api.py
|
rviollette/ctypeslib
|
7b2a97477da96bdb91ff3182544650c9331f9d42
|
[
"MIT"
] | null | null | null |
test/test_api.py
|
rviollette/ctypeslib
|
7b2a97477da96bdb91ff3182544650c9331f9d42
|
[
"MIT"
] | null | null | null |
import unittest
import io
import ctypeslib
class ApiTest(unittest.TestCase):
def test_basic_use_string(self):
py_namespace = ctypeslib.translate('''
int i = 12;
char c2[3] = {'a','b','c'};
struct example_detail {
int first;
int last;
};
struct example {
int argsz;
int flags;
int count;
struct example_detail details[2];
};
''')
self.assertIn("i", py_namespace)
self.assertIn("c2", py_namespace)
self.assertIn("struct_example_detail", py_namespace)
self.assertIn("struct_example", py_namespace)
self.assertEqual(py_namespace.i, 12)
self.assertEqual(py_namespace.c2, ['a', 'b', 'c'])
# import pprint
# pprint.pprint(py_namespace)
def test_basic_use_io(self):
input_io = io.StringIO('''
int i = 12;
char c2[3] = {'a','b','c'};
struct example_detail {
int first;
int last;
};
struct example {
int argsz;
int flags;
int count;
struct example_detail details[2];
};
''')
py_namespace = ctypeslib.translate(input_io)
self.assertIn("i", py_namespace)
self.assertIn("c2", py_namespace)
self.assertIn("struct_example_detail", py_namespace)
self.assertIn("struct_example", py_namespace)
self.assertEqual(py_namespace.i, 12)
self.assertEqual(py_namespace.c2, ['a', 'b', 'c'])
if __name__ == '__main__':
unittest.main()
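# Usage note (illustrative sketch): the namespace returned by
# ctypeslib.translate exposes each C declaration as an attribute, with struct
# definitions translated to ctypes.Structure subclasses, so - assuming the
# snippet above - one could instantiate the generated type directly:
#
#   ex = py_namespace.struct_example(argsz=1, flags=0, count=2)
#   ex.details[0].first = 42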
| 24.05
| 60
| 0.61885
| 179
| 1,443
| 4.759777
| 0.240223
| 0.193662
| 0.140845
| 0.161972
| 0.706573
| 0.706573
| 0.706573
| 0.706573
| 0.706573
| 0.706573
| 0
| 0.016544
| 0.246015
| 1,443
| 59
| 61
| 24.457627
| 0.766544
| 0.028413
| 0
| 0.77551
| 0
| 0
| 0.374553
| 0.030021
| 0
| 0
| 0
| 0
| 0.244898
| 1
| 0.040816
| false
| 0
| 0.061224
| 0
| 0.122449
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a1798a095c8260572f87f6045e50e3aab9085761
| 114
|
py
|
Python
|
ramda/dec_test.py
|
Rafi993/pyramda
|
4fa7fe28d5eaa798b702d28bdd3948515cb88f48
|
[
"MIT"
] | 56
|
2018-08-06T08:44:58.000Z
|
2022-03-17T09:49:03.000Z
|
ramda/dec_test.py
|
Rafi993/pyramda
|
4fa7fe28d5eaa798b702d28bdd3948515cb88f48
|
[
"MIT"
] | 28
|
2019-06-17T11:09:52.000Z
|
2022-02-18T16:59:21.000Z
|
ramda/dec_test.py
|
slavaGanzin/pyramda
|
4fa7fe28d5eaa798b702d28bdd3948515cb88f48
|
[
"MIT"
] | 5
|
2019-09-18T09:24:38.000Z
|
2021-07-21T08:40:23.000Z
|
from .dec import dec
from ramda.private.asserts import assert_equal
def dec_test():
assert_equal(dec(5), 4)
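# A further illustrative check (not in the original suite): dec decrements its
# argument by one, so it should also hold across zero.
def dec_zero_test():
    assert_equal(dec(0), -1)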
| 16.285714
| 46
| 0.745614
| 19
| 114
| 4.315789
| 0.631579
| 0.268293
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.020833
| 0.157895
| 114
| 6
| 47
| 19
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
a191d934711bed3af32e9e697e43102d0a3c4215
| 35,682
|
py
|
Python
|
sdk/python/pulumi_azure/streamanalytics/reference_input_mssql.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2018-06-18T00:19:44.000Z
|
2022-02-20T05:32:57.000Z
|
sdk/python/pulumi_azure/streamanalytics/reference_input_mssql.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 663
|
2018-06-18T21:08:46.000Z
|
2022-03-31T20:10:11.000Z
|
sdk/python/pulumi_azure/streamanalytics/reference_input_mssql.py
|
henriktao/pulumi-azure
|
f1cbcf100b42b916da36d8fe28be3a159abaf022
|
[
"ECL-2.0",
"Apache-2.0"
] | 41
|
2018-07-19T22:37:38.000Z
|
2022-03-14T10:56:26.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['ReferenceInputMssqlArgs', 'ReferenceInputMssql']
@pulumi.input_type
class ReferenceInputMssqlArgs:
def __init__(__self__, *,
database: pulumi.Input[str],
full_snapshot_query: pulumi.Input[str],
password: pulumi.Input[str],
refresh_type: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
server: pulumi.Input[str],
stream_analytics_job_name: pulumi.Input[str],
username: pulumi.Input[str],
delta_snapshot_query: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
refresh_interval_duration: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a ReferenceInputMssql resource.
:param pulumi.Input[str] database: The MS SQL database name where the reference data exists.
:param pulumi.Input[str] full_snapshot_query: The query used to retrieve the reference data from the MS SQL database.
        :param pulumi.Input[str] password: The password to connect to the MS SQL database.
:param pulumi.Input[str] refresh_type: Defines whether and how the reference data should be refreshed. Accepted values are `Static`, `RefreshPeriodicallyWithFull` and `RefreshPeriodicallyWithDelta`.
:param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Stream Analytics Job should exist. Changing this forces a new resource to be created.
:param pulumi.Input[str] server: The fully qualified domain name of the MS SQL server.
:param pulumi.Input[str] stream_analytics_job_name: The name of the Stream Analytics Job. Changing this forces a new resource to be created.
:param pulumi.Input[str] username: The username to connect to the MS SQL database.
:param pulumi.Input[str] delta_snapshot_query: The query used to retrieve incremental changes in the reference data from the MS SQL database. Cannot be set when `refresh_type` is `Static`.
:param pulumi.Input[str] name: The name of the Reference Input MS SQL data. Changing this forces a new resource to be created.
:param pulumi.Input[str] refresh_interval_duration: The frequency in `hh:mm:ss` with which the reference data should be retrieved from the MS SQL database e.g. `00:20:00` for every 20 minutes. Must be set when `refresh_type` is `RefreshPeriodicallyWithFull` or `RefreshPeriodicallyWithDelta`.
"""
pulumi.set(__self__, "database", database)
pulumi.set(__self__, "full_snapshot_query", full_snapshot_query)
pulumi.set(__self__, "password", password)
pulumi.set(__self__, "refresh_type", refresh_type)
pulumi.set(__self__, "resource_group_name", resource_group_name)
pulumi.set(__self__, "server", server)
pulumi.set(__self__, "stream_analytics_job_name", stream_analytics_job_name)
pulumi.set(__self__, "username", username)
if delta_snapshot_query is not None:
pulumi.set(__self__, "delta_snapshot_query", delta_snapshot_query)
if name is not None:
pulumi.set(__self__, "name", name)
if refresh_interval_duration is not None:
pulumi.set(__self__, "refresh_interval_duration", refresh_interval_duration)
@property
@pulumi.getter
def database(self) -> pulumi.Input[str]:
"""
The MS SQL database name where the reference data exists.
"""
return pulumi.get(self, "database")
@database.setter
def database(self, value: pulumi.Input[str]):
pulumi.set(self, "database", value)
@property
@pulumi.getter(name="fullSnapshotQuery")
def full_snapshot_query(self) -> pulumi.Input[str]:
"""
The query used to retrieve the reference data from the MS SQL database.
"""
return pulumi.get(self, "full_snapshot_query")
@full_snapshot_query.setter
def full_snapshot_query(self, value: pulumi.Input[str]):
pulumi.set(self, "full_snapshot_query", value)
@property
@pulumi.getter
def password(self) -> pulumi.Input[str]:
"""
        The password to connect to the MS SQL database.
"""
return pulumi.get(self, "password")
@password.setter
def password(self, value: pulumi.Input[str]):
pulumi.set(self, "password", value)
@property
@pulumi.getter(name="refreshType")
def refresh_type(self) -> pulumi.Input[str]:
"""
Defines whether and how the reference data should be refreshed. Accepted values are `Static`, `RefreshPeriodicallyWithFull` and `RefreshPeriodicallyWithDelta`.
"""
return pulumi.get(self, "refresh_type")
@refresh_type.setter
def refresh_type(self, value: pulumi.Input[str]):
pulumi.set(self, "refresh_type", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the Resource Group where the Stream Analytics Job should exist. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter
def server(self) -> pulumi.Input[str]:
"""
The fully qualified domain name of the MS SQL server.
"""
return pulumi.get(self, "server")
@server.setter
def server(self, value: pulumi.Input[str]):
pulumi.set(self, "server", value)
@property
@pulumi.getter(name="streamAnalyticsJobName")
def stream_analytics_job_name(self) -> pulumi.Input[str]:
"""
The name of the Stream Analytics Job. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "stream_analytics_job_name")
@stream_analytics_job_name.setter
def stream_analytics_job_name(self, value: pulumi.Input[str]):
pulumi.set(self, "stream_analytics_job_name", value)
@property
@pulumi.getter
def username(self) -> pulumi.Input[str]:
"""
The username to connect to the MS SQL database.
"""
return pulumi.get(self, "username")
@username.setter
def username(self, value: pulumi.Input[str]):
pulumi.set(self, "username", value)
@property
@pulumi.getter(name="deltaSnapshotQuery")
def delta_snapshot_query(self) -> Optional[pulumi.Input[str]]:
"""
The query used to retrieve incremental changes in the reference data from the MS SQL database. Cannot be set when `refresh_type` is `Static`.
"""
return pulumi.get(self, "delta_snapshot_query")
@delta_snapshot_query.setter
def delta_snapshot_query(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "delta_snapshot_query", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the Reference Input MS SQL data. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter(name="refreshIntervalDuration")
def refresh_interval_duration(self) -> Optional[pulumi.Input[str]]:
"""
The frequency in `hh:mm:ss` with which the reference data should be retrieved from the MS SQL database e.g. `00:20:00` for every 20 minutes. Must be set when `refresh_type` is `RefreshPeriodicallyWithFull` or `RefreshPeriodicallyWithDelta`.
"""
return pulumi.get(self, "refresh_interval_duration")
@refresh_interval_duration.setter
def refresh_interval_duration(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "refresh_interval_duration", value)
@pulumi.input_type
class _ReferenceInputMssqlState:
def __init__(__self__, *,
database: Optional[pulumi.Input[str]] = None,
delta_snapshot_query: Optional[pulumi.Input[str]] = None,
full_snapshot_query: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
password: Optional[pulumi.Input[str]] = None,
refresh_interval_duration: Optional[pulumi.Input[str]] = None,
refresh_type: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
server: Optional[pulumi.Input[str]] = None,
stream_analytics_job_name: Optional[pulumi.Input[str]] = None,
username: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering ReferenceInputMssql resources.
:param pulumi.Input[str] database: The MS SQL database name where the reference data exists.
:param pulumi.Input[str] delta_snapshot_query: The query used to retrieve incremental changes in the reference data from the MS SQL database. Cannot be set when `refresh_type` is `Static`.
:param pulumi.Input[str] full_snapshot_query: The query used to retrieve the reference data from the MS SQL database.
:param pulumi.Input[str] name: The name of the Reference Input MS SQL data. Changing this forces a new resource to be created.
        :param pulumi.Input[str] password: The password to connect to the MS SQL database.
:param pulumi.Input[str] refresh_interval_duration: The frequency in `hh:mm:ss` with which the reference data should be retrieved from the MS SQL database e.g. `00:20:00` for every 20 minutes. Must be set when `refresh_type` is `RefreshPeriodicallyWithFull` or `RefreshPeriodicallyWithDelta`.
:param pulumi.Input[str] refresh_type: Defines whether and how the reference data should be refreshed. Accepted values are `Static`, `RefreshPeriodicallyWithFull` and `RefreshPeriodicallyWithDelta`.
:param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Stream Analytics Job should exist. Changing this forces a new resource to be created.
:param pulumi.Input[str] server: The fully qualified domain name of the MS SQL server.
:param pulumi.Input[str] stream_analytics_job_name: The name of the Stream Analytics Job. Changing this forces a new resource to be created.
:param pulumi.Input[str] username: The username to connect to the MS SQL database.
"""
if database is not None:
pulumi.set(__self__, "database", database)
if delta_snapshot_query is not None:
pulumi.set(__self__, "delta_snapshot_query", delta_snapshot_query)
if full_snapshot_query is not None:
pulumi.set(__self__, "full_snapshot_query", full_snapshot_query)
if name is not None:
pulumi.set(__self__, "name", name)
if password is not None:
pulumi.set(__self__, "password", password)
if refresh_interval_duration is not None:
pulumi.set(__self__, "refresh_interval_duration", refresh_interval_duration)
if refresh_type is not None:
pulumi.set(__self__, "refresh_type", refresh_type)
if resource_group_name is not None:
pulumi.set(__self__, "resource_group_name", resource_group_name)
if server is not None:
pulumi.set(__self__, "server", server)
if stream_analytics_job_name is not None:
pulumi.set(__self__, "stream_analytics_job_name", stream_analytics_job_name)
if username is not None:
pulumi.set(__self__, "username", username)
@property
@pulumi.getter
def database(self) -> Optional[pulumi.Input[str]]:
"""
The MS SQL database name where the reference data exists.
"""
return pulumi.get(self, "database")
@database.setter
def database(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "database", value)
@property
@pulumi.getter(name="deltaSnapshotQuery")
def delta_snapshot_query(self) -> Optional[pulumi.Input[str]]:
"""
The query used to retrieve incremental changes in the reference data from the MS SQL database. Cannot be set when `refresh_type` is `Static`.
"""
return pulumi.get(self, "delta_snapshot_query")
@delta_snapshot_query.setter
def delta_snapshot_query(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "delta_snapshot_query", value)
@property
@pulumi.getter(name="fullSnapshotQuery")
def full_snapshot_query(self) -> Optional[pulumi.Input[str]]:
"""
The query used to retrieve the reference data from the MS SQL database.
"""
return pulumi.get(self, "full_snapshot_query")
@full_snapshot_query.setter
def full_snapshot_query(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "full_snapshot_query", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the Reference Input MS SQL data. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def password(self) -> Optional[pulumi.Input[str]]:
"""
        The password to connect to the MS SQL database.
"""
return pulumi.get(self, "password")
@password.setter
def password(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "password", value)
@property
@pulumi.getter(name="refreshIntervalDuration")
def refresh_interval_duration(self) -> Optional[pulumi.Input[str]]:
"""
The frequency in `hh:mm:ss` with which the reference data should be retrieved from the MS SQL database e.g. `00:20:00` for every 20 minutes. Must be set when `refresh_type` is `RefreshPeriodicallyWithFull` or `RefreshPeriodicallyWithDelta`.
"""
return pulumi.get(self, "refresh_interval_duration")
@refresh_interval_duration.setter
def refresh_interval_duration(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "refresh_interval_duration", value)
@property
@pulumi.getter(name="refreshType")
def refresh_type(self) -> Optional[pulumi.Input[str]]:
"""
Defines whether and how the reference data should be refreshed. Accepted values are `Static`, `RefreshPeriodicallyWithFull` and `RefreshPeriodicallyWithDelta`.
"""
return pulumi.get(self, "refresh_type")
@refresh_type.setter
def refresh_type(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "refresh_type", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the Resource Group where the Stream Analytics Job should exist. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter
def server(self) -> Optional[pulumi.Input[str]]:
"""
The fully qualified domain name of the MS SQL server.
"""
return pulumi.get(self, "server")
@server.setter
def server(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "server", value)
@property
@pulumi.getter(name="streamAnalyticsJobName")
def stream_analytics_job_name(self) -> Optional[pulumi.Input[str]]:
"""
The name of the Stream Analytics Job. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "stream_analytics_job_name")
@stream_analytics_job_name.setter
def stream_analytics_job_name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "stream_analytics_job_name", value)
@property
@pulumi.getter
def username(self) -> Optional[pulumi.Input[str]]:
"""
The username to connect to the MS SQL database.
"""
return pulumi.get(self, "username")
@username.setter
def username(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "username", value)
class ReferenceInputMssql(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
database: Optional[pulumi.Input[str]] = None,
delta_snapshot_query: Optional[pulumi.Input[str]] = None,
full_snapshot_query: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
password: Optional[pulumi.Input[str]] = None,
refresh_interval_duration: Optional[pulumi.Input[str]] = None,
refresh_type: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
server: Optional[pulumi.Input[str]] = None,
stream_analytics_job_name: Optional[pulumi.Input[str]] = None,
username: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Manages a Stream Analytics Reference Input from MS SQL. Reference data (also known as a lookup table) is a finite data set that is static or slowly changing in nature, used to perform a lookup or to correlate with your data stream. Learn more [here](https://docs.microsoft.com/en-us/azure/stream-analytics/stream-analytics-use-reference-data#azure-sql-database).
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.get_resource_group(name="example-resources")
example_job = azure.streamanalytics.get_job(name="example-job",
resource_group_name=azurerm_resource_group["example"]["name"])
example_server = azure.mssql.Server("exampleServer",
resource_group_name=azurerm_resource_group["example"]["name"],
location=azurerm_resource_group["example"]["location"],
version="12.0",
administrator_login="admin",
administrator_login_password="password")
example_database = azure.mssql.Database("exampleDatabase", server_id=example_server.id)
example_reference_input_mssql = azure.streamanalytics.ReferenceInputMssql("exampleReferenceInputMssql",
resource_group_name=azurerm_stream_analytics_job["example"]["resource_group_name"],
stream_analytics_job_name=azurerm_stream_analytics_job["example"]["name"],
server=example_server.fully_qualified_domain_name,
database=example_database.name,
username="exampleuser",
password="examplepassword",
refresh_type="RefreshPeriodicallyWithFull",
refresh_interval_duration="00:20:00",
full_snapshot_query=\"\"\" SELECT *
INTO [YourOutputAlias]
FROM [YourInputAlias]
\"\"\")
```
## Import
Stream Analytics can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:streamanalytics/referenceInputMssql:ReferenceInputMssql example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.StreamAnalytics/streamingjobs/job1/inputs/input1
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] database: The MS SQL database name where the reference data exists.
:param pulumi.Input[str] delta_snapshot_query: The query used to retrieve incremental changes in the reference data from the MS SQL database. Cannot be set when `refresh_type` is `Static`.
:param pulumi.Input[str] full_snapshot_query: The query used to retrieve the reference data from the MS SQL database.
:param pulumi.Input[str] name: The name of the Reference Input MS SQL data. Changing this forces a new resource to be created.
        :param pulumi.Input[str] password: The password to connect to the MS SQL database.
:param pulumi.Input[str] refresh_interval_duration: The frequency in `hh:mm:ss` with which the reference data should be retrieved from the MS SQL database e.g. `00:20:00` for every 20 minutes. Must be set when `refresh_type` is `RefreshPeriodicallyWithFull` or `RefreshPeriodicallyWithDelta`.
:param pulumi.Input[str] refresh_type: Defines whether and how the reference data should be refreshed. Accepted values are `Static`, `RefreshPeriodicallyWithFull` and `RefreshPeriodicallyWithDelta`.
:param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Stream Analytics Job should exist. Changing this forces a new resource to be created.
:param pulumi.Input[str] server: The fully qualified domain name of the MS SQL server.
:param pulumi.Input[str] stream_analytics_job_name: The name of the Stream Analytics Job. Changing this forces a new resource to be created.
:param pulumi.Input[str] username: The username to connect to the MS SQL database.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: ReferenceInputMssqlArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Manages a Stream Analytics Reference Input from MS SQL. Reference data (also known as a lookup table) is a finite data set that is static or slowly changing in nature, used to perform a lookup or to correlate with your data stream. Learn more [here](https://docs.microsoft.com/en-us/azure/stream-analytics/stream-analytics-use-reference-data#azure-sql-database).
## Example Usage
```python
import pulumi
import pulumi_azure as azure
example_resource_group = azure.core.get_resource_group(name="example-resources")
example_job = azure.streamanalytics.get_job(name="example-job",
resource_group_name=azurerm_resource_group["example"]["name"])
example_server = azure.mssql.Server("exampleServer",
resource_group_name=azurerm_resource_group["example"]["name"],
location=azurerm_resource_group["example"]["location"],
version="12.0",
administrator_login="admin",
administrator_login_password="password")
example_database = azure.mssql.Database("exampleDatabase", server_id=example_server.id)
example_reference_input_mssql = azure.streamanalytics.ReferenceInputMssql("exampleReferenceInputMssql",
resource_group_name=azurerm_stream_analytics_job["example"]["resource_group_name"],
stream_analytics_job_name=azurerm_stream_analytics_job["example"]["name"],
server=example_server.fully_qualified_domain_name,
database=example_database.name,
username="exampleuser",
password="examplepassword",
refresh_type="RefreshPeriodicallyWithFull",
refresh_interval_duration="00:20:00",
full_snapshot_query=\"\"\" SELECT *
INTO [YourOutputAlias]
FROM [YourInputAlias]
\"\"\")
```
## Import
Stream Analytics can be imported using the `resource id`, e.g.
```sh
$ pulumi import azure:streamanalytics/referenceInputMssql:ReferenceInputMssql example /subscriptions/00000000-0000-0000-0000-000000000000/resourceGroups/group1/providers/Microsoft.StreamAnalytics/streamingjobs/job1/inputs/input1
```
:param str resource_name: The name of the resource.
:param ReferenceInputMssqlArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ReferenceInputMssqlArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
database: Optional[pulumi.Input[str]] = None,
delta_snapshot_query: Optional[pulumi.Input[str]] = None,
full_snapshot_query: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
password: Optional[pulumi.Input[str]] = None,
refresh_interval_duration: Optional[pulumi.Input[str]] = None,
refresh_type: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
server: Optional[pulumi.Input[str]] = None,
stream_analytics_job_name: Optional[pulumi.Input[str]] = None,
username: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ReferenceInputMssqlArgs.__new__(ReferenceInputMssqlArgs)
if database is None and not opts.urn:
raise TypeError("Missing required property 'database'")
__props__.__dict__["database"] = database
__props__.__dict__["delta_snapshot_query"] = delta_snapshot_query
if full_snapshot_query is None and not opts.urn:
raise TypeError("Missing required property 'full_snapshot_query'")
__props__.__dict__["full_snapshot_query"] = full_snapshot_query
__props__.__dict__["name"] = name
if password is None and not opts.urn:
raise TypeError("Missing required property 'password'")
__props__.__dict__["password"] = password
__props__.__dict__["refresh_interval_duration"] = refresh_interval_duration
if refresh_type is None and not opts.urn:
raise TypeError("Missing required property 'refresh_type'")
__props__.__dict__["refresh_type"] = refresh_type
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
if server is None and not opts.urn:
raise TypeError("Missing required property 'server'")
__props__.__dict__["server"] = server
if stream_analytics_job_name is None and not opts.urn:
raise TypeError("Missing required property 'stream_analytics_job_name'")
__props__.__dict__["stream_analytics_job_name"] = stream_analytics_job_name
if username is None and not opts.urn:
raise TypeError("Missing required property 'username'")
__props__.__dict__["username"] = username
super(ReferenceInputMssql, __self__).__init__(
'azure:streamanalytics/referenceInputMssql:ReferenceInputMssql',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
database: Optional[pulumi.Input[str]] = None,
delta_snapshot_query: Optional[pulumi.Input[str]] = None,
full_snapshot_query: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
password: Optional[pulumi.Input[str]] = None,
refresh_interval_duration: Optional[pulumi.Input[str]] = None,
refresh_type: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
server: Optional[pulumi.Input[str]] = None,
stream_analytics_job_name: Optional[pulumi.Input[str]] = None,
username: Optional[pulumi.Input[str]] = None) -> 'ReferenceInputMssql':
"""
Get an existing ReferenceInputMssql resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] database: The MS SQL database name where the reference data exists.
:param pulumi.Input[str] delta_snapshot_query: The query used to retrieve incremental changes in the reference data from the MS SQL database. Cannot be set when `refresh_type` is `Static`.
:param pulumi.Input[str] full_snapshot_query: The query used to retrieve the reference data from the MS SQL database.
:param pulumi.Input[str] name: The name of the Reference Input MS SQL data. Changing this forces a new resource to be created.
        :param pulumi.Input[str] password: The password to connect to the MS SQL database.
:param pulumi.Input[str] refresh_interval_duration: The frequency in `hh:mm:ss` with which the reference data should be retrieved from the MS SQL database e.g. `00:20:00` for every 20 minutes. Must be set when `refresh_type` is `RefreshPeriodicallyWithFull` or `RefreshPeriodicallyWithDelta`.
:param pulumi.Input[str] refresh_type: Defines whether and how the reference data should be refreshed. Accepted values are `Static`, `RefreshPeriodicallyWithFull` and `RefreshPeriodicallyWithDelta`.
:param pulumi.Input[str] resource_group_name: The name of the Resource Group where the Stream Analytics Job should exist. Changing this forces a new resource to be created.
:param pulumi.Input[str] server: The fully qualified domain name of the MS SQL server.
:param pulumi.Input[str] stream_analytics_job_name: The name of the Stream Analytics Job. Changing this forces a new resource to be created.
:param pulumi.Input[str] username: The username to connect to the MS SQL database.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _ReferenceInputMssqlState.__new__(_ReferenceInputMssqlState)
__props__.__dict__["database"] = database
__props__.__dict__["delta_snapshot_query"] = delta_snapshot_query
__props__.__dict__["full_snapshot_query"] = full_snapshot_query
__props__.__dict__["name"] = name
__props__.__dict__["password"] = password
__props__.__dict__["refresh_interval_duration"] = refresh_interval_duration
__props__.__dict__["refresh_type"] = refresh_type
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["server"] = server
__props__.__dict__["stream_analytics_job_name"] = stream_analytics_job_name
__props__.__dict__["username"] = username
return ReferenceInputMssql(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def database(self) -> pulumi.Output[str]:
"""
The MS SQL database name where the reference data exists.
"""
return pulumi.get(self, "database")
@property
@pulumi.getter(name="deltaSnapshotQuery")
def delta_snapshot_query(self) -> pulumi.Output[Optional[str]]:
"""
The query used to retrieve incremental changes in the reference data from the MS SQL database. Cannot be set when `refresh_type` is `Static`.
"""
return pulumi.get(self, "delta_snapshot_query")
@property
@pulumi.getter(name="fullSnapshotQuery")
def full_snapshot_query(self) -> pulumi.Output[str]:
"""
The query used to retrieve the reference data from the MS SQL database.
"""
return pulumi.get(self, "full_snapshot_query")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The name of the Reference Input MS SQL data. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def password(self) -> pulumi.Output[str]:
"""
        The password to connect to the MS SQL database.
"""
return pulumi.get(self, "password")
@property
@pulumi.getter(name="refreshIntervalDuration")
def refresh_interval_duration(self) -> pulumi.Output[Optional[str]]:
"""
The frequency in `hh:mm:ss` with which the reference data should be retrieved from the MS SQL database e.g. `00:20:00` for every 20 minutes. Must be set when `refresh_type` is `RefreshPeriodicallyWithFull` or `RefreshPeriodicallyWithDelta`.
"""
return pulumi.get(self, "refresh_interval_duration")
@property
@pulumi.getter(name="refreshType")
def refresh_type(self) -> pulumi.Output[str]:
"""
Defines whether and how the reference data should be refreshed. Accepted values are `Static`, `RefreshPeriodicallyWithFull` and `RefreshPeriodicallyWithDelta`.
"""
return pulumi.get(self, "refresh_type")
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Output[str]:
"""
The name of the Resource Group where the Stream Analytics Job should exist. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "resource_group_name")
@property
@pulumi.getter
def server(self) -> pulumi.Output[str]:
"""
The fully qualified domain name of the MS SQL server.
"""
return pulumi.get(self, "server")
@property
@pulumi.getter(name="streamAnalyticsJobName")
def stream_analytics_job_name(self) -> pulumi.Output[str]:
"""
The name of the Stream Analytics Job. Changing this forces a new resource to be created.
"""
return pulumi.get(self, "stream_analytics_job_name")
@property
@pulumi.getter
def username(self) -> pulumi.Output[str]:
"""
The username to connect to the MS SQL database.
"""
return pulumi.get(self, "username")
| 50.684659
| 370
| 0.676279
| 4,300
| 35,682
| 5.40186
| 0.056744
| 0.069614
| 0.087395
| 0.071035
| 0.917126
| 0.899733
| 0.88488
| 0.868521
| 0.85849
| 0.841355
| 0
| 0.005282
| 0.230592
| 35,682
| 703
| 371
| 50.756757
| 0.840788
| 0.410347
| 0
| 0.695313
| 1
| 0
| 0.120742
| 0.038442
| 0
| 0
| 0
| 0
| 0
| 1
| 0.161458
| false
| 0.065104
| 0.013021
| 0
| 0.270833
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
a1981623b7677da063b8b7f7de94860d4b096a7c
| 31,391
|
py
|
Python
|
test/functional/omni_nonfungibletokens.py
|
fiscalobject/uniasset
|
54337e5bfae4af6b1ac453937038201835de15c4
|
[
"MIT"
] | 2
|
2020-11-12T17:01:38.000Z
|
2021-04-29T08:52:13.000Z
|
test/functional/omni_nonfungibletokens.py
|
fiscalobject/uniasset
|
54337e5bfae4af6b1ac453937038201835de15c4
|
[
"MIT"
] | null | null | null |
test/functional/omni_nonfungibletokens.py
|
fiscalobject/uniasset
|
54337e5bfae4af6b1ac453937038201835de15c4
|
[
"MIT"
] | 7
|
2020-10-31T12:14:31.000Z
|
2022-02-14T18:24:12.000Z
|
#!/usr/bin/env python3
# Copyright (c) 2017-2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test non-fungible tokens."""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.authproxy import JSONRPCException
from test_framework.util import assert_equal, sync_mempools, sync_blocks
class OmniNonFungibleTokensTest(BitcoinTestFramework):
def set_test_params(self):
self.num_nodes = 2
self.setup_clean_chain = True
def run_test(self):
# Get address for mining, issuance, granting and sending tokens to.
token_address = self.nodes[0].getnewaddress()
grant_address = self.nodes[0].getnewaddress()
destination_address = self.nodes[0].getnewaddress()
# Fund issuance address
self.nodes[0].generatetoaddress(110, token_address)
# Create test token
txid = self.nodes[0].omni_sendissuancemanaged(token_address, 2, 5, 0, "", "", "TESTTOKEN", "", "")
self.nodes[0].generatetoaddress(1, token_address)
# Checking the transaction was valid...
result = self.nodes[0].omni_gettransaction(txid)
assert_equal(result['valid'], True)
property_id = result["propertyid"]
# Send tokens we do not have yet
try:
self.nodes[0].omni_sendnonfungible(token_address, destination_address, property_id, 1, 1)
except JSONRPCException as e:
errorString = e.error['message']
assert("Sender has insufficient balance" in errorString)
errorString = ""
# Grant tokens to creator
txid = self.nodes[0].omni_sendgrant(token_address, "", property_id, "100", "")
self.nodes[0].generatetoaddress(1, token_address)
# Send tokens out of range start
try:
self.nodes[0].omni_sendnonfungible(token_address, destination_address, property_id, 0, 10)
except JSONRPCException as e:
errorString = e.error['message']
assert("Sender does not own the range" in errorString)
errorString = ""
# Send tokens out of range end
try:
self.nodes[0].omni_sendnonfungible(token_address, destination_address, property_id, 100, 101)
except JSONRPCException as e:
errorString = e.error['message']
assert("Sender does not own the range" in errorString)
errorString = ""
# Checking the transaction was valid...
result = self.nodes[0].omni_gettransaction(txid)
assert_equal(result['valid'], True)
assert_equal(result['tokenstart'], '1')
assert_equal(result['tokenend'], '100')
assert_equal(result['grantdata'], "")
result = self.nodes[0].omni_getnonfungibletokens(token_address, property_id)
assert_equal(result[0]['propertyid'], property_id)
assert_equal(result[0]['tokens'][0]['tokenstart'], 1)
assert_equal(result[0]['tokens'][0]['tokenend'], 100)
assert_equal(result[0]['tokens'][0]['amount'], 100)
result = self.nodes[0].omni_getnonfungibletokenranges(property_id)
assert_equal(result[0]['address'], token_address)
assert_equal(result[0]['tokenstart'], 1)
assert_equal(result[0]['tokenend'], 100)
assert_equal(result[0]['amount'], 100)
        # Check the data fields are blank
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 1)
assert_equal(result[0]['owner'], token_address)
assert_equal(result[0]['grantdata'], '')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], '')
# Grant 1 tokens with data
txid = self.nodes[0].omni_sendgrant(token_address, "", property_id, "1", "Test grantdata")
self.nodes[0].generatetoaddress(1, token_address)
# Checking the transaction was valid...
result = self.nodes[0].omni_gettransaction(txid)
assert_equal(result['valid'], True)
assert_equal(result['tokenstart'], '101')
assert_equal(result['tokenend'], '101')
assert_equal(result['grantdata'], "Test grantdata")
result = self.nodes[0].omni_getnonfungibletokens(token_address, property_id)
assert_equal(result[0]['tokens'][0]['tokenstart'], 1)
assert_equal(result[0]['tokens'][0]['tokenend'], 101)
assert_equal(result[0]['tokens'][0]['amount'], 101)
result = self.nodes[0].omni_getnonfungibletokenranges(property_id)
assert_equal(result[0]['address'], token_address)
assert_equal(result[0]['tokenstart'], 1)
assert_equal(result[0]['tokenend'], 101)
assert_equal(result[0]['amount'], 101)
# Make sure original token data unchanged
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 1)
assert_equal(result[0]['owner'], token_address)
assert_equal(result[0]['grantdata'], '')
# Check tokens have expected data set
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 101)
assert_equal(result[0]['owner'], token_address)
assert_equal(result[0]['grantdata'], 'Test grantdata')
# Grant 99 tokens with different data
txid = self.nodes[0].omni_sendgrant(token_address, "", property_id, "99", "Different grantdata")
self.nodes[0].generatetoaddress(1, token_address)
# Checking the transaction was valid...
result = self.nodes[0].omni_gettransaction(txid)
assert_equal(result['valid'], True)
assert_equal(result['tokenstart'], '102')
assert_equal(result['tokenend'], '200')
assert_equal(result['grantdata'], "Different grantdata")
result = self.nodes[0].omni_getnonfungibletokens(token_address, property_id)
assert_equal(result[0]['tokens'][0]['tokenstart'], 1)
assert_equal(result[0]['tokens'][0]['tokenend'], 200)
assert_equal(result[0]['tokens'][0]['amount'], 200)
result = self.nodes[0].omni_getnonfungibletokenranges(property_id)
assert_equal(result[0]['address'], token_address)
assert_equal(result[0]['tokenstart'], 1)
assert_equal(result[0]['tokenend'], 200)
assert_equal(result[0]['amount'], 200)
# Make sure previous token data unchanged
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 1)
assert_equal(result[0]['owner'], token_address)
assert_equal(result[0]['grantdata'], '')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], '')
# Make sure previous token data unchanged
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 101)
assert_equal(result[0]['owner'], token_address)
assert_equal(result[0]['grantdata'], 'Test grantdata')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], '')
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 200)
assert_equal(result[0]['owner'], token_address)
assert_equal(result[0]['grantdata'], 'Different grantdata')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], '')
# Grant 100 tokens to different address
txid = self.nodes[0].omni_sendgrant(token_address, grant_address, property_id, "100", "Multiple grantdata")
self.nodes[0].generatetoaddress(1, token_address)
# Checking the transaction was valid...
result = self.nodes[0].omni_gettransaction(txid)
assert_equal(result['valid'], True)
assert_equal(result['tokenstart'], '201')
assert_equal(result['tokenend'], '300')
assert_equal(result['grantdata'], "Multiple grantdata")
# No change here
result = self.nodes[0].omni_getnonfungibletokens(token_address, property_id)
assert_equal(result[0]['tokens'][0]['tokenstart'], 1)
assert_equal(result[0]['tokens'][0]['tokenend'], 200)
assert_equal(result[0]['tokens'][0]['amount'], 200)
# New tokens appear on this address
result = self.nodes[0].omni_getnonfungibletokens(grant_address, property_id)
assert_equal(result[0]['tokens'][0]['tokenstart'], 201)
assert_equal(result[0]['tokens'][0]['tokenend'], 300)
assert_equal(result[0]['tokens'][0]['amount'], 100)
# Two addresses now show as holders of this token
result = self.nodes[0].omni_getnonfungibletokenranges(property_id)
assert_equal(result[0]['address'], token_address)
assert_equal(result[0]['tokenstart'], 1)
assert_equal(result[0]['tokenend'], 200)
assert_equal(result[0]['amount'], 200)
assert_equal(result[1]['address'], grant_address)
assert_equal(result[1]['tokenstart'], 201)
assert_equal(result[1]['tokenend'], 300)
assert_equal(result[1]['amount'], 100)
# Check data fields
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 300)
assert_equal(result[0]['owner'], grant_address)
assert_equal(result[0]['grantdata'], 'Multiple grantdata')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], '')
# Test sending tokens to a new address
self.nodes[0].sendtoaddress(destination_address, 1)
self.nodes[0].sendtoaddress(grant_address, 1)
self.nodes[0].generatetoaddress(1, token_address)
# Send token range without data
self.nodes[0].omni_sendnonfungible(token_address, destination_address, property_id, 1, 10)
# Send single token with "Test grantdata"
self.nodes[0].omni_sendnonfungible(token_address, destination_address, property_id, 101, 101)
# Send token range from different address with "Multiple grantdata"
self.nodes[0].omni_sendnonfungible(grant_address, destination_address, property_id, 201, 210)
self.nodes[0].generatetoaddress(1, token_address)
# Send tokens that have already been sent from the source address
try:
    self.nodes[0].omni_sendnonfungible(token_address, destination_address, property_id, 101, 101)
except JSONRPCException as e:
    errorString = e.error['message']
assert("Sender does not own the range" in errorString)
errorString = ""
# Check range has changed with gap in the middle
result = self.nodes[0].omni_getnonfungibletokens(token_address, property_id)
assert_equal(result[0]['tokens'][0]['tokenstart'], 11)
assert_equal(result[0]['tokens'][0]['tokenend'], 100)
assert_equal(result[0]['tokens'][0]['amount'], 90)
assert_equal(result[0]['tokens'][1]['tokenstart'], 102)
assert_equal(result[0]['tokens'][1]['tokenend'], 200)
assert_equal(result[0]['tokens'][1]['amount'], 99)
# Check range has changed
result = self.nodes[0].omni_getnonfungibletokens(grant_address, property_id)
assert_equal(result[0]['tokens'][0]['tokenstart'], 211)
assert_equal(result[0]['tokens'][0]['tokenend'], 300)
assert_equal(result[0]['tokens'][0]['amount'], 90)
# New tokens should be present in dest address
result = self.nodes[0].omni_getnonfungibletokens(destination_address, property_id)
assert_equal(result[0]['tokens'][0]['tokenstart'], 1)
assert_equal(result[0]['tokens'][0]['tokenend'], 10)
assert_equal(result[0]['tokens'][0]['amount'], 10)
assert_equal(result[0]['tokens'][1]['tokenstart'], 101)
assert_equal(result[0]['tokens'][1]['tokenend'], 101)
assert_equal(result[0]['tokens'][1]['amount'], 1)
assert_equal(result[0]['tokens'][2]['tokenstart'], 201)
assert_equal(result[0]['tokens'][2]['tokenend'], 210)
assert_equal(result[0]['tokens'][2]['amount'], 10)
# Three addresses now show as holders of this token
result = self.nodes[0].omni_getnonfungibletokenranges(property_id)
assert_equal(result[0]['address'], destination_address)
assert_equal(result[0]['tokenstart'], 1)
assert_equal(result[0]['tokenend'], 10)
assert_equal(result[0]['amount'], 10)
assert_equal(result[1]['address'], token_address)
assert_equal(result[1]['tokenstart'], 11)
assert_equal(result[1]['tokenend'], 100)
assert_equal(result[1]['amount'], 90)
assert_equal(result[2]['address'], destination_address)
assert_equal(result[2]['tokenstart'], 101)
assert_equal(result[2]['tokenend'], 101)
assert_equal(result[2]['amount'], 1)
assert_equal(result[3]['address'], token_address)
assert_equal(result[3]['tokenstart'], 102)
assert_equal(result[3]['tokenend'], 200)
assert_equal(result[3]['amount'], 99)
assert_equal(result[4]['address'], destination_address)
assert_equal(result[4]['tokenstart'], 201)
assert_equal(result[4]['tokenend'], 210)
assert_equal(result[4]['amount'], 10)
assert_equal(result[5]['address'], grant_address)
assert_equal(result[5]['tokenstart'], 211)
assert_equal(result[5]['tokenend'], 300)
assert_equal(result[5]['amount'], 90)
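# For reference, the full ownership layout of property_id asserted above is:
#   tokens 1-10    -> destination_address
#   tokens 11-100  -> token_address
#   token  101     -> destination_address
#   tokens 102-200 -> token_address
#   tokens 201-210 -> destination_address
#   tokens 211-300 -> grant_address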
# Check owner and data fields on transferred tokens
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 1)
assert_equal(result[0]['owner'], destination_address)
assert_equal(result[0]['grantdata'], '')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], '')
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 101)
assert_equal(result[0]['owner'], destination_address)
assert_equal(result[0]['grantdata'], 'Test grantdata')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], '')
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 201)
assert_equal(result[0]['owner'], destination_address)
assert_equal(result[0]['grantdata'], 'Multiple grantdata')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], '')
# Set data on a non-issuer/holder address
non_issuer_address = self.nodes[1].getnewaddress()
self.nodes[0].sendtoaddress(non_issuer_address, 1)
self.nodes[0].generatetoaddress(1, token_address)
# Fail to set data on token we did not issue
try:
    self.nodes[1].omni_setnonfungibledata(property_id, 102, 102, True, "Test issuerdata")
except JSONRPCException as e:
    errorString = e.error['message']
assert("Error with selected inputs for the send transaction" in errorString)
errorString = ""
# Fail to set data on token we do not own
try:
    self.nodes[1].omni_setnonfungibledata(property_id, 102, 102, False, "Test holderdata")
except JSONRPCException as e:
    errorString = e.error['message']
assert("Error with selected inputs for the send transaction" in errorString)
errorString = ""
# Send token to new address
self.nodes[0].omni_sendnonfungible(token_address, non_issuer_address, property_id, 102, 111)
self.nodes[0].generatetoaddress(1, token_address)
self.sync_all()
# Fail to set data on token we did not issue but do own
try:
    self.nodes[1].omni_setnonfungibledata(property_id, 102, 102, True, "Test issuerdata")
except JSONRPCException as e:
    errorString = e.error['message']
assert("Error with selected inputs for the send transaction" in errorString)
errorString = ""
# Set holder data
self.nodes[1].omni_setnonfungibledata(property_id, 102, 102, False, "Test holderdata")
sync_mempools(self.nodes)
self.nodes[0].generatetoaddress(1, token_address)
sync_blocks(self.nodes)
# Before blank
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 101)
assert_equal(result[0]['owner'], destination_address)
assert_equal(result[0]['grantdata'], 'Test grantdata')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], '')
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 102)
assert_equal(result[0]['owner'], non_issuer_address)
assert_equal(result[0]['grantdata'], 'Different grantdata')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], 'Test holderdata')
# End new data
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 103)
assert_equal(result[0]['owner'], non_issuer_address)
assert_equal(result[0]['grantdata'], 'Different grantdata')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], '')
# Set holder data over a range, overwriting the just-set entry
self.nodes[1].omni_setnonfungibledata(property_id, 102, 111, False, "New holderdata")
sync_mempools(self.nodes)
self.nodes[0].generatetoaddress(1, token_address)
sync_blocks(self.nodes)
# Before blank
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 101)
assert_equal(result[0]['owner'], destination_address)
assert_equal(result[0]['grantdata'], 'Test grantdata')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], '')
# Start new data
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 102)
assert_equal(result[0]['owner'], non_issuer_address)
assert_equal(result[0]['grantdata'], 'Different grantdata')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], 'New holderdata')
# End new data
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 111)
assert_equal(result[0]['owner'], non_issuer_address)
assert_equal(result[0]['grantdata'], 'Different grantdata')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], 'New holderdata')
# After blank
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 112)
assert_equal(result[0]['owner'], token_address)
assert_equal(result[0]['grantdata'], 'Different grantdata')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], '')
# Set holder data in the middle of the range
self.nodes[1].omni_setnonfungibledata(property_id, 106, 106, False, "Even newer holderdata")
sync_mempools(self.nodes)
self.nodes[0].generatetoaddress(1, token_address)
sync_blocks(self.nodes)
# Before blank
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 101)
assert_equal(result[0]['owner'], destination_address)
assert_equal(result[0]['grantdata'], 'Test grantdata')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], '')
# Before range start
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 102)
assert_equal(result[0]['owner'], non_issuer_address)
assert_equal(result[0]['grantdata'], 'Different grantdata')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], 'New holderdata')
# Before range end
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 105)
assert_equal(result[0]['owner'], non_issuer_address)
assert_equal(result[0]['grantdata'], 'Different grantdata')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], 'New holderdata')
# Data changed?
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 106)
assert_equal(result[0]['owner'], non_issuer_address)
assert_equal(result[0]['grantdata'], 'Different grantdata')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], 'Even newer holderdata')
# After range
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 107)
assert_equal(result[0]['owner'], non_issuer_address)
assert_equal(result[0]['grantdata'], 'Different grantdata')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], 'New holderdata')
# End after range data
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 111)
assert_equal(result[0]['owner'], non_issuer_address)
assert_equal(result[0]['grantdata'], 'Different grantdata')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], 'New holderdata')
# After blank
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 112)
assert_equal(result[0]['owner'], token_address)
assert_equal(result[0]['grantdata'], 'Different grantdata')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], '')
# Test issuer data
self.nodes[0].omni_setnonfungibledata(property_id, 106, 106, True, "Test issuerdata")
sync_mempools(self.nodes)
self.nodes[0].generatetoaddress(1, token_address)
sync_blocks(self.nodes)
# Before
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 105)
assert_equal(result[0]['owner'], non_issuer_address)
assert_equal(result[0]['grantdata'], 'Different grantdata')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], 'New holderdata')
# Changed?
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 106)
assert_equal(result[0]['owner'], non_issuer_address)
assert_equal(result[0]['grantdata'], 'Different grantdata')
assert_equal(result[0]['issuerdata'], 'Test issuerdata')
assert_equal(result[0]['holderdata'], 'Even newer holderdata')
# After
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 107)
assert_equal(result[0]['owner'], non_issuer_address)
assert_equal(result[0]['grantdata'], 'Different grantdata')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], 'New holderdata')
# Set issuer data across multiple ranges owned by different addresses
self.nodes[0].omni_setnonfungibledata(property_id, 101, 112, True, "Different issuerdata")
self.nodes[0].generatetoaddress(1, token_address)
sync_blocks(self.nodes)
# Before blank
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 100)
assert_equal(result[0]['owner'], token_address)
assert_equal(result[0]['grantdata'], '')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], '')
# Before range start
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 101)
assert_equal(result[0]['owner'], destination_address)
assert_equal(result[0]['grantdata'], 'Test grantdata')
assert_equal(result[0]['issuerdata'], 'Different issuerdata')
assert_equal(result[0]['holderdata'], '')
# Changed previously set record?
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 106)
assert_equal(result[0]['owner'], non_issuer_address)
assert_equal(result[0]['grantdata'], 'Different grantdata')
assert_equal(result[0]['issuerdata'], 'Different issuerdata')
assert_equal(result[0]['holderdata'], 'Even newer holderdata')
# End after range data
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 112)
assert_equal(result[0]['owner'], token_address)
assert_equal(result[0]['grantdata'], 'Different grantdata')
assert_equal(result[0]['issuerdata'], 'Different issuerdata')
assert_equal(result[0]['holderdata'], '')
# After blank
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 113)
assert_equal(result[0]['owner'], token_address)
assert_equal(result[0]['grantdata'], 'Different grantdata')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], '')
# Send tokens and check the data is unchanged
self.nodes[0].sendtoaddress(non_issuer_address, 1)
self.nodes[0].generatetoaddress(1, token_address)
sync_blocks(self.nodes)
self.nodes[1].omni_sendnonfungible(non_issuer_address, destination_address, property_id, 106, 111)
sync_mempools(self.nodes)
self.nodes[0].generatetoaddress(1, token_address)
# Before range start
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 105)
assert_equal(result[0]['owner'], non_issuer_address)
assert_equal(result[0]['grantdata'], 'Different grantdata')
assert_equal(result[0]['issuerdata'], 'Different issuerdata')
assert_equal(result[0]['holderdata'], 'New holderdata')
# Changed previously set record?
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 106)
assert_equal(result[0]['owner'], destination_address)
assert_equal(result[0]['grantdata'], 'Different grantdata')
assert_equal(result[0]['issuerdata'], 'Different issuerdata')
assert_equal(result[0]['holderdata'], 'Even newer holderdata')
# End after range data
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 111)
assert_equal(result[0]['owner'], destination_address)
assert_equal(result[0]['grantdata'], 'Different grantdata')
assert_equal(result[0]['issuerdata'], 'Different issuerdata')
assert_equal(result[0]['holderdata'], 'New holderdata')
# After blank
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 112)
assert_equal(result[0]['owner'], token_address)
assert_equal(result[0]['grantdata'], 'Different grantdata')
assert_equal(result[0]['issuerdata'], 'Different issuerdata')
assert_equal(result[0]['holderdata'], '')
# Test omni_getnonfungibletokendata ranges
result = self.nodes[0].omni_getnonfungibletokendata(property_id)
assert_equal(len(result), 300)
# Check first in range
assert_equal(result[0]['index'], 1)
assert_equal(result[0]['owner'], destination_address)
assert_equal(result[0]['grantdata'], '')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], '')
# Check last in range
assert_equal(result[299]['index'], 300)
assert_equal(result[299]['owner'], grant_address)
assert_equal(result[299]['grantdata'], 'Multiple grantdata')
assert_equal(result[299]['issuerdata'], '')
assert_equal(result[299]['holderdata'], '')
# Below range will return first in range, only one result expected.
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 0)
assert_equal(len(result), 1)
assert_equal(result[0]['index'], 1)
assert_equal(result[0]['owner'], destination_address)
assert_equal(result[0]['grantdata'], '')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], '')
# Above range will return last in range
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 301)
assert_equal(len(result), 1)
assert_equal(result[0]['index'], 300)
assert_equal(result[0]['owner'], grant_address)
assert_equal(result[0]['grantdata'], 'Multiple grantdata')
assert_equal(result[0]['issuerdata'], '')
assert_equal(result[0]['holderdata'], '')
# Test sub range
result = self.nodes[0].omni_getnonfungibletokendata(property_id, 101, 200)
assert_equal(len(result), 100)
assert_equal(result[0]['index'], 101)
assert_equal(result[0]['owner'], destination_address)
assert_equal(result[0]['grantdata'], 'Test grantdata')
assert_equal(result[0]['issuerdata'], 'Different issuerdata')
assert_equal(result[0]['holderdata'], '')
assert_equal(result[99]['index'], 200)
assert_equal(result[99]['owner'], token_address)
assert_equal(result[99]['grantdata'], 'Different grantdata')
assert_equal(result[99]['issuerdata'], '')
assert_equal(result[99]['holderdata'], '')
# Test omni_getnonfungibletokendata with multiple tokens on an address
txid = self.nodes[0].omni_sendissuancemanaged(token_address, 2, 5, 0, "", "", "TESTTOKEN2", "", "")
self.nodes[0].generatetoaddress(1, token_address)
# Checking the transaction was valid...
result = self.nodes[0].omni_gettransaction(txid)
assert_equal(result['valid'], True)
second_property_id = result["propertyid"]
# Grant tokens to creator
txid = self.nodes[0].omni_sendgrant(token_address, "", second_property_id, "100", "")
self.nodes[0].generatetoaddress(1, token_address)
# Check multiple properties returned
result = self.nodes[0].omni_getnonfungibletokens(token_address)
assert_equal(len(result), 2)
assert_equal(result[0]['propertyid'], property_id)
assert_equal(result[0]['tokens'][0]['tokenstart'], 11)
assert_equal(result[0]['tokens'][0]['tokenend'], 100)
assert_equal(result[0]['tokens'][0]['amount'], 90)
assert_equal(result[0]['tokens'][1]['tokenstart'], 112)
assert_equal(result[0]['tokens'][1]['tokenend'], 200)
assert_equal(result[0]['tokens'][1]['amount'], 89)
assert_equal(result[1]['propertyid'], second_property_id)
assert_equal(result[1]['tokens'][0]['tokenstart'], 1)
assert_equal(result[1]['tokens'][0]['tokenend'], 100)
assert_equal(result[1]['tokens'][0]['amount'], 100)
# Filter on first property ID
result = self.nodes[0].omni_getnonfungibletokens(token_address, property_id)
assert_equal(len(result), 1)
assert_equal(result[0]['propertyid'], property_id)
assert_equal(result[0]['tokens'][0]['tokenstart'], 11)
assert_equal(result[0]['tokens'][0]['tokenend'], 100)
assert_equal(result[0]['tokens'][0]['amount'], 90)
assert_equal(result[0]['tokens'][1]['tokenstart'], 112)
assert_equal(result[0]['tokens'][1]['tokenend'], 200)
assert_equal(result[0]['tokens'][1]['amount'], 89)
# Filter on second property ID
result = self.nodes[0].omni_getnonfungibletokens(token_address, second_property_id)
assert_equal(len(result), 1)
assert_equal(result[0]['propertyid'], second_property_id)
assert_equal(result[0]['tokens'][0]['tokenstart'], 1)
assert_equal(result[0]['tokens'][0]['tokenend'], 100)
assert_equal(result[0]['tokens'][0]['amount'], 100)
if __name__ == '__main__':
    OmniNonFungibleTokensTest().main()
| 47.925191
| 115
| 0.656207
| 3,664
| 31,391
| 5.447052
| 0.066321
| 0.163143
| 0.245315
| 0.209239
| 0.88105
| 0.831196
| 0.799429
| 0.765357
| 0.750125
| 0.73825
| 0
| 0.038357
| 0.201045
| 31,391
| 654
| 116
| 47.998471
| 0.757416
| 0.086713
| 0
| 0.719486
| 0
| 0
| 0.15148
| 0
| 0
| 0
| 0
| 0
| 0.648822
| 1
| 0.004283
| false
| 0
| 0.006424
| 0
| 0.012848
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
a1ba6423e67f441dae8f1b1558ee6cb608829a25
| 231
|
py
|
Python
|
Calculator/__init__.py
|
biswanathdehury01/Create-your-own-Python-Library
|
9b9fdff1a4fa59b3d65328d3dd2d13abf525bc5b
|
[
"MIT"
] | null | null | null |
Calculator/__init__.py
|
biswanathdehury01/Create-your-own-Python-Library
|
9b9fdff1a4fa59b3d65328d3dd2d13abf525bc5b
|
[
"MIT"
] | null | null | null |
Calculator/__init__.py
|
biswanathdehury01/Create-your-own-Python-Library
|
9b9fdff1a4fa59b3d65328d3dd2d13abf525bc5b
|
[
"MIT"
] | null | null | null |
def add_numbers(num1, num2):
    return num1 + num2

def subtract_numbers(num1, num2):
    return num1 - num2

def multiply_numbers(num1, num2):
    return num1 * num2

def divide_numbers(num1, num2):
    return num1 / num2
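# Example usage (illustrative):
# >>> add_numbers(2, 3)
# 5
# >>> subtract_numbers(5, 2)
# 3
# >>> multiply_numbers(4, 3)
# 12
# >>> divide_numbers(7, 2)  # true division, returns a float
# 3.5
# Note: divide_numbers performs no zero check, so divide_numbers(1, 0)
# raises ZeroDivisionError.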
| 19.25
| 34
| 0.679654
| 32
| 231
| 4.78125
| 0.28125
| 0.418301
| 0.392157
| 0.54902
| 0.816993
| 0.816993
| 0.627451
| 0
| 0
| 0
| 0
| 0.090395
| 0.233766
| 231
| 11
| 35
| 21
| 0.774011
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 8
|
a1d56aeb1ed8024b5621fe6a0a986e27e8271aac
| 115,938
|
py
|
Python
|
wavefront_api_client/api/account__user_and_service_account_api.py
|
httpsgithu/python-client
|
f85a530367cdabe458a11919ad35609b9bc0606b
|
[
"Apache-2.0"
] | null | null | null |
wavefront_api_client/api/account__user_and_service_account_api.py
|
httpsgithu/python-client
|
f85a530367cdabe458a11919ad35609b9bc0606b
|
[
"Apache-2.0"
] | null | null | null |
wavefront_api_client/api/account__user_and_service_account_api.py
|
httpsgithu/python-client
|
f85a530367cdabe458a11919ad35609b9bc0606b
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Wavefront REST API Documentation
<p>The Wavefront REST API enables you to interact with Wavefront servers using standard REST API tools. You can use the REST API to automate commonly executed operations such as automatically tagging sources.</p><p>When you make REST API calls outside the Wavefront REST API documentation you must add the header \"Authorization: Bearer <<API-TOKEN>>\" to your HTTP requests.</p> # noqa: E501
OpenAPI spec version: v2
Contact: chitimba@wavefront.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from wavefront_api_client.api_client import ApiClient
class AccountUserAndServiceAccountApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def activate_account(self, id, **kwargs): # noqa: E501
"""Activates the given service account # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.activate_account(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerServiceAccount
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.activate_account_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.activate_account_with_http_info(id, **kwargs) # noqa: E501
return data
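# Illustrative usage (a minimal sketch; the host, token, and account id below
# are placeholders, not values taken from this file):
# from wavefront_api_client import ApiClient, Configuration
# config = Configuration()
# config.host = 'https://YOUR_INSTANCE.wavefront.com'
# config.api_key['api_key'] = '<<API-TOKEN>>'
# config.api_key_prefix['api_key'] = 'Bearer'
# api = AccountUserAndServiceAccountApi(ApiClient(configuration=config))
# account = api.activate_account('sa::my-service-account')             # synchronous
# thread = api.activate_account('sa::my-service-account', async_req=True)
# account = thread.get()                                               # asynchronous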
def activate_account_with_http_info(self, id, **kwargs): # noqa: E501
"""Activates the given service account # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.activate_account_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerServiceAccount
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method activate_account" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `activate_account`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/serviceaccount/{id}/activate', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerServiceAccount', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_account_to_roles(self, id, **kwargs): # noqa: E501
"""Adds specific roles to the account (user or service account) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_account_to_roles(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param list[str] body: The list of roles that should be added to the account
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_account_to_roles_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.add_account_to_roles_with_http_info(id, **kwargs) # noqa: E501
return data
def add_account_to_roles_with_http_info(self, id, **kwargs): # noqa: E501
"""Adds specific roles to the account (user or service account) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_account_to_roles_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param list[str] body: The list of roles that should be added to the account
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_account_to_roles" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `add_account_to_roles`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/{id}/addRoles', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_account_to_user_groups(self, id, **kwargs): # noqa: E501
"""Adds specific groups to the account (user or service account) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_account_to_user_groups(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param list[str] body: The list of groups that should be added to the account
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_account_to_user_groups_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.add_account_to_user_groups_with_http_info(id, **kwargs) # noqa: E501
return data
def add_account_to_user_groups_with_http_info(self, id, **kwargs): # noqa: E501
"""Adds specific groups to the account (user or service account) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_account_to_user_groups_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param list[str] body: The list of groups that should be added to the account
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_account_to_user_groups" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `add_account_to_user_groups`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/{id}/addUserGroups', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_ingestion_policy(self, **kwargs): # noqa: E501
"""Add a specific ingestion policy to multiple accounts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_ingestion_policy(async_req=True)
>>> result = thread.get()
:param async_req bool
:param IngestionPolicyMapping body: Example Body: <pre>{ \"ingestionPolicyId\": \"Ingestion policy identifier\", \"accounts\": [ \"account1\", \"account2\", \"account3\" ], \"groups\": [ \"group1\", \"group2\" ] }</pre>
:return: ResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_ingestion_policy_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.add_ingestion_policy_with_http_info(**kwargs) # noqa: E501
return data
def add_ingestion_policy_with_http_info(self, **kwargs): # noqa: E501
"""Add a specific ingestion policy to multiple accounts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_ingestion_policy_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param IngestionPolicyMapping body: Example Body: <pre>{ \"ingestionPolicyId\": \"Ingestion policy identifier\", \"accounts\": [ \"account1\", \"account2\", \"account3\" ], \"groups\": [ \"group1\", \"group2\" ] }</pre>
:return: ResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_ingestion_policy" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/addingestionpolicy', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainer', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def add_single_ingestion_policy(self, **kwargs): # noqa: E501
"""Add single ingestion policy to multiple accounts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_single_ingestion_policy(async_req=True)
>>> result = thread.get()
:param async_req bool
:param IngestionPolicyMapping body: Example Body: <pre>{ \"ingestionPolicyId\": \"Ingestion policy identifier\", \"accounts\": [ \"account1\", \"account2\", \"account3\" ], \"groups\": [ \"group1\", \"group2\" ] }</pre>
:return: ResponseContainerUserDTO
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.add_single_ingestion_policy_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.add_single_ingestion_policy_with_http_info(**kwargs) # noqa: E501
return data
def add_single_ingestion_policy_with_http_info(self, **kwargs): # noqa: E501
"""Add single ingestion policy to multiple accounts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.add_single_ingestion_policy_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param IngestionPolicyMapping body: Example Body: <pre>{ \"ingestionPolicyId\": \"Ingestion policy identifier\", \"accounts\": [ \"account1\", \"account2\", \"account3\" ], \"groups\": [ \"group1\", \"group2\" ] }</pre>
:return: ResponseContainerUserDTO
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method add_single_ingestion_policy" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/addIngestionPolicy', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerUserDTO', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_or_update_user_account(self, **kwargs): # noqa: E501
"""Creates or updates a user account # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_or_update_user_account(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool send_email: Whether to send email notification to the user, if created. Default: false
:param UserToCreate body: Example Body: <pre>{ \"emailAddress\": \"user@example.com\", \"groups\": [ \"user_management\" ], \"userGroups\": [ \"8b23136b-ecd2-4cb5-8c92-62477dcc4090\" ], \"roles\": [ \"Role\" ], \"ingestionPolicies\": [ \"policyId1\", \"policyId2\" ] }</pre>
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_or_update_user_account_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.create_or_update_user_account_with_http_info(**kwargs) # noqa: E501
return data
def create_or_update_user_account_with_http_info(self, **kwargs): # noqa: E501
"""Creates or updates a user account # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_or_update_user_account_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param bool send_email: Whether to send email notification to the user, if created. Default: false
:param UserToCreate body: Example Body: <pre>{ \"emailAddress\": \"user@example.com\", \"groups\": [ \"user_management\" ], \"userGroups\": [ \"8b23136b-ecd2-4cb5-8c92-62477dcc4090\" ], \"roles\": [ \"Role\" ], \"ingestionPolicies\": [ \"policyId1\", \"policyId2\" ] }</pre>
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['send_email', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_or_update_user_account" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'send_email' in params:
query_params.append(('sendEmail', params['send_email'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/user', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def create_service_account(self, **kwargs): # noqa: E501
"""Creates a service account # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_service_account(async_req=True)
>>> result = thread.get()
:param async_req bool
:param ServiceAccountWrite body:
:return: ResponseContainerServiceAccount
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_service_account_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.create_service_account_with_http_info(**kwargs) # noqa: E501
return data
def create_service_account_with_http_info(self, **kwargs): # noqa: E501
"""Creates a service account # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_service_account_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param ServiceAccountWrite body:
:return: ResponseContainerServiceAccount
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_service_account" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/serviceaccount', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerServiceAccount', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def deactivate_account(self, id, **kwargs): # noqa: E501
"""Deactivates the given service account # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.deactivate_account(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerServiceAccount
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.deactivate_account_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.deactivate_account_with_http_info(id, **kwargs) # noqa: E501
return data
def deactivate_account_with_http_info(self, id, **kwargs): # noqa: E501
"""Deactivates the given service account # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.deactivate_account_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerServiceAccount
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method deactivate_account" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `deactivate_account`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/serviceaccount/{id}/deactivate', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerServiceAccount', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_account(self, id, **kwargs): # noqa: E501
"""Deletes an account (user or service account) identified by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_account(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerAccount
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_account_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.delete_account_with_http_info(id, **kwargs) # noqa: E501
return data
def delete_account_with_http_info(self, id, **kwargs): # noqa: E501
"""Deletes an account (user or service account) identified by id # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_account_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerAccount
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_account" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `delete_account`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/{id}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerAccount', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def delete_multiple_accounts(self, **kwargs): # noqa: E501
"""Deletes multiple accounts (users or service accounts) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_multiple_accounts(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[str] body: list of accounts' identifiers to be deleted
:return: ResponseContainerListString
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_multiple_accounts_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.delete_multiple_accounts_with_http_info(**kwargs) # noqa: E501
return data
def delete_multiple_accounts_with_http_info(self, **kwargs): # noqa: E501
"""Deletes multiple accounts (users or service accounts) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_multiple_accounts_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[str] body: list of accounts' identifiers to be deleted
:return: ResponseContainerListString
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_multiple_accounts" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/deleteAccounts', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerListString', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_account(self, id, **kwargs): # noqa: E501
"""Get a specific account (user or service account) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_account(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerAccount
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_account_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_account_with_http_info(id, **kwargs) # noqa: E501
return data
def get_account_with_http_info(self, id, **kwargs): # noqa: E501
"""Get a specific account (user or service account) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_account_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerAccount
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_account" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `get_account`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerAccount', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_account_business_functions(self, id, **kwargs): # noqa: E501
"""Returns business functions of a specific account (user or service account). # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_account_business_functions(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerSetBusinessFunction
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_account_business_functions_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_account_business_functions_with_http_info(id, **kwargs) # noqa: E501
return data
def get_account_business_functions_with_http_info(self, id, **kwargs): # noqa: E501
"""Returns business functions of a specific account (user or service account). # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_account_business_functions_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerSetBusinessFunction
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_account_business_functions" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `get_account_business_functions`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/{id}/businessFunctions', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerSetBusinessFunction', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_all_accounts(self, **kwargs): # noqa: E501
"""Get all accounts (users and service accounts) of a customer # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_accounts(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset:
:param int limit:
:return: ResponseContainerPagedAccount
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_accounts_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_all_accounts_with_http_info(**kwargs) # noqa: E501
return data
def get_all_accounts_with_http_info(self, **kwargs): # noqa: E501
"""Get all accounts (users and service accounts) of a customer # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_accounts_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param int offset:
:param int limit:
:return: ResponseContainerPagedAccount
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['offset', 'limit'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_all_accounts" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
if 'offset' in params:
query_params.append(('offset', params['offset'])) # noqa: E501
if 'limit' in params:
query_params.append(('limit', params['limit'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerPagedAccount', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_all_service_accounts(self, **kwargs): # noqa: E501
"""Get all service accounts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_service_accounts(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: ResponseContainerListServiceAccount
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_service_accounts_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_all_service_accounts_with_http_info(**kwargs) # noqa: E501
return data
def get_all_service_accounts_with_http_info(self, **kwargs): # noqa: E501
"""Get all service accounts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_service_accounts_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: ResponseContainerListServiceAccount
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_all_service_accounts" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/serviceaccount', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerListServiceAccount', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_all_user_accounts(self, **kwargs): # noqa: E501
"""Get all user accounts # noqa: E501
Returns all user accounts # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_user_accounts(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[UserModel]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_all_user_accounts_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.get_all_user_accounts_with_http_info(**kwargs) # noqa: E501
return data
def get_all_user_accounts_with_http_info(self, **kwargs): # noqa: E501
"""Get all user accounts # noqa: E501
Returns all user accounts # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_all_user_accounts_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:return: list[UserModel]
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_all_user_accounts" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/user', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[UserModel]', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_service_account(self, id, **kwargs): # noqa: E501
"""Retrieves a service account by identifier # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_service_account(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerServiceAccount
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_service_account_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_service_account_with_http_info(id, **kwargs) # noqa: E501
return data
def get_service_account_with_http_info(self, id, **kwargs): # noqa: E501
"""Retrieves a service account by identifier # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_service_account_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: ResponseContainerServiceAccount
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_service_account" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `get_service_account`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/serviceaccount/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerServiceAccount', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_user_account(self, id, **kwargs): # noqa: E501
"""Retrieves a user by identifier (email address) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_account(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_user_account_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.get_user_account_with_http_info(id, **kwargs) # noqa: E501
return data
def get_user_account_with_http_info(self, id, **kwargs): # noqa: E501
"""Retrieves a user by identifier (email address) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_user_account_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_user_account" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `get_user_account`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/user/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def grant_account_permission(self, id, permission, **kwargs): # noqa: E501
"""Grants a specific permission to account (user or service account) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.grant_account_permission(id, permission, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param str permission: Permission to grant to the account. Please note that 'host_tag_management' is the equivalent of the 'Source Tag Management' permission (required)
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.grant_account_permission_with_http_info(id, permission, **kwargs) # noqa: E501
else:
(data) = self.grant_account_permission_with_http_info(id, permission, **kwargs) # noqa: E501
return data
def grant_account_permission_with_http_info(self, id, permission, **kwargs): # noqa: E501
"""Grants a specific permission to account (user or service account) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.grant_account_permission_with_http_info(id, permission, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param str permission: Permission to grant to the account. Please note that 'host_tag_management' is the equivalent of the 'Source Tag Management' permission (required)
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'permission'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method grant_account_permission" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `grant_account_permission`") # noqa: E501
# verify the required parameter 'permission' is set
if self.api_client.client_side_validation and ('permission' not in params or
params['permission'] is None): # noqa: E501
raise ValueError("Missing the required parameter `permission` when calling `grant_account_permission`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'permission' in params:
path_params['permission'] = params['permission'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/{id}/grant/{permission}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def grant_permission_to_accounts(self, permission, **kwargs): # noqa: E501
"""Grants a specific permission to multiple accounts (users or service accounts) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.grant_permission_to_accounts(permission, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str permission: Permission to grant to the accounts. Please note that 'host_tag_management' is the equivalent of the 'Source Tag Management' permission (required)
:param list[str] body: List of accounts to which the specified permission should be granted
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.grant_permission_to_accounts_with_http_info(permission, **kwargs) # noqa: E501
else:
(data) = self.grant_permission_to_accounts_with_http_info(permission, **kwargs) # noqa: E501
return data
def grant_permission_to_accounts_with_http_info(self, permission, **kwargs): # noqa: E501
"""Grants a specific permission to multiple accounts (users or service accounts) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.grant_permission_to_accounts_with_http_info(permission, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str permission: Permission to grant to the accounts. Please note that 'host_tag_management' is the equivalent of the 'Source Tag Management' permission (required)
:param list[str] body: List of accounts to which the specified permission should be granted
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['permission', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method grant_permission_to_accounts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'permission' is set
if self.api_client.client_side_validation and ('permission' not in params or
params['permission'] is None): # noqa: E501
raise ValueError("Missing the required parameter `permission` when calling `grant_permission_to_accounts`") # noqa: E501
collection_formats = {}
path_params = {}
if 'permission' in params:
path_params['permission'] = params['permission'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/grant/{permission}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def invite_user_accounts(self, **kwargs): # noqa: E501
"""Invite user accounts with given user groups and permissions. # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.invite_user_accounts(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[UserToCreate] body: Example Body: <pre>[ { \"emailAddress\": \"user@example.com\", \"groups\": [ \"user_management\" ], \"userGroups\": [ \"8b23136b-ecd2-4cb5-8c92-62477dcc4090\" ], \"roles\": [ \"Role\" ], \"ingestionPolicies\": [ \"policyId1\", \"policyId2\" ] } ]</pre>
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.invite_user_accounts_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.invite_user_accounts_with_http_info(**kwargs) # noqa: E501
return data
def invite_user_accounts_with_http_info(self, **kwargs): # noqa: E501
"""Invite user accounts with given user groups and permissions. # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.invite_user_accounts_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[UserToCreate] body: Example Body: <pre>[ { \"emailAddress\": \"user@example.com\", \"groups\": [ \"user_management\" ], \"userGroups\": [ \"8b23136b-ecd2-4cb5-8c92-62477dcc4090\" ], \"roles\": [ \"Role\" ], \"ingestionPolicies\": [ \"policyId1\", \"policyId2\" ] } ]</pre>
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method invite_user_accounts" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/user/invite', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def remove_account_from_roles(self, id, **kwargs): # noqa: E501
"""Removes specific roles from the account (user or service account) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_account_from_roles(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param list[str] body: The list of roles that should be removed from the account
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.remove_account_from_roles_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.remove_account_from_roles_with_http_info(id, **kwargs) # noqa: E501
return data
def remove_account_from_roles_with_http_info(self, id, **kwargs): # noqa: E501
"""Removes specific roles from the account (user or service account) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_account_from_roles_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param list[str] body: The list of roles that should be removed from the account
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_account_from_roles" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `remove_account_from_roles`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/{id}/removeRoles', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def remove_account_from_user_groups(self, id, **kwargs): # noqa: E501
"""Removes specific groups from the account (user or service account) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_account_from_user_groups(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param list[str] body: The list of groups that should be removed from the account
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.remove_account_from_user_groups_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.remove_account_from_user_groups_with_http_info(id, **kwargs) # noqa: E501
return data
def remove_account_from_user_groups_with_http_info(self, id, **kwargs): # noqa: E501
"""Removes specific groups from the account (user or service account) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_account_from_user_groups_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param list[str] body: The list of groups that should be removed from the account
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_account_from_user_groups" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `remove_account_from_user_groups`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/{id}/removeUserGroups', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def remove_ingestion_policies(self, **kwargs): # noqa: E501
"""Removes ingestion policies from multiple accounts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_ingestion_policies(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[str] body: Identifiers of the accounts from which ingestion policies should be removed
:return: ResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.remove_ingestion_policies_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.remove_ingestion_policies_with_http_info(**kwargs) # noqa: E501
return data
def remove_ingestion_policies_with_http_info(self, **kwargs): # noqa: E501
"""Removes ingestion policies from multiple accounts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_ingestion_policies_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[str] body: Identifiers of the accounts from which ingestion policies should be removed
:return: ResponseContainer
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_ingestion_policies" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/removeingestionpolicies', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainer', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def remove_single_ingestion_policy(self, **kwargs): # noqa: E501
"""Removes single ingestion policy from multiple accounts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_single_ingestion_policy(async_req=True)
>>> result = thread.get()
:param async_req bool
:param IngestionPolicyMapping body: Example Body: <pre>{ \"ingestionPolicyId\": \"Ingestion policy identifier\", \"accounts\": [ \"account1\", \"account2\", \"account3\" ], \"groups\": [ \"group1\", \"group2\" ] }</pre>
:return: ResponseContainerUserDTO
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.remove_single_ingestion_policy_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.remove_single_ingestion_policy_with_http_info(**kwargs) # noqa: E501
return data
def remove_single_ingestion_policy_with_http_info(self, **kwargs): # noqa: E501
"""Removes single ingestion policy from multiple accounts # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.remove_single_ingestion_policy_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param IngestionPolicyMapping body: Example Body: <pre>{ \"ingestionPolicyId\": \"Ingestion policy identifier\", \"accounts\": [ \"account1\", \"account2\", \"account3\" ], \"groups\": [ \"group1\", \"group2\" ] }</pre>
:return: ResponseContainerUserDTO
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method remove_single_ingestion_policy" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/removeIngestionPolicy', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerUserDTO', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def revoke_account_permission(self, id, permission, **kwargs): # noqa: E501
"""Revokes a specific permission from account (user or service account) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.revoke_account_permission(id, permission, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param str permission: (required)
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.revoke_account_permission_with_http_info(id, permission, **kwargs) # noqa: E501
else:
(data) = self.revoke_account_permission_with_http_info(id, permission, **kwargs) # noqa: E501
return data
def revoke_account_permission_with_http_info(self, id, permission, **kwargs): # noqa: E501
"""Revokes a specific permission from account (user or service account) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.revoke_account_permission_with_http_info(id, permission, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param str permission: (required)
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'permission'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method revoke_account_permission" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `revoke_account_permission`") # noqa: E501
# verify the required parameter 'permission' is set
if self.api_client.client_side_validation and ('permission' not in params or
params['permission'] is None): # noqa: E501
raise ValueError("Missing the required parameter `permission` when calling `revoke_account_permission`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
if 'permission' in params:
path_params['permission'] = params['permission'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/{id}/revoke/{permission}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def revoke_permission_from_accounts(self, permission, **kwargs): # noqa: E501
"""Revokes a specific permission from multiple accounts (users or service accounts) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.revoke_permission_from_accounts(permission, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str permission: Permission to revoke from the accounts. Please note that 'host_tag_management' is the equivalent of the 'Source Tag Management' permission (required)
:param list[str] body: List of accounts from which the specified permission should be revoked
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.revoke_permission_from_accounts_with_http_info(permission, **kwargs) # noqa: E501
else:
(data) = self.revoke_permission_from_accounts_with_http_info(permission, **kwargs) # noqa: E501
return data
def revoke_permission_from_accounts_with_http_info(self, permission, **kwargs): # noqa: E501
"""Revokes a specific permission from multiple accounts (users or service accounts) # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.revoke_permission_from_accounts_with_http_info(permission, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str permission: Permission to revoke from the accounts. Please note that 'host_tag_management' is the equivalent of the 'Source Tag Management' permission (required)
:param list[str] body: List of accounts from which the specified permission should be revoked
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['permission', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method revoke_permission_from_accounts" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'permission' is set
if self.api_client.client_side_validation and ('permission' not in params or
params['permission'] is None): # noqa: E501
raise ValueError("Missing the required parameter `permission` when calling `revoke_permission_from_accounts`") # noqa: E501
collection_formats = {}
path_params = {}
if 'permission' in params:
path_params['permission'] = params['permission'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/revoke/{permission}', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_service_account(self, id, **kwargs): # noqa: E501
"""Updates the service account # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_service_account(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param ServiceAccountWrite body:
:return: ResponseContainerServiceAccount
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_service_account_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.update_service_account_with_http_info(id, **kwargs) # noqa: E501
return data
def update_service_account_with_http_info(self, id, **kwargs): # noqa: E501
"""Updates the service account # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_service_account_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param ServiceAccountWrite body:
:return: ResponseContainerServiceAccount
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_service_account" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `update_service_account`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/serviceaccount/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerServiceAccount', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_user_account(self, id, **kwargs): # noqa: E501
"""Update user with given user groups, permissions and ingestion policy. # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_user_account(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param UserRequestDTO body: Example Body: <pre>{ \"identifier\": \"user@example.com\", \"groups\": [ \"user_management\" ], \"userGroups\": [ \"8b23136b-ecd2-4cb5-8c92-62477dcc4090\" ], \"ingestionPolicies\": [ \"policy_id\" ], \"roles\": [ \"Role\" ] }</pre>
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_user_account_with_http_info(id, **kwargs) # noqa: E501
else:
(data) = self.update_user_account_with_http_info(id, **kwargs) # noqa: E501
return data
def update_user_account_with_http_info(self, id, **kwargs): # noqa: E501
"""Update user with given user groups, permissions and ingestion policy. # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_user_account_with_http_info(id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str id: (required)
:param UserRequestDTO body: Example Body: <pre>{ \"identifier\": \"user@example.com\", \"groups\": [ \"user_management\" ], \"userGroups\": [ \"8b23136b-ecd2-4cb5-8c92-62477dcc4090\" ], \"ingestionPolicies\": [ \"policy_id\" ], \"roles\": [ \"Role\" ] }</pre>
:return: UserModel
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_user_account" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if self.api_client.client_side_validation and ('id' not in params or
params['id'] is None): # noqa: E501
raise ValueError("Missing the required parameter `id` when calling `update_user_account`") # noqa: E501
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/user/{id}', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UserModel', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def validate_accounts(self, **kwargs): # noqa: E501
"""Returns valid accounts (users and service accounts), also invalid identifiers from the given list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.validate_accounts(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[str] body:
:return: ResponseContainerValidatedUsersDTO
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.validate_accounts_with_http_info(**kwargs) # noqa: E501
else:
(data) = self.validate_accounts_with_http_info(**kwargs) # noqa: E501
return data
def validate_accounts_with_http_info(self, **kwargs): # noqa: E501
"""Returns valid accounts (users and service accounts), also invalid identifiers from the given list # noqa: E501
# noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.validate_accounts_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool
:param list[str] body:
:return: ResponseContainerValidatedUsersDTO
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method validate_accounts" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['api_key'] # noqa: E501
return self.api_client.call_api(
'/api/v2/account/validateAccounts', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ResponseContainerValidatedUsersDTO', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
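# A minimal usage sketch (not part of the generated client). It assumes the
# standard swagger-codegen Python layout, i.e. a Configuration object whose
# api_key dict feeds the 'api_key' auth setting used above, and an ApiClient
# wrapper passed to this module's API class. The package name, host, header
# key, and class name below are illustrative assumptions, not taken from
# this file.
#
#     import wavefront_api_client as client   # hypothetical package name
#
#     config = client.Configuration()
#     config.host = 'https://example.wavefront.com'   # assumed base URL
#     config.api_key['X-AUTH-TOKEN'] = '<token>'      # assumed header name
#     api_client = client.ApiClient(config)
#     api = client.AccountUserAndServiceAccountApi(api_client)  # assumed class name
#
#     # synchronous call
#     paged = api.get_all_accounts(offset=0, limit=100)
#     # asynchronous call: returns a thread-like handle
#     thread = api.get_account('user@example.com', async_req=True)
#     account = thread.get()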
# File: test/toolset-mock/src/clang-vxworks-4.0.1.py
# Repo: MaxSac/build (license: BSL-1.0)
#!/usr/bin/python
#
# Copyright 2018 Steven Watanabe
#
# Distributed under the Boost Software License, Version 1.0.
# (See accompanying file LICENSE_1_0.txt or copy at
# http://www.boost.org/LICENSE_1_0.txt)
from MockProgram import *
command('clang++', '-print-prog-name=ar', stdout=script('ar.py'))
command('clang++', '-print-prog-name=ranlib', stdout=script('ranlib.py'))
if allow_properties('variant=debug', 'link=shared', 'threading=single', 'runtime-link=shared'):
command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-fPIC', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/debug/lib.o'), input_file(source='lib.cpp'))
command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-fPIC', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/debug/main.o'), input_file(source='main.cpp'))
if allow_properties('variant=release', 'link=shared', 'threading=single', 'runtime-link=shared', 'strip=on'):
command('clang++', unordered(ordered('-x', 'c++'), '-O3', '-Wno-inline', '-Wall', '-fPIC', '-DNDEBUG', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/release/lib.o'), input_file(source='lib.cpp'))
command('clang++', unordered(ordered('-x', 'c++'), '-O3', '-Wno-inline', '-Wall', '-fPIC', '-DNDEBUG', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/release/main.o'), input_file(source='main.cpp'))
if allow_properties('variant=debug', 'link=shared', 'threading=multi', 'runtime-link=shared'):
command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-fPIC', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/debug/threading-multi/lib.o'), input_file(source='lib.cpp'))
command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-fPIC', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/debug/threading-multi/main.o'), input_file(source='main.cpp'))
if allow_properties('variant=debug', 'link=static', 'threading=single', 'runtime-link=shared'):
command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/debug/link-static/lib.o'), input_file(source='lib.cpp'))
command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/debug/link-static/main.o'), input_file(source='main.cpp'))
if allow_properties('variant=debug', 'link=static', 'threading=single', 'runtime-link=static'):
command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/debug/link-static/runtime-link-static/lib.o'), input_file(source='lib.cpp'))
command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/debug/link-static/runtime-link-static/main.o'), input_file(source='main.cpp'))
if allow_properties('variant=debug', 'link=shared', 'threading=single', 'runtime-link=shared', 'architecture=x86', 'address-model=32'):
command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-march=i686', '-m32', '-fPIC', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/debug/lib.o'), input_file(source='lib.cpp'))
command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-Wall', '-g', '-march=i686', '-m32', '-fPIC', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/debug/main.o'), input_file(source='main.cpp'))
if allow_properties('variant=debug', 'link=shared', 'threading=single', 'runtime-link=shared', 'rtti=off', 'exception-handling=off'):
command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-fno-rtti', '-fno-exceptions', '-Wall', '-g', '-fPIC', '-D_NO_RTTI', '-D_NO_EX=1', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/debug/lib.o'), input_file(source='lib.cpp'))
command('clang++', unordered(ordered('-x', 'c++'), '-O0', '-fno-inline', '-fno-rtti', '-fno-exceptions', '-Wall', '-g', '-fPIC', '-D_NO_RTTI', '-D_NO_EX=1', '-c'), '-o', output_file('bin/clang-vxworks-4.0.1/debug/main.o'), input_file(source='main.cpp'))
main()
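# --- Pattern sketch (hypothetical; not part of the real test suite) ---
# A minimal MockProgram script in the same style as above, using a made-up
# gcc mock: command() registers an expected command line (with stdout= replies
# for -print-prog-name probes), allow_properties() gates a property set, and
# unordered()/ordered() control how strictly argument order is matched.
# main() then verifies the build's actual invocations against the registry.
#
# from MockProgram import *
# command('g++', '-print-prog-name=ar', stdout=script('ar.py'))
# if allow_properties('variant=debug', 'link=shared'):
#     command('g++', unordered(ordered('-x', 'c++'), '-O0', '-g', '-fPIC', '-c'),
#             '-o', output_file('bin/gcc-mock/debug/lib.o'),
#             input_file(source='lib.cpp'))
# main()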
avg_line_length: 97.162791 | max_line_length: 257 | alphanum_fraction: 0.62853
qsc quality signals: num_words 616; num_chars 4,178; mean_word_length 4.186688; plus the remaining per-file signal columns

hexsha: 62fce2061fc9da6bb7ddbbfd2bb56160b5711c03
size: 84,404 | ext: py | lang: Python
max_stars_repo_path: tests/api_tests/test_workspace_audit_and_research_directory_api.py
max_stars_repo_name: all-of-us/raw-data-repository
max_stars_repo_head_hexsha: d28ad957557587b03ff9c63d55dd55e0508f91d8
max_stars_repo_licenses: ["BSD-3-Clause"]
max_stars_count: 39 (stars events 2017-10-13T19:16:27.000Z to 2021-09-24T16:58:21.000Z)
max_issues_repo_path/name/head_hexsha/licenses: same as stars entry
max_issues_count: 312 (issues events 2017-09-08T15:42:13.000Z to 2022-03-23T18:21:40.000Z)
max_forks_repo_path/name/head_hexsha/licenses: same as stars entry
max_forks_count: 19 (forks events 2017-09-15T13:58:00.000Z to 2022-02-07T18:33:20.000Z)
content:
from tests.helpers.unittest_base import BaseTestCase
from rdr_service.clock import FakeClock
from rdr_service import clock
from datetime import timedelta
class ResearchProjectsDirectoryApiTest(BaseTestCase):
def setUp(self):
super().setUp(with_data=False)
def test_get_research_projects_directory_end_to_end(self):
# create researchers
researchers_json = [
{
"userId": 0,
"creationTime": "2019-11-26T21:21:13.056Z",
"modifiedTime": "2019-11-26T21:21:13.056Z",
"givenName": "given name 1",
"familyName": "family name 1",
"streetAddress1": "string",
"streetAddress2": "string",
"city": "string",
"state": "string",
"zipCode": "string",
"country": "string",
"ethnicity": "HISPANIC",
"gender": ["MAN"],
"race": ["AIAN"],
"degree": ["PHD", "MPH"],
"sexAtBirth": ["FEMALE"],
"sexualOrientation": "BISEXUAL",
"affiliations": [
{
"institution": "institution1",
"role": "institution role 1",
"nonAcademicAffiliation": "INDUSTRY"
}
],
"verifiedInstitutionalAffiliation": {
"institutionDisplayName": "display name",
"institutionShortName": "verified institution",
"institutionalRole": "verified institution role 1",
"nonAcademicAffiliation": "INDUSTRY"
}
},
{
"userId": 1,
"creationTime": "2019-11-27T21:21:13.056Z",
"modifiedTime": "2019-11-27T21:21:13.056Z",
"givenName": "given name 2",
"familyName": "family name 2",
"streetAddress1": "string2",
"streetAddress2": "string2",
"city": "string2",
"state": "string2",
"zipCode": "string2",
"country": "string2",
"ethnicity": "HISPANIC",
"sexualOrientation": "BISEXUAL",
"gender": ["MAN", "WOMAN"],
"race": ["AIAN", "WHITE"],
"degree": ["PHD", "MPH"],
"affiliations": [
{
"institution": "institution2",
"role": "institution role 2"
},
{
"institution": "institution22",
"role": "institution role 22",
"nonAcademicAffiliation": "INDUSTRY"
}
],
"verifiedInstitutionalAffiliation": {
"institutionShortName": "verified institution",
"institutionalRole": "verified institution role 1",
"nonAcademicAffiliation": "INDUSTRY"
}
}
]
self.send_post('workbench/directory/researchers', request_data=researchers_json)
# create workspace
request_json = [
{
"workspaceId": 0,
"name": "workspace name str",
"creationTime": "2019-11-25T17:43:41.085Z",
"modifiedTime": "2019-11-25T17:43:41.085Z",
"status": "ACTIVE",
"workspaceUsers": [
{
"userId": 0,
"role": "OWNER",
"status": "ACTIVE"
},
{
"userId": 1,
"role": "OWNER",
"status": "ACTIVE"
}
],
"creator": {
"userId": 1,
"givenName": "aaa",
"familyName": "bbb"
},
"excludeFromPublicDirectory": False,
"ethicalLegalSocialImplications": True,
"diseaseFocusedResearch": True,
"diseaseFocusedResearchName": "disease focused research name str",
"otherPurposeDetails": "other purpose details str",
"methodsDevelopment": True,
"controlSet": True,
"ancestry": True,
"accessTier": "REGISTERED",
"socialBehavioral": True,
"populationHealth": True,
"drugDevelopment": True,
"commercialPurpose": True,
"educational": True,
"otherPurpose": True,
"scientificApproaches": 'reasonForInvestigation string',
"intendToStudy": 'intendToStudy string',
"findingsFromStudy": 'findingsFromStudy string',
"focusOnUnderrepresentedPopulations": True,
"workspaceDemographic": {
"raceEthnicity": ['AIAN', 'MENA'],
"age": ['AGE_0_11', 'AGE_65_74'],
"sexAtBirth": "UNSET",
"genderIdentity": "OTHER_THAN_MAN_WOMAN",
"sexualOrientation": "OTHER_THAN_STRAIGHT",
"geography": "RURAL",
"disabilityStatus": "DISABILITY",
"accessToCare": "NOT_EASILY_ACCESS_CARE",
"educationLevel": "LESS_THAN_HIGH_SCHOOL",
"incomeLevel": "BELOW_FEDERAL_POVERTY_LEVEL_200_PERCENT",
"others": "string"
}
},
{
"workspaceId": 1,
"name": "workspace name str 2",
"creationTime": "2019-11-25T17:43:41.085Z",
"modifiedTime": "2019-11-25T17:43:41.085Z",
"status": "INACTIVE",
"workspaceUsers": [
{
"userId": 0,
"role": "OWNER",
"status": "ACTIVE"
},
{
"userId": 1,
"role": "READER",
"status": "ACTIVE"
}
],
"creator": {
"userId": 0,
"givenName": "aaa",
"familyName": "bbb"
},
"excludeFromPublicDirectory": False,
"ethicalLegalSocialImplications": False,
"diseaseFocusedResearch": True,
"diseaseFocusedResearchName": "disease focused research name str 2",
"otherPurposeDetails": "other purpose details str 2",
"methodsDevelopment": False,
"controlSet": False,
"ancestry": False,
"socialBehavioral": False,
"populationHealth": False,
"drugDevelopment": False,
"commercialPurpose": False,
"educational": False,
"otherPurpose": False,
"scientificApproaches": 'reasonForInvestigation string2',
"intendToStudy": 'intendToStudy string2',
"findingsFromStudy": 'findingsFromStudy string2'
}
]
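        # Backdate the submission past the sequestration window (see the
        # less-than-23-hours test below) so these snapshots appear in the directory.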
now = clock.CLOCK.now()
sequest_hours_ago = now - timedelta(hours=24)
with FakeClock(sequest_hours_ago):
self.send_post('workbench/directory/workspaces', request_data=request_json)
# test get research projects directory before review
result = self.send_get('researchHub/projectDirectory')
self.assertEqual(len(result['data']), 2)
self.assertIn({'workspaceId': 0, 'snapshotId': 1, 'name': 'workspace name str',
'creationTime': '2019-11-25T17:43:41.085000',
'modifiedTime': '2019-11-25T17:43:41.085000', 'status': 'ACTIVE',
'workspaceUsers': [
{'userId': 0, 'userName': 'given name 1 family name 1', 'degree': ['PHD', 'MPH'],
'affiliations': [
{'institution': 'institution1', 'role': 'institution role 1', 'isVerified': None,
'nonAcademicAffiliation': 'INDUSTRY'},
{'institution': 'display name', 'role': 'verified institution role 1',
'isVerified': True, 'nonAcademicAffiliation': 'UNSET'}]},
{'userId': 1, 'userName': 'given name 2 family name 2', 'degree': ['PHD', 'MPH'],
'affiliations': [
{'institution': 'institution2', 'role': 'institution role 2', 'isVerified': None,
'nonAcademicAffiliation': 'UNSET'},
{'institution': 'institution22', 'role': 'institution role 22', 'isVerified': None,
'nonAcademicAffiliation': 'INDUSTRY'},
{'institution': 'verified institution', 'role': 'verified institution role 1',
'isVerified': True, 'nonAcademicAffiliation': 'UNSET'}
]}
],
'workspaceOwner': [
{'userId': 0, 'userName': 'given name 1 family name 1',
'degree': ['PHD', 'MPH'],
'affiliations': [{'institution': 'institution1', 'role': 'institution role 1',
'isVerified': None, 'nonAcademicAffiliation': 'INDUSTRY'},
{'institution': 'display name', 'role': 'verified institution role 1',
'isVerified': True, 'nonAcademicAffiliation': 'UNSET'}]},
{'userId': 1, 'userName': 'given name 2 family name 2',
'degree': ['PHD', 'MPH'],
'affiliations': [{'institution': 'institution2', 'role': 'institution role 2',
'isVerified': None, 'nonAcademicAffiliation': 'UNSET'},
{'institution': 'institution22', 'role': 'institution role 22',
'isVerified': None, 'nonAcademicAffiliation': 'INDUSTRY'},
{'institution': 'verified institution',
'role': 'verified institution role 1', 'isVerified': True,
'nonAcademicAffiliation': 'UNSET'}]}],
'hasVerifiedInstitution': True,
'excludeFromPublicDirectory': False, 'ethicalLegalSocialImplications': True,
'reviewRequested': False, 'diseaseFocusedResearch': True,
'diseaseFocusedResearchName': 'disease focused research name str',
'otherPurposeDetails': 'other purpose details str', 'methodsDevelopment': True,
'controlSet': True, 'ancestry': True, 'socialBehavioral': True, 'populationHealth': True,
'drugDevelopment': True, 'commercialPurpose': True, 'educational': True, 'otherPurpose': True,
'scientificApproaches': 'reasonForInvestigation string',
'intendToStudy': 'intendToStudy string',
'findingsFromStudy': 'findingsFromStudy string',
'focusOnUnderrepresentedPopulations': True,
'accessTier': 'REGISTERED',
'workspaceDemographic': {
"raceEthnicity": ['AIAN', 'MENA'],
"age": ['AGE_0_11', 'AGE_65_74'],
"sexAtBirth": None,
"genderIdentity": "OTHER_THAN_MAN_WOMAN",
"sexualOrientation": "OTHER_THAN_STRAIGHT",
"geography": "RURAL",
"disabilityStatus": "DISABILITY",
"accessToCare": "NOT_EASILY_ACCESS_CARE",
"educationLevel": "LESS_THAN_HIGH_SCHOOL",
"incomeLevel": "BELOW_FEDERAL_POVERTY_LEVEL_200_PERCENT",
"others": "string"
},
'cdrVersion': None
},
result['data'])
self.assertIn({'workspaceId': 1, 'snapshotId': 2, 'name': 'workspace name str 2',
'creationTime': '2019-11-25T17:43:41.085000',
'modifiedTime': '2019-11-25T17:43:41.085000', 'status': 'INACTIVE',
'workspaceUsers': [
{'userId': 0, 'userName': 'given name 1 family name 1', 'degree': ['PHD', 'MPH'],
'affiliations': [
{'institution': 'institution1', 'role': 'institution role 1', 'isVerified': None,
'nonAcademicAffiliation': 'INDUSTRY'},
{'institution': 'display name', 'role': 'verified institution role 1',
'isVerified': True, 'nonAcademicAffiliation': 'UNSET'}
]},
{'userId': 1, 'userName': 'given name 2 family name 2', 'degree': ['PHD', 'MPH'],
'affiliations': [
{'institution': 'institution2', 'role': 'institution role 2', 'isVerified': None,
'nonAcademicAffiliation': 'UNSET'},
{'institution': 'institution22', 'role': 'institution role 22', 'isVerified': None,
'nonAcademicAffiliation': 'INDUSTRY'},
{'institution': 'verified institution', 'role': 'verified institution role 1',
'isVerified': True, 'nonAcademicAffiliation': 'UNSET'}
]}
],
'workspaceOwner': [{'userId': 0, 'userName': 'given name 1 family name 1',
'degree': ['PHD', 'MPH'],
'affiliations': [{'institution': 'institution1',
'role': 'institution role 1',
'isVerified': None,
'nonAcademicAffiliation': 'INDUSTRY'},
{'institution': 'display name',
'role': 'verified institution role 1',
'isVerified': True, 'nonAcademicAffiliation': 'UNSET'}]}],
'hasVerifiedInstitution': True,
'excludeFromPublicDirectory': False, 'ethicalLegalSocialImplications': False,
'reviewRequested': False, 'diseaseFocusedResearch': True,
'diseaseFocusedResearchName': 'disease focused research name str 2',
'otherPurposeDetails': 'other purpose details str 2', 'methodsDevelopment': False,
'controlSet': False, 'ancestry': False, 'socialBehavioral': False, 'populationHealth': False,
'drugDevelopment': False, 'commercialPurpose': False, 'educational': False,
'otherPurpose': False, 'scientificApproaches': 'reasonForInvestigation string2',
'intendToStudy': 'intendToStudy string2',
'findingsFromStudy': 'findingsFromStudy string2',
'focusOnUnderrepresentedPopulations': None,
'accessTier': 'UNSET',
'workspaceDemographic': {
"raceEthnicity": None,
"age": None,
"sexAtBirth": None,
"genderIdentity": None,
"sexualOrientation": None,
"geography": None,
"disabilityStatus": None,
"accessToCare": None,
"educationLevel": None,
"incomeLevel": None,
"others": None
},
'cdrVersion': None
},
result['data'])
# test audit review
review_results = [
{
"snapshotId": 1,
"auditorEmail": "auditor_email_1",
"reviewType": "RAB",
"displayDecision": "PUBLISH_TO_RESEARCHER_DIRECTORY",
"accessDecision": None,
"auditorNotes": "note1"
},
{
"snapshotId": 2,
"auditorEmail": "auditor_email_2",
"reviewType": "RAB",
"displayDecision": "EXCLUDE_FROM_RESEARCHER_DIRECTORY",
"accessDecision": "DISABLE_WORKSPACE",
"auditorNotes": "note2"
}
]
result = self.send_post('workbench/audit/workspace/results', review_results)
self.assertIn({'snapshotId': 1, 'auditorEmail': 'auditor_email_1', 'reviewType': 'RAB',
'displayDecision': 'PUBLISH_TO_RESEARCHER_DIRECTORY', 'accessDecision': 'UNSET',
'auditorNotes': 'note1'}, result)
self.assertIn({'snapshotId': 2, 'auditorEmail': 'auditor_email_2', 'reviewType': 'RAB',
'displayDecision': 'EXCLUDE_FROM_RESEARCHER_DIRECTORY',
'accessDecision': 'DISABLE_WORKSPACE', 'auditorNotes': 'note2'}, result)
# test get research projects directory after review
result = self.send_get('researchHub/projectDirectory')
self.assertEqual(len(result['data']), 1)
self.assertIn({'workspaceId': 0, 'snapshotId': 1, 'name': 'workspace name str',
'creationTime': '2019-11-25T17:43:41.085000',
'modifiedTime': '2019-11-25T17:43:41.085000', 'status': 'ACTIVE',
'workspaceUsers': [
{'userId': 0, 'userName': 'given name 1 family name 1', 'degree': ['PHD', 'MPH'],
'affiliations': [
{'institution': 'institution1', 'role': 'institution role 1', 'isVerified': None,
'nonAcademicAffiliation': 'INDUSTRY'},
{'institution': 'display name', 'role': 'verified institution role 1',
'isVerified': True, 'nonAcademicAffiliation': 'UNSET'}
]},
{'userId': 1, 'userName': 'given name 2 family name 2', 'degree': ['PHD', 'MPH'],
'affiliations': [
{'institution': 'institution2', 'role': 'institution role 2', 'isVerified': None,
'nonAcademicAffiliation': 'UNSET'},
{'institution': 'institution22', 'role': 'institution role 22', 'isVerified': None,
'nonAcademicAffiliation': 'INDUSTRY'},
{'institution': 'verified institution', 'role': 'verified institution role 1',
'isVerified': True, 'nonAcademicAffiliation': 'UNSET'}
]}
],
'workspaceOwner': [
{'userId': 0, 'userName': 'given name 1 family name 1', 'degree': ['PHD', 'MPH'],
'affiliations': [
{'institution': 'institution1', 'role': 'institution role 1', 'isVerified': None,
'nonAcademicAffiliation': 'INDUSTRY'},
{'institution': 'display name', 'role': 'verified institution role 1',
'isVerified': True, 'nonAcademicAffiliation': 'UNSET'}
]},
{'userId': 1, 'userName': 'given name 2 family name 2', 'degree': ['PHD', 'MPH'],
'affiliations': [{'institution': 'institution2', 'role': 'institution role 2',
'isVerified': None, 'nonAcademicAffiliation': 'UNSET'},
{'institution': 'institution22', 'role': 'institution role 22',
'isVerified': None, 'nonAcademicAffiliation': 'INDUSTRY'},
{'institution': 'verified institution',
'role': 'verified institution role 1', 'isVerified': True,
'nonAcademicAffiliation': 'UNSET'}]}],
'hasVerifiedInstitution': True,
'excludeFromPublicDirectory': False, 'ethicalLegalSocialImplications': True,
'reviewRequested': False, 'diseaseFocusedResearch': True,
'diseaseFocusedResearchName': 'disease focused research name str',
'otherPurposeDetails': 'other purpose details str', 'methodsDevelopment': True,
'controlSet': True, 'ancestry': True, 'socialBehavioral': True, 'populationHealth': True,
'drugDevelopment': True, 'commercialPurpose': True, 'educational': True, 'otherPurpose': True,
'scientificApproaches': 'reasonForInvestigation string',
'intendToStudy': 'intendToStudy string',
'findingsFromStudy': 'findingsFromStudy string',
'focusOnUnderrepresentedPopulations': True,
'accessTier': 'REGISTERED',
'workspaceDemographic': {
"raceEthnicity": ['AIAN', 'MENA'],
"age": ['AGE_0_11', 'AGE_65_74'],
"sexAtBirth": None,
"genderIdentity": "OTHER_THAN_MAN_WOMAN",
"sexualOrientation": "OTHER_THAN_STRAIGHT",
"geography": "RURAL",
"disabilityStatus": "DISABILITY",
"accessToCare": "NOT_EASILY_ACCESS_CARE",
"educationLevel": "LESS_THAN_HIGH_SCHOOL",
"incomeLevel": "BELOW_FEDERAL_POVERTY_LEVEL_200_PERCENT",
"others": "string"
},
'cdrVersion': None
},
result['data'])
# test get research projects directory with status
result = self.send_get('researchHub/projectDirectory?status=ACTIVE')
self.assertEqual(len(result['data']), 1)
self.assertIn({'workspaceId': 0, 'snapshotId': 1, 'name': 'workspace name str',
'creationTime': '2019-11-25T17:43:41.085000',
'modifiedTime': '2019-11-25T17:43:41.085000', 'status': 'ACTIVE',
'workspaceUsers': [
{'userId': 0, 'userName': 'given name 1 family name 1', 'degree': ['PHD', 'MPH'],
'affiliations': [
{'institution': 'institution1', 'role': 'institution role 1', 'isVerified': None,
'nonAcademicAffiliation': 'INDUSTRY'},
{'institution': 'display name', 'role': 'verified institution role 1',
'isVerified': True, 'nonAcademicAffiliation': 'UNSET'}
]},
{'userId': 1, 'userName': 'given name 2 family name 2', 'degree': ['PHD', 'MPH'],
'affiliations': [
{'institution': 'institution2', 'role': 'institution role 2', 'isVerified': None,
'nonAcademicAffiliation': 'UNSET'},
{'institution': 'institution22', 'role': 'institution role 22', 'isVerified': None,
'nonAcademicAffiliation': 'INDUSTRY'},
{'institution': 'verified institution', 'role': 'verified institution role 1',
'isVerified': True, 'nonAcademicAffiliation': 'UNSET'}
]}
],
'workspaceOwner': [
{'userId': 0, 'userName': 'given name 1 family name 1', 'degree': ['PHD', 'MPH'],
'affiliations': [
{'institution': 'institution1', 'role': 'institution role 1', 'isVerified': None,
'nonAcademicAffiliation': 'INDUSTRY'},
{'institution': 'display name', 'role': 'verified institution role 1',
'isVerified': True, 'nonAcademicAffiliation': 'UNSET'}
]},
{'userId': 1, 'userName': 'given name 2 family name 2', 'degree': ['PHD', 'MPH'],
'affiliations': [{'institution': 'institution2', 'role': 'institution role 2',
'isVerified': None, 'nonAcademicAffiliation': 'UNSET'},
{'institution': 'institution22', 'role': 'institution role 22',
'isVerified': None, 'nonAcademicAffiliation': 'INDUSTRY'},
{'institution': 'verified institution',
'role': 'verified institution role 1', 'isVerified': True,
'nonAcademicAffiliation': 'UNSET'}]}],
'hasVerifiedInstitution': True,
'excludeFromPublicDirectory': False, 'ethicalLegalSocialImplications': True,
'reviewRequested': False, 'diseaseFocusedResearch': True,
'diseaseFocusedResearchName': 'disease focused research name str',
'otherPurposeDetails': 'other purpose details str', 'methodsDevelopment': True,
'controlSet': True, 'ancestry': True, 'socialBehavioral': True, 'populationHealth': True,
'drugDevelopment': True, 'commercialPurpose': True, 'educational': True, 'otherPurpose': True,
'scientificApproaches': 'reasonForInvestigation string',
'intendToStudy': 'intendToStudy string',
'findingsFromStudy': 'findingsFromStudy string',
'focusOnUnderrepresentedPopulations': True,
'accessTier': 'REGISTERED',
'workspaceDemographic': {
"raceEthnicity": ['AIAN', 'MENA'],
"age": ['AGE_0_11', 'AGE_65_74'],
"sexAtBirth": None,
"genderIdentity": "OTHER_THAN_MAN_WOMAN",
"sexualOrientation": "OTHER_THAN_STRAIGHT",
"geography": "RURAL",
"disabilityStatus": "DISABILITY",
"accessToCare": "NOT_EASILY_ACCESS_CARE",
"educationLevel": "LESS_THAN_HIGH_SCHOOL",
"incomeLevel": "BELOW_FEDERAL_POVERTY_LEVEL_200_PERCENT",
"others": "string"
},
'cdrVersion': None
},
result['data'])
# change audit review result
review_results = [
{
"snapshotId": 1,
"auditorEmail": "auditor_email_1",
"reviewType": "RAB",
"displayDecision": "EXCLUDE_FROM_RESEARCHER_DIRECTORY",
"accessDecision": 'DISABLE_WORKSPACE',
"auditorNotes": "note1"
},
{
"snapshotId": 2,
"auditorEmail": "auditor_email_2",
"reviewType": "RAB",
"displayDecision": "PUBLISH_TO_RESEARCHER_DIRECTORY",
"accessDecision": None,
"auditorNotes": "note2"
}
]
self.send_post('workbench/audit/workspace/results', review_results)
result = self.send_get('researchHub/projectDirectory')
self.assertEqual(len(result['data']), 1)
self.assertIn({'workspaceId': 1, 'snapshotId': 2, 'name': 'workspace name str 2',
'creationTime': '2019-11-25T17:43:41.085000',
'modifiedTime': '2019-11-25T17:43:41.085000', 'status': 'INACTIVE',
'workspaceUsers': [
{'userId': 0, 'userName': 'given name 1 family name 1', 'degree': ['PHD', 'MPH'],
'affiliations': [
{'institution': 'institution1', 'role': 'institution role 1', 'isVerified': None,
'nonAcademicAffiliation': 'INDUSTRY'},
{'institution': 'display name', 'role': 'verified institution role 1',
'isVerified': True, 'nonAcademicAffiliation': 'UNSET'}
]},
{'userId': 1, 'userName': 'given name 2 family name 2', 'degree': ['PHD', 'MPH'],
'affiliations': [
{'institution': 'institution2', 'role': 'institution role 2', 'isVerified': None,
'nonAcademicAffiliation': 'UNSET'},
{'institution': 'institution22', 'role': 'institution role 22', 'isVerified': None,
'nonAcademicAffiliation': 'INDUSTRY'},
{'institution': 'verified institution', 'role': 'verified institution role 1',
'isVerified': True, 'nonAcademicAffiliation': 'UNSET'}
]}
],
'workspaceOwner': [{'userId': 0, 'userName': 'given name 1 family name 1',
'degree': ['PHD', 'MPH'],
'affiliations': [{'institution': 'institution1',
'role': 'institution role 1',
'isVerified': None,
'nonAcademicAffiliation': 'INDUSTRY'},
{'institution': 'display name',
'role': 'verified institution role 1',
'isVerified': True, 'nonAcademicAffiliation': 'UNSET'}]}],
'hasVerifiedInstitution': True,
'excludeFromPublicDirectory': False, 'ethicalLegalSocialImplications': False,
'reviewRequested': False, 'diseaseFocusedResearch': True,
'diseaseFocusedResearchName': 'disease focused research name str 2',
'otherPurposeDetails': 'other purpose details str 2', 'methodsDevelopment': False,
'controlSet': False, 'ancestry': False, 'socialBehavioral': False, 'populationHealth': False,
'drugDevelopment': False, 'commercialPurpose': False, 'educational': False,
'otherPurpose': False, 'scientificApproaches': 'reasonForInvestigation string2',
'intendToStudy': 'intendToStudy string2',
'findingsFromStudy': 'findingsFromStudy string2',
'focusOnUnderrepresentedPopulations': None,
'accessTier': 'UNSET',
'workspaceDemographic': {
"raceEthnicity": None,
"age": None,
"sexAtBirth": None,
"genderIdentity": None,
"sexualOrientation": None,
"geography": None,
"disabilityStatus": None,
"accessToCare": None,
"educationLevel": None,
"incomeLevel": None,
"others": None
},
'cdrVersion': None
},
result['data'])
def test_get_research_projects_directory_search_and_filter(self):
# create researchers
researchers_json = [
{
"userId": 0,
"creationTime": "2019-11-26T21:21:13.056Z",
"modifiedTime": "2019-11-26T21:21:13.056Z",
"givenName": "givenname1",
"familyName": "familyname1",
"streetAddress1": "string",
"streetAddress2": "string",
"city": "string",
"state": "string",
"zipCode": "string",
"country": "string",
"ethnicity": "HISPANIC",
"gender": ["MAN"],
"race": ["AIAN"],
"degree": ["PHD", "MPH"],
"sexAtBirth": ["FEMALE"],
"sexualOrientation": "BISEXUAL",
"affiliations": [
{
"institution": "institution1",
"role": "institution role 1",
"nonAcademicAffiliation": "INDUSTRY"
}
],
"verifiedInstitutionalAffiliation": {
"institutionDisplayName": "display name",
"institutionShortName": "verified institution",
"institutionalRole": "verified institution role 1",
"nonAcademicAffiliation": "INDUSTRY"
}
},
{
"userId": 1,
"creationTime": "2019-11-27T21:21:13.056Z",
"modifiedTime": "2019-11-27T21:21:13.056Z",
"givenName": "givenname2",
"familyName": "familyname2",
"streetAddress1": "string2",
"streetAddress2": "string2",
"city": "string2",
"state": "string2",
"zipCode": "string2",
"country": "string2",
"ethnicity": "HISPANIC",
"sexualOrientation": "BISEXUAL",
"gender": ["MAN", "WOMAN"],
"race": ["AIAN", "WHITE"],
"degree": ["PHD", "MPH"],
"affiliations": [
{
"institution": "institution2",
"role": "institution role 2"
},
{
"institution": "institution22",
"role": "institution role 22",
"nonAcademicAffiliation": "INDUSTRY"
}
],
"verifiedInstitutionalAffiliation": {
"institutionShortName": "verified institution",
"institutionalRole": "verified institution role 1",
"nonAcademicAffiliation": "INDUSTRY"
}
}
]
self.send_post('workbench/directory/researchers', request_data=researchers_json)
# create workspace
request_json = [
{
"workspaceId": 0,
"name": "workspace name str Search test",
"creationTime": "2019-11-25T17:43:41.085Z",
"modifiedTime": "2019-11-25T17:43:41.085Z",
"status": "ACTIVE",
"workspaceUsers": [
{
"userId": 1,
"role": "OWNER",
"status": "ACTIVE"
}
],
"creator": {
"userId": 1,
"givenName": "aaa",
"familyName": "bbb"
},
"excludeFromPublicDirectory": False,
"ethicalLegalSocialImplications": True,
"diseaseFocusedResearch": True,
"diseaseFocusedResearchName": "disease focused research name str",
"otherPurposeDetails": "other purpose details str",
"methodsDevelopment": True,
"controlSet": True,
"ancestry": True,
"socialBehavioral": False,
"populationHealth": True,
"drugDevelopment": True,
"commercialPurpose": True,
"educational": True,
"otherPurpose": True,
"scientificApproaches": 'reasonForInvestigation string',
"intendToStudy": 'intendToStudy string',
"findingsFromStudy": 'findingsFromStudy string',
"focusOnUnderrepresentedPopulations": True,
"workspaceDemographic": {
"raceEthnicity": ['AIAN', 'MENA'],
"age": ['AGE_0_11', 'AGE_65_74'],
"sexAtBirth": "UNSET",
"genderIdentity": "OTHER_THAN_MAN_WOMAN",
"sexualOrientation": "OTHER_THAN_STRAIGHT",
"geography": "RURAL",
"disabilityStatus": "DISABILITY",
"accessToCare": "NOT_EASILY_ACCESS_CARE",
"educationLevel": "LESS_THAN_HIGH_SCHOOL",
"incomeLevel": "BELOW_FEDERAL_POVERTY_LEVEL_200_PERCENT",
"others": "string"
}
},
{
"workspaceId": 1,
"name": "workspace name str 2",
"creationTime": "2019-11-25T17:43:41.085Z",
"modifiedTime": "2019-11-25T17:43:41.085Z",
"status": "INACTIVE",
"workspaceUsers": [
{
"userId": 0,
"role": "OWNER",
"status": "ACTIVE"
},
{
"userId": 1,
"role": "READER",
"status": "ACTIVE"
}
],
"creator": {
"userId": 0,
"givenName": "aaa",
"familyName": "bbb"
},
"excludeFromPublicDirectory": False,
"ethicalLegalSocialImplications": False,
"diseaseFocusedResearch": True,
"diseaseFocusedResearchName": "disease focused research name str 2",
"otherPurposeDetails": "other purpose details str 2",
"methodsDevelopment": False,
"controlSet": False,
"ancestry": False,
"socialBehavioral": False,
"populationHealth": False,
"drugDevelopment": False,
"commercialPurpose": False,
"educational": False,
"otherPurpose": False,
"scientificApproaches": 'reasonForInvestigation string2',
"intendToStudy": 'intendToStudy string2',
"findingsFromStudy": 'findingsFromStudy string2'
}
]
now = clock.CLOCK.now()
sequest_hours_ago = now - timedelta(hours=24)
with FakeClock(sequest_hours_ago):
self.send_post('workbench/directory/workspaces', request_data=request_json)
result = self.send_get('researchHub/projectDirectory?status=ACTIVE')
self.assertEqual(len(result['data']), 1)
# test search by project purpose
result = self.send_get('researchHub/projectDirectory?projectPurpose=controlSet')
self.assertEqual(result['totalActiveProjects'], 1)
self.assertEqual(result['totalMatchedRecords'], 1)
self.assertEqual(len(result['data']), 1)
# test search by multiple project purpose
result = self.send_get('researchHub/projectDirectory?projectPurpose=controlSet,socialBehavioral')
self.assertEqual(len(result['data']), 0)
# test search by workspace name
result = self.send_get('researchHub/projectDirectory?workspaceNameLike=Search%20test')
self.assertEqual(len(result['data']), 1)
# test search by workspace intendToStudy
result = self.send_get('researchHub/projectDirectory?intendToStudyLike=string2')
self.assertEqual(len(result['data']), 1)
# test search by generalized parameter workspaceLike
result = self.send_get('researchHub/projectDirectory?workspaceLike=str')
self.assertEqual(result['totalActiveProjects'], 1)
self.assertEqual(result['totalMatchedRecords'], 2)
self.assertEqual(len(result['data']), 2)
result = self.send_get('researchHub/projectDirectory?workspaceLike=string2')
self.assertEqual(len(result['data']), 1)
# test parameter "workspaceLike" will overwrite "intendToStudyLike"
result = self.send_get('researchHub/projectDirectory?workspaceLike=str&intendToStudyLike=string2')
self.assertEqual(len(result['data']), 2)
# test search by owner given/family name
result = self.send_get('researchHub/projectDirectory?givenName=givenname1')
self.assertEqual(len(result['data']), 1)
result = self.send_get('researchHub/projectDirectory?givenName=givenname2')
self.assertEqual(len(result['data']), 1)
result = self.send_get('researchHub/projectDirectory?familyName=familyname1')
self.assertEqual(len(result['data']), 1)
result = self.send_get('researchHub/projectDirectory?familyName=familyname2')
self.assertEqual(len(result['data']), 1)
# test search by owner full name
result = self.send_get('researchHub/projectDirectory?ownerName=nname1%20fami')
self.assertEqual(len(result['data']), 1)
# test search by user id
result = self.send_get('researchHub/projectDirectory?userId=1&userRole=owner')
self.assertEqual(result['totalActiveProjects'], 1)
self.assertEqual(result['totalMatchedRecords'], 1)
self.assertEqual(len(result['data']), 1)
result = self.send_get('researchHub/projectDirectory?userId=1&userRole=member')
self.assertEqual(len(result['data']), 1)
result = self.send_get('researchHub/projectDirectory?userId=1&userRole=all')
self.assertEqual(len(result['data']), 2)
# test page and page size
result = self.send_get('researchHub/projectDirectory?page=1&pageSize=1')
self.assertEqual(result['totalActiveProjects'], 1)
self.assertEqual(result['totalMatchedRecords'], 2)
self.assertEqual(len(result['data']), 1)
result = self.send_get('researchHub/projectDirectory?page=2&pageSize=1')
self.assertEqual(len(result['data']), 1)
result = self.send_get('researchHub/projectDirectory?page=3&pageSize=1')
self.assertEqual(len(result['data']), 0)
result = self.send_get('researchHub/projectDirectory?page=1&pageSize=2')
self.assertEqual(result['totalMatchedRecords'], 2)
self.assertEqual(len(result['data']), 2)
def test_get_research_projects_directory_less_than_23_hours(self):
# create researchers
researchers_json = [
{
"userId": 0,
"creationTime": "2019-11-26T21:21:13.056Z",
"modifiedTime": "2019-11-26T21:21:13.056Z",
"givenName": "given name 1",
"familyName": "family name 1",
"streetAddress1": "string",
"streetAddress2": "string",
"city": "string",
"state": "string",
"zipCode": "string",
"country": "string",
"ethnicity": "HISPANIC",
"gender": ["MAN"],
"race": ["AIAN"],
"sexAtBirth": ["FEMALE"],
"sexualOrientation": "BISEXUAL",
"affiliations": [
{
"institution": "institution1",
"role": "institution role 1",
"nonAcademicAffiliation": "INDUSTRY"
}
],
"verifiedInstitutionalAffiliation": {
"institutionShortName": "verified institution",
"institutionalRole": "verified institution role 1",
"nonAcademicAffiliation": "INDUSTRY"
}
}
]
self.send_post('workbench/directory/researchers', request_data=researchers_json)
# create workspace
request_json = [
{
"workspaceId": 0,
"name": "workspace name str",
"creationTime": "2019-11-25T17:43:41.085Z",
"modifiedTime": "2019-11-25T17:43:41.085Z",
"status": "ACTIVE",
"workspaceUsers": [
{
"userId": 0,
"role": "READER",
"status": "ACTIVE"
}
],
"excludeFromPublicDirectory": False,
"diseaseFocusedResearch": True,
"diseaseFocusedResearchName": "disease focused research name str",
"otherPurposeDetails": "other purpose details str",
"methodsDevelopment": True,
"controlSet": True,
"ancestry": True,
"socialBehavioral": True,
"populationHealth": True,
"drugDevelopment": True,
"commercialPurpose": True,
"educational": True,
"otherPurpose": True,
"scientificApproaches": 'reasonForInvestigation string',
"intendToStudy": 'intendToStudy string',
"findingsFromStudy": 'findingsFromStudy string',
"focusOnUnderrepresentedPopulations": True,
"workspaceDemographic": {
"raceEthnicity": ['AIAN', 'MENA'],
"age": ['AGE_0_11', 'AGE_65_74'],
"sexAtBirth": "UNSET",
"genderIdentity": "OTHER_THAN_MAN_WOMAN",
"sexualOrientation": "OTHER_THAN_STRAIGHT",
"geography": "RURAL",
"disabilityStatus": "DISABILITY",
"accessToCare": "NOT_EASILY_ACCESS_CARE",
"educationLevel": "LESS_THAN_HIGH_SCHOOL",
"incomeLevel": "BELOW_FEDERAL_POVERTY_LEVEL_200_PERCENT",
"others": "string"
}
}
]
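        # Only 22 hours ago: still inside the sequestration window, so the
        # directory below must come back empty.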
now = clock.CLOCK.now()
sequest_hours_ago = now - timedelta(hours=22)
with FakeClock(sequest_hours_ago):
self.send_post('workbench/directory/workspaces', request_data=request_json)
# test get research projects directory before review
result = self.send_get('researchHub/projectDirectory')
self.assertEqual(len(result['data']), 0)
def test_workspace_audit_sync_api(self):
# create researchers
researchers_json = [
{
"userId": 0,
"creationTime": "2019-11-26T21:21:13.056Z",
"modifiedTime": "2019-11-26T21:21:13.056Z",
"givenName": "given name 1",
"familyName": "family name 1",
"streetAddress1": "string",
"streetAddress2": "string",
"city": "string",
"state": "string",
"zipCode": "string",
"country": "string",
"ethnicity": "HISPANIC",
"gender": ["MAN"],
"race": ["AIAN"],
"sexAtBirth": ["FEMALE"],
"sexualOrientation": "BISEXUAL",
"affiliations": [
{
"institution": "institution1",
"role": "institution role 1",
"nonAcademicAffiliation": "INDUSTRY"
}
]
},
{
"userId": 1,
"creationTime": "2019-11-27T21:21:13.056Z",
"modifiedTime": "2019-11-27T21:21:13.056Z",
"givenName": "given name 2",
"familyName": "family name 2",
"streetAddress1": "string2",
"streetAddress2": "string2",
"city": "string2",
"state": "string2",
"zipCode": "string2",
"country": "string2",
"ethnicity": "HISPANIC",
"sexualOrientation": "BISEXUAL",
"gender": ["MAN", "WOMAN"],
"race": ["AIAN", "WHITE"],
"affiliations": [
{
"institution": "institution2",
"role": "institution role 2"
},
{
"institution": "institution22",
"role": "institution role 22",
"nonAcademicAffiliation": "INDUSTRY"
}
]
}
]
self.send_post('workbench/directory/researchers', request_data=researchers_json)
# create workspace
cdr_version = 'irving'
request_json = [
{
"workspaceId": 0,
"name": "workspace name str",
"creationTime": "2019-11-25T17:43:41.085Z",
"modifiedTime": "2019-11-25T17:43:41.085Z",
"status": "ACTIVE",
"workspaceUsers": [
{
"userId": 0,
"role": "READER",
"status": "ACTIVE"
},
{
"userId": 1,
"role": "OWNER",
"status": "ACTIVE"
}
],
"creator": {
"userId": 0,
"givenName": "aaa",
"familyName": "bbb"
},
"excludeFromPublicDirectory": False,
"ethicalLegalSocialImplications": True,
"diseaseFocusedResearch": True,
"diseaseFocusedResearchName": "disease focused research name str",
"otherPurposeDetails": "other purpose details str",
"methodsDevelopment": True,
"controlSet": True,
"ancestry": True,
"socialBehavioral": True,
"populationHealth": True,
"drugDevelopment": True,
"commercialPurpose": True,
"educational": True,
"otherPurpose": True,
"scientificApproaches": 'reasonForInvestigation string',
"intendToStudy": 'intendToStudy string',
"findingsFromStudy": 'findingsFromStudy string',
"focusOnUnderrepresentedPopulations": True,
"workspaceDemographic": {
"raceEthnicity": ['AIAN', 'MENA'],
"age": ['AGE_0_11', 'AGE_65_74'],
"sexAtBirth": "UNSET",
"genderIdentity": "OTHER_THAN_MAN_WOMAN",
"sexualOrientation": "OTHER_THAN_STRAIGHT",
"geography": "RURAL",
"disabilityStatus": "DISABILITY",
"accessToCare": "NOT_EASILY_ACCESS_CARE",
"educationLevel": "LESS_THAN_HIGH_SCHOOL",
"incomeLevel": "BELOW_FEDERAL_POVERTY_LEVEL_200_PERCENT",
"others": "string"
},
"cdrVersionName": cdr_version
},
{
"workspaceId": 1,
"name": "workspace name str 2",
"creationTime": "2019-11-25T17:43:41.085Z",
"modifiedTime": "2019-11-25T17:43:41.085Z",
"status": "INACTIVE",
"workspaceUsers": [
{
"userId": 0,
"role": "OWNER",
"status": "ACTIVE"
},
{
"userId": 1,
"role": "READER",
"status": "ACTIVE"
}
],
"excludeFromPublicDirectory": False,
"ethicalLegalSocialImplications": False,
"diseaseFocusedResearch": True,
"diseaseFocusedResearchName": "disease focused research name str 2",
"otherPurposeDetails": "other purpose details str 2",
"methodsDevelopment": False,
"controlSet": False,
"ancestry": False,
"socialBehavioral": False,
"populationHealth": False,
"drugDevelopment": False,
"commercialPurpose": False,
"educational": False,
"otherPurpose": False,
"scientificApproaches": 'reasonForInvestigation string2',
"intendToStudy": 'intendToStudy string2',
"findingsFromStudy": 'findingsFromStudy string2',
"cdrVersionName": cdr_version
}
]
self.send_post('workbench/directory/workspaces', request_data=request_json)
# test workbench audit
result = self.send_get('workbench/audit/workspace/snapshots')
self.assertIn({'snapshotId': 1, 'workspaceId': 0, 'name': 'workspace name str',
'creationTime': '2019-11-25T17:43:41.085000', 'modifiedTime': '2019-11-25T17:43:41.085000',
'status': 'ACTIVE',
'workspaceUsers': [{'userId': 0, 'role': 'READER', 'status': 'ACTIVE', 'isCreator': True},
{'userId': 1, 'role': 'OWNER', 'status': 'ACTIVE', 'isCreator': False}],
'workspaceResearchers': [
{'userId': 0, 'creationTime': '2019-11-26T21:21:13.056000',
'modifiedTime': '2019-11-26T21:21:13.056000', 'givenName': 'given name 1',
'familyName': 'family name 1', 'email': None, 'verifiedInstitutionalAffiliation': {},
'affiliations': [
{'institution': 'institution1', 'role': 'institution role 1', 'isVerified': None,
'nonAcademicAffiliation': 'INDUSTRY'}
]},
{'userId': 1, 'creationTime': '2019-11-27T21:21:13.056000',
'modifiedTime': '2019-11-27T21:21:13.056000', 'givenName': 'given name 2',
'familyName': 'family name 2', 'email': None, 'verifiedInstitutionalAffiliation': {},
'affiliations': [
{'institution': 'institution2', 'role': 'institution role 2',
'isVerified': None, 'nonAcademicAffiliation': 'UNSET'},
{'institution': 'institution22', 'role': 'institution role 22', 'isVerified': None,
'nonAcademicAffiliation': 'INDUSTRY'}
]}],
'excludeFromPublicDirectory': False, 'ethicalLegalSocialImplications': True,
'reviewRequested': False, 'diseaseFocusedResearch': True,
'diseaseFocusedResearchName': 'disease focused research name str',
'otherPurposeDetails': 'other purpose details str', 'methodsDevelopment': True,
'controlSet': True, 'ancestry': True, 'socialBehavioral': True, 'populationHealth': True,
'drugDevelopment': True, 'commercialPurpose': True, 'educational': True, 'otherPurpose': True,
'scientificApproaches': 'reasonForInvestigation string', 'intendToStudy': 'intendToStudy string',
'findingsFromStudy': 'findingsFromStudy string', 'focusOnUnderrepresentedPopulations': True,
'accessTier': 'UNSET',
'workspaceDemographic': {
'raceEthnicity': ['AIAN', 'MENA'], 'age': ['AGE_0_11', 'AGE_65_74'],
'sexAtBirth': None, 'genderIdentity': 'OTHER_THAN_MAN_WOMAN',
'sexualOrientation': 'OTHER_THAN_STRAIGHT', 'geography': 'RURAL',
'disabilityStatus': 'DISABILITY', 'accessToCare': 'NOT_EASILY_ACCESS_CARE',
'educationLevel': 'LESS_THAN_HIGH_SCHOOL',
'incomeLevel': 'BELOW_FEDERAL_POVERTY_LEVEL_200_PERCENT',
'others': 'string'},
'cdrVersion': cdr_version
}, result)
self.assertIn({'snapshotId': 2, 'workspaceId': 1, 'name': 'workspace name str 2',
'creationTime': '2019-11-25T17:43:41.085000', 'modifiedTime': '2019-11-25T17:43:41.085000',
'status': 'INACTIVE',
'workspaceUsers': [
{'userId': 0, 'role': 'OWNER', 'status': 'ACTIVE', 'isCreator': False},
{'userId': 1, 'role': 'READER', 'status': 'ACTIVE', 'isCreator': False}
],
'workspaceResearchers': [
{'userId': 0, 'creationTime': '2019-11-26T21:21:13.056000',
'modifiedTime': '2019-11-26T21:21:13.056000', 'givenName': 'given name 1',
'familyName': 'family name 1', 'email': None, 'verifiedInstitutionalAffiliation': {},
'affiliations': [
{'institution': 'institution1', 'role': 'institution role 1', 'isVerified': None,
'nonAcademicAffiliation': 'INDUSTRY'}
]},
{'userId': 1, 'creationTime': '2019-11-27T21:21:13.056000',
'modifiedTime': '2019-11-27T21:21:13.056000', 'givenName': 'given name 2',
'familyName': 'family name 2', 'email': None, 'verifiedInstitutionalAffiliation': {},
'affiliations': [
{'institution': 'institution2', 'role': 'institution role 2', 'isVerified': None,
'nonAcademicAffiliation': 'UNSET'},
{'institution': 'institution22', 'role': 'institution role 22', 'isVerified': None,
'nonAcademicAffiliation': 'INDUSTRY'}
]}],
'excludeFromPublicDirectory': False, 'ethicalLegalSocialImplications': False,
'reviewRequested': False, 'diseaseFocusedResearch': True,
'diseaseFocusedResearchName': 'disease focused research name str 2',
'otherPurposeDetails': 'other purpose details str 2', 'methodsDevelopment': False,
'controlSet': False, 'ancestry': False, 'socialBehavioral': False, 'populationHealth': False,
'drugDevelopment': False, 'commercialPurpose': False, 'educational': False,
'otherPurpose': False, 'scientificApproaches': 'reasonForInvestigation string2',
'intendToStudy': 'intendToStudy string2', 'findingsFromStudy': 'findingsFromStudy string2',
'focusOnUnderrepresentedPopulations': None, 'accessTier': 'UNSET',
'workspaceDemographic': {
'raceEthnicity': None, 'age': None, 'sexAtBirth': None, 'genderIdentity': None,
'sexualOrientation': None, 'geography': None, 'disabilityStatus': None,
'accessToCare': None, 'educationLevel': None, 'incomeLevel': None, 'others': None},
'cdrVersion': cdr_version
}, result)
result = self.send_get('workbench/audit/workspace/snapshots?last_snapshot_id=1')
self.assertEqual(len(result), 1)
self.assertIn({'snapshotId': 2, 'workspaceId': 1, 'name': 'workspace name str 2',
'creationTime': '2019-11-25T17:43:41.085000', 'modifiedTime': '2019-11-25T17:43:41.085000',
'status': 'INACTIVE',
'workspaceUsers': [
{'userId': 0, 'role': 'OWNER', 'status': 'ACTIVE', 'isCreator': False},
{'userId': 1, 'role': 'READER', 'status': 'ACTIVE', 'isCreator': False}
],
'workspaceResearchers': [
{'userId': 0, 'creationTime': '2019-11-26T21:21:13.056000',
'modifiedTime': '2019-11-26T21:21:13.056000', 'givenName': 'given name 1',
'familyName': 'family name 1', 'email': None, 'verifiedInstitutionalAffiliation': {},
'affiliations': [
{'institution': 'institution1', 'role': 'institution role 1', 'isVerified': None,
'nonAcademicAffiliation': 'INDUSTRY'}
]},
{'userId': 1, 'creationTime': '2019-11-27T21:21:13.056000',
'modifiedTime': '2019-11-27T21:21:13.056000', 'givenName': 'given name 2',
'familyName': 'family name 2', 'email': None, 'verifiedInstitutionalAffiliation': {},
'affiliations': [
{'institution': 'institution2', 'role': 'institution role 2', 'isVerified': None,
'nonAcademicAffiliation': 'UNSET'},
{'institution': 'institution22', 'role': 'institution role 22', 'isVerified': None,
'nonAcademicAffiliation': 'INDUSTRY'}
]}],
'excludeFromPublicDirectory': False, 'ethicalLegalSocialImplications': False,
'reviewRequested': False, 'diseaseFocusedResearch': True,
'diseaseFocusedResearchName': 'disease focused research name str 2',
'otherPurposeDetails': 'other purpose details str 2', 'methodsDevelopment': False,
'controlSet': False, 'ancestry': False, 'socialBehavioral': False, 'populationHealth': False,
'drugDevelopment': False, 'commercialPurpose': False, 'educational': False,
'otherPurpose': False, 'scientificApproaches': 'reasonForInvestigation string2',
'intendToStudy': 'intendToStudy string2', 'findingsFromStudy': 'findingsFromStudy string2',
'focusOnUnderrepresentedPopulations': None, 'accessTier': 'UNSET',
'workspaceDemographic': {
'raceEthnicity': None, 'age': None, 'sexAtBirth': None, 'genderIdentity': None,
'sexualOrientation': None, 'geography': None, 'disabilityStatus': None,
'accessToCare': None, 'educationLevel': None, 'incomeLevel': None, 'others': None},
'cdrVersion': cdr_version
}, result)
result = self.send_get('workbench/audit/workspace/snapshots?last_snapshot_id=2')
self.assertEqual(len(result), 0)
result = self.send_get('workbench/audit/workspace/snapshots?snapshot_id=1')
self.assertEqual(len(result), 1)
# test get latest snapshot by workspace id
updated_request_json = [
{
"workspaceId": 1,
"name": "workspace name str 3",
"creationTime": "2019-11-25T17:43:41.085Z",
"modifiedTime": "2019-11-26T17:43:41.085Z",
"status": "INACTIVE",
"workspaceUsers": [
{
"userId": 0,
"role": "OWNER",
"status": "ACTIVE"
},
{
"userId": 1,
"role": "READER",
"status": "ACTIVE"
}
],
"excludeFromPublicDirectory": False,
"ethicalLegalSocialImplications": False,
"diseaseFocusedResearch": True,
"diseaseFocusedResearchName": "disease focused research name str 2",
"otherPurposeDetails": "other purpose details str 2",
"methodsDevelopment": False,
"controlSet": False,
"ancestry": False,
"socialBehavioral": False,
"populationHealth": False,
"drugDevelopment": False,
"commercialPurpose": False,
"educational": False,
"otherPurpose": False,
"scientificApproaches": 'reasonForInvestigation string2',
"intendToStudy": 'intendToStudy string2',
"findingsFromStudy": 'findingsFromStudy string2',
"cdrVersionName": cdr_version
}
]
self.send_post('workbench/directory/workspaces', request_data=updated_request_json)
result = self.send_get('workbench/audit/workspace/snapshots')
self.assertEqual(len(result), 3)
result = self.send_get('workbench/audit/workspace/snapshots?workspace_id=1')
self.assertEqual(len(result), 1)
self.assertEqual(result[0]['name'], 'workspace name str 3')
def test_hide_workspace_without_verified_institution_from_RH(self):
# create researchers
researchers_json = [
{
"userId": 0,
"creationTime": "2019-11-26T21:21:13.056Z",
"modifiedTime": "2019-11-26T21:21:13.056Z",
"givenName": "given name 1",
"familyName": "family name 1",
"streetAddress1": "string",
"streetAddress2": "string",
"city": "string",
"state": "string",
"zipCode": "string",
"country": "string",
"ethnicity": "HISPANIC",
"gender": ["MAN"],
"race": ["AIAN"],
"sexAtBirth": ["FEMALE"],
"degree": ["PHD", "MPH"],
"sexualOrientation": "BISEXUAL",
"affiliations": [
{
"institution": "institution1",
"role": "institution role 1",
"nonAcademicAffiliation": "INDUSTRY"
}
],
"verifiedInstitutionalAffiliation": {
"institutionShortName": "verified institution",
"institutionalRole": "verified institution role 1",
"nonAcademicAffiliation": "INDUSTRY"
}
},
{
"userId": 1,
"creationTime": "2019-11-27T21:21:13.056Z",
"modifiedTime": "2019-11-27T21:21:13.056Z",
"givenName": "given name 2",
"familyName": "family name 2",
"streetAddress1": "string2",
"streetAddress2": "string2",
"city": "string2",
"state": "string2",
"zipCode": "string2",
"country": "string2",
"ethnicity": "HISPANIC",
"sexualOrientation": "BISEXUAL",
"gender": ["MAN", "WOMAN"],
"race": ["AIAN", "WHITE"],
"degree": ["PHD", "MPH"],
"affiliations": [
{
"institution": "institution2",
"role": "institution role 2"
},
{
"institution": "institution22",
"role": "institution role 22",
"nonAcademicAffiliation": "INDUSTRY"
}
]
}
]
self.send_post('workbench/directory/researchers', request_data=researchers_json)
# create workspace
request_json = [
{
"workspaceId": 0,
"name": "workspace name str",
"creationTime": "2019-11-25T17:43:41.085Z",
"modifiedTime": "2019-11-25T17:43:41.085Z",
"status": "ACTIVE",
"workspaceUsers": [
{
"userId": 1,
"role": "OWNER",
"status": "ACTIVE"
}
],
"excludeFromPublicDirectory": False,
"ethicalLegalSocialImplications": True,
"diseaseFocusedResearch": True,
"diseaseFocusedResearchName": "disease focused research name str",
"otherPurposeDetails": "other purpose details str",
"methodsDevelopment": True,
"controlSet": True,
"ancestry": True,
"socialBehavioral": True,
"populationHealth": True,
"drugDevelopment": True,
"commercialPurpose": True,
"educational": True,
"otherPurpose": True,
"scientificApproaches": 'reasonForInvestigation string',
"intendToStudy": 'intendToStudy string',
"findingsFromStudy": 'findingsFromStudy string',
"focusOnUnderrepresentedPopulations": True,
"workspaceDemographic": {
"raceEthnicity": ['AIAN', 'MENA'],
"age": ['AGE_0_11', 'AGE_65_74'],
"sexAtBirth": "UNSET",
"genderIdentity": "OTHER_THAN_MAN_WOMAN",
"sexualOrientation": "OTHER_THAN_STRAIGHT",
"geography": "RURAL",
"disabilityStatus": "DISABILITY",
"accessToCare": "NOT_EASILY_ACCESS_CARE",
"educationLevel": "LESS_THAN_HIGH_SCHOOL",
"incomeLevel": "BELOW_FEDERAL_POVERTY_LEVEL_200_PERCENT",
"others": "string"
}
},
{
"workspaceId": 1,
"name": "workspace name str 2",
"creationTime": "2019-11-25T17:43:41.085Z",
"modifiedTime": "2019-11-25T17:43:41.085Z",
"status": "INACTIVE",
"workspaceUsers": [
{
"userId": 0,
"role": "OWNER",
"status": "ACTIVE"
},
{
"userId": 1,
"role": "READER",
"status": "ACTIVE"
}
],
"creator": {
"userId": 0,
"givenName": "aaa",
"familyName": "bbb"
},
"excludeFromPublicDirectory": False,
"ethicalLegalSocialImplications": False,
"diseaseFocusedResearch": True,
"diseaseFocusedResearchName": "disease focused research name str 2",
"otherPurposeDetails": "other purpose details str 2",
"methodsDevelopment": False,
"controlSet": False,
"ancestry": False,
"socialBehavioral": False,
"populationHealth": False,
"drugDevelopment": False,
"commercialPurpose": False,
"educational": False,
"otherPurpose": False,
"scientificApproaches": 'reasonForInvestigation string2',
"intendToStudy": 'intendToStudy string2',
"findingsFromStudy": 'findingsFromStudy string2'
}
]
now = clock.CLOCK.now()
sequest_hours_ago = now - timedelta(hours=24)
with FakeClock(sequest_hours_ago):
self.send_post('workbench/directory/workspaces', request_data=request_json)
# test get research projects directory before review
result = self.send_get('researchHub/projectDirectory')
self.assertEqual(len(result['data']), 1)
self.assertIn({'workspaceId': 1, 'snapshotId': 2, 'name': 'workspace name str 2',
'creationTime': '2019-11-25T17:43:41.085000',
'modifiedTime': '2019-11-25T17:43:41.085000', 'status': 'INACTIVE',
'workspaceUsers': [
{'userId': 0, 'userName': 'given name 1 family name 1', 'degree': ['PHD', 'MPH'],
'affiliations': [
{'institution': 'institution1', 'role': 'institution role 1', 'isVerified': None,
'nonAcademicAffiliation': 'INDUSTRY'},
{'institution': 'verified institution', 'role': 'verified institution role 1',
'isVerified': True, 'nonAcademicAffiliation': 'UNSET'}]},
{'userId': 1, 'userName': 'given name 2 family name 2', 'degree': ['PHD', 'MPH'],
'affiliations': [
{'institution': 'institution2', 'role': 'institution role 2', 'isVerified': None,
'nonAcademicAffiliation': 'UNSET'},
{'institution': 'institution22', 'role': 'institution role 22', 'isVerified': None,
'nonAcademicAffiliation': 'INDUSTRY'}]}
],
'workspaceOwner': [{'userId': 0, 'userName': 'given name 1 family name 1',
'degree': ['PHD', 'MPH'],
'affiliations': [{'institution': 'institution1',
'role': 'institution role 1',
'isVerified': None,
'nonAcademicAffiliation': 'INDUSTRY'},
{'institution': 'verified institution',
'role': 'verified institution role 1',
'isVerified': True, 'nonAcademicAffiliation': 'UNSET'}]}],
'hasVerifiedInstitution': True,
'excludeFromPublicDirectory': False, 'ethicalLegalSocialImplications': False,
'reviewRequested': False, 'diseaseFocusedResearch': True,
'diseaseFocusedResearchName': 'disease focused research name str 2',
'otherPurposeDetails': 'other purpose details str 2', 'methodsDevelopment': False,
'controlSet': False, 'ancestry': False, 'socialBehavioral': False, 'populationHealth': False,
'drugDevelopment': False, 'commercialPurpose': False, 'educational': False,
'otherPurpose': False, 'scientificApproaches': 'reasonForInvestigation string2',
'intendToStudy': 'intendToStudy string2',
'findingsFromStudy': 'findingsFromStudy string2',
'focusOnUnderrepresentedPopulations': None,
'accessTier': 'UNSET',
'workspaceDemographic': {
"raceEthnicity": None,
"age": None,
"sexAtBirth": None,
"genderIdentity": None,
"sexualOrientation": None,
"geography": None,
"disabilityStatus": None,
"accessToCare": None,
"educationLevel": None,
"incomeLevel": None,
"others": None
},
'cdrVersion': None
},
result['data'])
# update researcher to add verified institution
researchers_json = [
{
"userId": 1,
"creationTime": "2019-11-27T21:21:13.056Z",
"modifiedTime": "2019-11-27T21:21:14.056Z",
"givenName": "given name 2",
"familyName": "family name 2",
"streetAddress1": "string2",
"streetAddress2": "string2",
"city": "string2",
"state": "string2",
"zipCode": "string2",
"country": "string2",
"ethnicity": "HISPANIC",
"sexualOrientation": "BISEXUAL",
"gender": ["MAN", "WOMAN"],
"race": ["AIAN", "WHITE"],
"affiliations": [
{
"institution": "institution2",
"role": "institution role 2"
},
{
"institution": "institution22",
"role": "institution role 22",
"nonAcademicAffiliation": "INDUSTRY"
}
],
"verifiedInstitutionalAffiliation": {
"institutionShortName": "verified institution",
"institutionalRole": "verified institution role 1",
"nonAcademicAffiliation": "INDUSTRY"
}
}
]
self.send_post('workbench/directory/researchers', request_data=researchers_json)
result = self.send_get('researchHub/projectDirectory')
self.assertEqual(len(result['data']), 2)
def test_get_audit_researchers_with_params(self):
researchers_json = [
{
"userId": 0,
"creationTime": "2019-11-26T21:21:13.056Z",
"modifiedTime": "2019-11-26T21:21:13.056Z",
"givenName": "given name 1",
"familyName": "family name 1",
"email": "tester@email.com",
"streetAddress1": "string",
"streetAddress2": "string",
"city": "string",
"state": "string",
"zipCode": "string",
"country": "string",
"ethnicity": "HISPANIC",
"gender": ["MAN"],
"race": ["AIAN"],
"degree": ["PHD", "MPH"],
"sexAtBirth": ["FEMALE"],
"sexualOrientation": "BISEXUAL",
"affiliations": [
{
"institution": "institution1",
"role": "institution role 1",
"nonAcademicAffiliation": "INDUSTRY"
}
],
"verifiedInstitutionalAffiliation": {
"institutionDisplayName": "display name",
"institutionShortName": "verified institution",
"institutionalRole": "verified institution role 1",
"nonAcademicAffiliation": "INDUSTRY"
}
},
{
"userId": 1,
"creationTime": "2019-11-27T21:21:13.056Z",
"modifiedTime": "2019-11-27T21:21:13.056Z",
"givenName": "given name 2",
"familyName": "family name 2",
"streetAddress1": "string2",
"streetAddress2": "string2",
"city": "string2",
"state": "string2",
"zipCode": "string2",
"country": "string2",
"ethnicity": "HISPANIC",
"sexualOrientation": "BISEXUAL",
"gender": ["MAN", "WOMAN"],
"race": ["AIAN", "WHITE"],
"degree": ["PHD", "MPH"],
"affiliations": [
{
"institution": "institution2",
"role": "institution role 2"
},
{
"institution": "institution22",
"role": "institution role 22",
"nonAcademicAffiliation": "INDUSTRY"
}
],
"verifiedInstitutionalAffiliation": {
"institutionShortName": "verified institution",
"institutionalRole": "verified institution role 1",
"nonAcademicAffiliation": "INDUSTRY"
}
}
]
self.send_post('workbench/directory/researchers', request_data=researchers_json)
result = self.send_get('workbench/audit/researcher/snapshots')
self.assertEqual(len(result), 2)
self.assertIsNotNone(result[0]['givenName'])
self.assertIsNotNone(result[0]['familyName'])
self.assertIsNotNone(result[0]['email'])
self.assertEqual(len(result[0]['affiliations']), 2)
self.assertIsNotNone(result[1]['givenName'])
self.assertIsNotNone(result[1]['familyName'])
self.assertIsNone(result[1]['email'])
self.assertEqual(len(result[1]['affiliations']), 3)
result = self.send_get('workbench/audit/researcher/snapshots?snapshot_id=1')
self.assertEqual(len(result), 1)
self.assertEqual(result[0]['givenName'], 'given name 1')
self.assertEqual(result[0]['familyName'], 'family name 1')
self.assertEqual(result[0]['email'], 'tester@email.com')
result = self.send_get('workbench/audit/researcher/snapshots?last_snapshot_id=2')
self.assertEmpty(result)
result = self.send_get('workbench/audit/researcher/snapshots?last_snapshot_id=1')
self.assertEqual(len(result), 1)
self.assertEqual(result[0]['givenName'], 'given name 2')
self.assertEqual(result[0]['familyName'], 'family name 2')
self.assertIsNone(result[0]['email'])
result = self.send_get('workbench/audit/researcher/snapshots?user_source_id=1')
self.assertEqual(len(result), 1)
self.assertEqual(result[0]['givenName'], 'given name 2')
self.assertEqual(result[0]['familyName'], 'family name 2')
self.assertIsNone(result[0]['email'])
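# The three queries above filter audit snapshots in different ways: an exact
# snapshot_id, a last_snapshot_id cursor (only snapshots created after it),
# and an exact user_source_id. A minimal sketch of that filtering logic over
# in-memory records (hypothetical helper and field names, not the actual
# server implementation):
def _filter_snapshots_sketch(snapshots, snapshot_id=None, last_snapshot_id=None,
                             user_source_id=None):
    result = snapshots
    if snapshot_id is not None:
        result = [s for s in result if s['id'] == snapshot_id]
    if last_snapshot_id is not None:
        # Cursor semantics: keep only snapshots strictly after the cursor,
        # which is why last_snapshot_id=2 returns nothing and =1 returns one.
        result = [s for s in result if s['id'] > last_snapshot_id]
    if user_source_id is not None:
        result = [s for s in result if s['userSourceId'] == user_source_id]
    return result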
==== File: AlphaPose/alphapose/version.py (Python, 113 bytes) | repo: NeelayS/pose_tracking @ 68d151eef0c6a84623348f7be76ece709e6ca57a | license: MIT ====
# GENERATED VERSION FILE
# TIME: Tue Oct 6 17:46:50 2020
__version__ = "0.3.0+0e6315d"
short_version = "0.3.0"
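# The generated string above follows PEP 440's local-version convention,
# "<release>+<local>", where the local part is a short git hash baked in at
# build time. A sketch of recovering both parts (assumes the single '+'
# separator used here, not a full PEP 440 parser):
base, _, commit = __version__.partition('+')
assert base == short_version  # '0.3.0'
assert commit == '0e6315d'    # the embedded commit hash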
==== File: python/iniparse/configparser.py (Python, 271 bytes) | repo: Soft8Soft/verge3d-blender-to-webgl-addon @ 8e860242a0bce5098edfe640c65a73aa0a333210 | license: Apache-2.0 | 86 stars ====
try:
from ConfigParser import *
# not all objects get imported with __all__
from ConfigParser import Error, InterpolationMissingOptionError
except ImportError:
from configparser import *
from configparser import Error, InterpolationMissingOptionError
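# The try/except above is the standard shim for modules renamed between
# Python 2 and 3: try the old name (ConfigParser) first, fall back to the
# new one, and import names like Error explicitly because a star import
# only pulls in what __all__ exports. The same shape works for other
# renames, e.g. this sketch for the StringIO move:
try:
    from StringIO import StringIO  # Python 2 module
except ImportError:
    from io import StringIO       # Python 3 location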
==== File: app/default/__init__.py (Python, 189 bytes) | repo: Brunoro811/api_dangels @ 21c064eaa4f5009412dddc9676044d6cc08a5b65 | license: MIT ====
from .default_types_users import default_types_users
from .default_client import default_client
from .default_types_sales import default_types_sales
from .default_values import data_month
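# This __init__ re-exports the default fixtures so callers can write
# `from app.default import default_client` without knowing the submodule
# layout. Declaring the public surface explicitly is the usual companion
# step; a sketch (__all__ is not present in the original file):
__all__ = [
    "default_types_users",
    "default_client",
    "default_types_sales",
    "data_month",
]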
==== File: tests/test_partmod.py (Python, 28,001 bytes) | repo: ldevulder/yomi @ 9a9097a0704810e0b730152effdfb7f352e9b4eb | license: Apache-2.0 ====
# -*- coding: utf-8 -*-
#
# Author: Alberto Planas <aplanas@suse.com>
#
# Copyright 2019 SUSE LLC.
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
from unittest.mock import patch
from salt.exceptions import SaltInvocationError
from disk import ParseException
from modules import partmod
from modules import filters
class PartmodTestCase(unittest.TestCase):
@patch('modules.partmod.__grains__')
def test_prepare_partition_data_fails_fs_type(self, __grains__):
partitions = {
'devices': {
'/dev/sda': {
'partitions': [
{
'number': 1,
'size': 'rest',
'type': 'error',
},
],
},
},
}
__grains__.__getitem__.return_value = False
with self.assertRaises(SaltInvocationError) as cm:
partmod.prepare_partition_data(partitions)
self.assertTrue('type error not recognized' in str(cm.exception))
@patch('modules.partmod.__grains__')
@patch('modules.partmod.__salt__')
def test_prepare_partition_data_fails_units_invalid(self, __salt__,
__grains__):
partitions = {
'devices': {
'/dev/sda': {
'partitions': [
{
'number': 1,
'size': '1Kilo',
'type': 'swap',
},
],
},
},
}
__grains__.__getitem__.return_value = False
__salt__.__getitem__.return_value = filters.is_raid
with self.assertRaises(ParseException) as cm:
partmod.prepare_partition_data(partitions)
self.assertTrue('Kilo not recognized' in str(cm.exception))
@patch('modules.partmod.__grains__')
@patch('modules.partmod.__salt__')
def test_prepare_partition_data_fails_units_initial_gap(self, __salt__,
__grains__):
partitions = {
'config': {
'initial_gap': '1024kB',
},
'devices': {
'/dev/sda': {
'partitions': [
{
'number': 1,
'size': '1MB',
'type': 'swap',
},
],
},
},
}
__grains__.__getitem__.return_value = False
__salt__.__getitem__.return_value = filters.is_raid
with self.assertRaises(SaltInvocationError) as cm:
partmod.prepare_partition_data(partitions)
self.assertTrue('Units needs to be' in str(cm.exception))
@patch('modules.partmod.__grains__')
@patch('modules.partmod.__salt__')
def test_prepare_partition_data_bios_no_gap(self, __salt__, __grains__):
partitions = {
'devices': {
'/dev/sda': {
'partitions': [
{
'number': 1,
'size': 'rest',
'type': 'linux',
},
],
},
},
}
__grains__.__getitem__.return_value = False
__salt__.__getitem__.return_value = filters.is_raid
self.assertEqual(partmod.prepare_partition_data(partitions), {
'/dev/sda': {
'label': 'msdos',
'pmbr_boot': False,
'partitions': [
{
'part_id': '/dev/sda1',
'part_type': 'primary',
'fs_type': 'ext2',
'flags': None,
'start': '0%',
'end': '100%',
},
],
},
})
@patch('modules.partmod.__grains__')
@patch('modules.partmod.__salt__')
def test_prepare_partition_data_bios_msdos_no_gap(self, __salt__,
__grains__):
partitions = {
'config': {
'label': 'msdos',
},
'devices': {
'/dev/sda': {
'partitions': [
{
'number': 1,
'size': 'rest',
'type': 'linux',
},
],
},
},
}
__grains__.__getitem__.return_value = False
__salt__.__getitem__.return_value = filters.is_raid
self.assertEqual(partmod.prepare_partition_data(partitions), {
'/dev/sda': {
'label': 'msdos',
'pmbr_boot': False,
'partitions': [
{
'part_id': '/dev/sda1',
'part_type': 'primary',
'fs_type': 'ext2',
'flags': None,
'start': '0%',
'end': '100%',
},
],
},
})
@patch('modules.partmod.__grains__')
@patch('modules.partmod.__salt__')
def test_prepare_partition_data_bios_local_msdos_no_gap(self, __salt__,
__grains__):
partitions = {
'devices': {
'/dev/sda': {
'label': 'msdos',
'partitions': [
{
'number': 1,
'size': 'rest',
'type': 'linux',
},
],
},
},
}
__grains__.__getitem__.return_value = False
__salt__.__getitem__.return_value = filters.is_raid
self.assertEqual(partmod.prepare_partition_data(partitions), {
'/dev/sda': {
'label': 'msdos',
'pmbr_boot': False,
'partitions': [
{
'part_id': '/dev/sda1',
'part_type': 'primary',
'fs_type': 'ext2',
'flags': None,
'start': '0%',
'end': '100%',
},
],
},
})
@patch('modules.partmod.__grains__')
@patch('modules.partmod.__salt__')
def test_prepare_partition_data_bios_gpt_no_gap(self, __salt__,
__grains__):
partitions = {
'config': {
'label': 'gpt',
},
'devices': {
'/dev/sda': {
'partitions': [
{
'number': 1,
'size': 'rest',
'type': 'linux',
},
],
},
},
}
__grains__.__getitem__.return_value = False
__salt__.__getitem__.return_value = filters.is_raid
self.assertEqual(partmod.prepare_partition_data(partitions), {
'/dev/sda': {
'label': 'gpt',
'pmbr_boot': True,
'partitions': [
{
'part_id': '/dev/sda1',
'part_type': 'primary',
'fs_type': 'ext2',
'flags': None,
'start': '0%',
'end': '100%',
},
],
},
})
@patch('modules.partmod.__grains__')
@patch('modules.partmod.__salt__')
def test_prepare_partition_data_bios_local_gpt_no_gap(self, __salt__,
__grains__):
partitions = {
'devices': {
'/dev/sda': {
'label': 'gpt',
'partitions': [
{
'number': 1,
'size': 'rest',
'type': 'linux',
},
],
},
},
}
__grains__.__getitem__.return_value = False
__salt__.__getitem__.return_value = filters.is_raid
self.assertEqual(partmod.prepare_partition_data(partitions), {
'/dev/sda': {
'label': 'gpt',
'pmbr_boot': True,
'partitions': [
{
'part_id': '/dev/sda1',
'part_type': 'primary',
'fs_type': 'ext2',
'flags': None,
'start': '0%',
'end': '100%',
},
],
},
})
@patch('modules.partmod.__grains__')
@patch('modules.partmod.__salt__')
def test_prepare_partition_data_gap(self, __salt__, __grains__):
partitions = {
'config': {
'initial_gap': '1MB',
},
'devices': {
'/dev/sda': {
'partitions': [
{
'number': 1,
'size': 'rest',
'type': 'linux',
},
],
},
},
}
__grains__.__getitem__.return_value = False
__salt__.__getitem__.return_value = filters.is_raid
self.assertEqual(partmod.prepare_partition_data(partitions), {
'/dev/sda': {
'label': 'msdos',
'pmbr_boot': False,
'partitions': [
{
'part_id': '/dev/sda1',
'part_type': 'primary',
'fs_type': 'ext2',
'flags': None,
'start': '1.0MB',
'end': '100%',
},
],
},
})
@patch('modules.partmod.__grains__')
@patch('modules.partmod.__salt__')
def test_prepare_partition_data_local_gap(self, __salt__, __grains__):
partitions = {
'devices': {
'/dev/sda': {
'initial_gap': '1MB',
'partitions': [
{
'number': 1,
'size': 'rest',
'type': 'linux',
},
],
},
},
}
__grains__.__getitem__.return_value = False
__salt__.__getitem__.return_value = filters.is_raid
self.assertEqual(partmod.prepare_partition_data(partitions), {
'/dev/sda': {
'label': 'msdos',
'pmbr_boot': False,
'partitions': [
{
'part_id': '/dev/sda1',
'part_type': 'primary',
'fs_type': 'ext2',
'flags': None,
'start': '1.0MB',
'end': '100%',
},
],
},
})
@patch('modules.partmod.__grains__')
@patch('modules.partmod.__salt__')
def test_prepare_partition_data_fails_rest(self, __salt__, __grains__):
partitions = {
'devices': {
'/dev/sda': {
'partitions': [
{
'number': 1,
'size': 'rest',
'type': 'swap',
},
{
'number': 2,
'size': 'rest',
'type': 'linux',
},
],
},
},
}
__grains__.__getitem__.return_value = False
__salt__.__getitem__.return_value = filters.is_raid
with self.assertRaises(SaltInvocationError) as cm:
partmod.prepare_partition_data(partitions)
self.assertTrue('rest free space' in str(cm.exception))
@patch('modules.partmod.__grains__')
@patch('modules.partmod.__salt__')
def test_prepare_partition_data_fails_units(self, __salt__, __grains__):
partitions = {
'devices': {
'/dev/sda': {
'partitions': [
{
'number': 1,
'size': '1%',
'type': 'swap',
},
{
'number': 2,
'size': '2MB',
'type': 'linux',
},
],
},
},
}
__grains__.__getitem__.return_value = False
__salt__.__getitem__.return_value = filters.is_raid
with self.assertRaises(SaltInvocationError) as cm:
partmod.prepare_partition_data(partitions)
self.assertTrue('Units needs to be' in str(cm.exception))
@patch('modules.partmod.__grains__')
@patch('modules.partmod.__salt__')
def test_prepare_partition_data_efi_partitions(self, __salt__, __grains__):
partitions = {
'devices': {
'/dev/sda': {
'label': 'gpt',
'partitions': [
{
'number': 1,
'size': '500MB',
'type': 'efi',
},
{
'number': 2,
'size': '10000MB',
'type': 'linux',
},
{
'number': 3,
'size': '5000MB',
'type': 'swap',
},
],
},
},
}
__grains__.__getitem__.return_value = True
__salt__.__getitem__.return_value = filters.is_raid
self.assertEqual(partmod.prepare_partition_data(partitions), {
'/dev/sda': {
'label': 'gpt',
'pmbr_boot': False,
'partitions': [
{
'part_id': '/dev/sda1',
'part_type': 'primary',
'fs_type': 'fat16',
'flags': ['esp'],
'start': '0MB',
'end': '500.0MB',
},
{
'part_id': '/dev/sda2',
'part_type': 'primary',
'fs_type': 'ext2',
'flags': None,
'start': '500.0MB',
'end': '10500.0MB',
},
{
'part_id': '/dev/sda3',
'part_type': 'primary',
'fs_type': 'linux-swap',
'flags': None,
'start': '10500.0MB',
'end': '15500.0MB',
},
],
},
})
@patch('modules.partmod.__grains__')
@patch('modules.partmod.__salt__')
def test_prepare_partition_data_bios_multi_label(self, __salt__,
__grains__):
partitions = {
'config': {
'label': 'msdos',
},
'devices': {
'/dev/sda': {
'partitions': [
{
'number': 1,
'size': 'rest',
'type': 'linux',
},
],
},
'/dev/sdb': {
'label': 'gpt',
'partitions': [
{
'number': 1,
'size': 'rest',
'type': 'linux',
},
],
},
},
}
__grains__.__getitem__.return_value = False
__salt__.__getitem__.return_value = filters.is_raid
self.assertEqual(partmod.prepare_partition_data(partitions), {
'/dev/sda': {
'label': 'msdos',
'pmbr_boot': False,
'partitions': [
{
'part_id': '/dev/sda1',
'part_type': 'primary',
'fs_type': 'ext2',
'flags': None,
'start': '0%',
'end': '100%',
},
],
},
'/dev/sdb': {
'label': 'gpt',
'pmbr_boot': True,
'partitions': [
{
'part_id': '/dev/sdb1',
'part_type': 'primary',
'fs_type': 'ext2',
'flags': None,
'start': '0%',
'end': '100%',
},
],
},
})
@patch('modules.partmod.__grains__')
@patch('modules.partmod.__salt__')
def test_prepare_partition_data_multi_gap(self, __salt__, __grains__):
partitions = {
'config': {
'initial_gap': '1MB',
},
'devices': {
'/dev/sda': {
'partitions': [
{
'number': 1,
'size': 'rest',
'type': 'linux',
},
],
},
'/dev/sdb': {
'initial_gap': '2MB',
'partitions': [
{
'number': 1,
'size': '20MB',
'type': 'linux',
},
],
},
},
}
__grains__.__getitem__.return_value = False
__salt__.__getitem__.return_value = filters.is_raid
self.assertEqual(partmod.prepare_partition_data(partitions), {
'/dev/sda': {
'label': 'msdos',
'pmbr_boot': False,
'partitions': [
{
'part_id': '/dev/sda1',
'part_type': 'primary',
'fs_type': 'ext2',
'flags': None,
'start': '1.0MB',
'end': '100%',
},
],
},
'/dev/sdb': {
'label': 'msdos',
'pmbr_boot': False,
'partitions': [
{
'part_id': '/dev/sdb1',
'part_type': 'primary',
'fs_type': 'ext2',
'flags': None,
'start': '2.0MB',
'end': '22.0MB',
},
],
},
})
@patch('modules.partmod.__grains__')
@patch('modules.partmod.__salt__')
def test_prepare_partition_data_lvm(self, __salt__, __grains__):
partitions = {
'devices': {
'/dev/sda': {
'partitions': [
{
'number': 1,
'size': 'rest',
'type': 'lvm',
},
],
},
'/dev/sdb': {
'partitions': [
{
'number': 1,
'size': 'rest',
'type': 'lvm',
},
],
},
'/dev/sdc': {
'partitions': [
{
'number': 1,
'size': 'rest',
'type': 'linux',
},
],
},
},
}
__grains__.__getitem__.return_value = False
__salt__.__getitem__.return_value = filters.is_raid
self.assertEqual(partmod.prepare_partition_data(partitions), {
'/dev/sda': {
'label': 'msdos',
'pmbr_boot': False,
'partitions': [
{
'part_id': '/dev/sda1',
'part_type': 'primary',
'fs_type': 'ext2',
'flags': ['lvm'],
'start': '0%',
'end': '100%',
},
],
},
'/dev/sdb': {
'label': 'msdos',
'pmbr_boot': False,
'partitions': [
{
'part_id': '/dev/sdb1',
'part_type': 'primary',
'fs_type': 'ext2',
'flags': ['lvm'],
'start': '0%',
'end': '100%',
},
],
},
'/dev/sdc': {
'label': 'msdos',
'pmbr_boot': False,
'partitions': [
{
'part_id': '/dev/sdc1',
'part_type': 'primary',
'fs_type': 'ext2',
'flags': None,
'start': '0%',
'end': '100%',
},
],
},
})
@patch('modules.partmod.__grains__')
@patch('modules.partmod.__salt__')
def test_prepare_partition_data_raid(self, __salt__, __grains__):
partitions = {
'devices': {
'/dev/sda': {
'partitions': [
{
'number': 1,
'size': 'rest',
'type': 'raid',
},
],
},
'/dev/sdb': {
'partitions': [
{
'number': 1,
'size': 'rest',
'type': 'raid',
},
],
},
'/dev/sdc': {
'partitions': [
{
'number': 1,
'size': 'rest',
'type': 'linux',
},
],
},
},
}
__grains__.__getitem__.return_value = False
__salt__.__getitem__.return_value = filters.is_raid
self.assertEqual(partmod.prepare_partition_data(partitions), {
'/dev/sda': {
'label': 'msdos',
'pmbr_boot': False,
'partitions': [
{
'part_id': '/dev/sda1',
'part_type': 'primary',
'fs_type': 'ext2',
'flags': ['raid'],
'start': '0%',
'end': '100%',
},
],
},
'/dev/sdb': {
'label': 'msdos',
'pmbr_boot': False,
'partitions': [
{
'part_id': '/dev/sdb1',
'part_type': 'primary',
'fs_type': 'ext2',
'flags': ['raid'],
'start': '0%',
'end': '100%',
},
],
},
'/dev/sdc': {
'label': 'msdos',
'pmbr_boot': False,
'partitions': [
{
'part_id': '/dev/sdc1',
'part_type': 'primary',
'fs_type': 'ext2',
'flags': None,
'start': '0%',
'end': '100%',
},
],
},
})
@patch('modules.partmod.__grains__')
@patch('modules.partmod.__salt__')
def test_prepare_partition_data_bios_gpt_post_raid(self, __salt__, __grains__):
partitions = {
'devices': {
'/dev/md0': {
'label': 'gpt',
'partitions': [
{
'number': 1,
'size': '8MB',
'type': 'boot',
},
{
'number': 2,
'size': 'rest',
'type': 'linux',
},
],
},
},
}
__grains__.__getitem__.return_value = False
__salt__.__getitem__.return_value = filters.is_raid
self.assertEqual(partmod.prepare_partition_data(partitions), {
'/dev/md0': {
'label': 'gpt',
'pmbr_boot': True,
'partitions': [
{
'part_id': '/dev/md0p1',
'part_type': 'primary',
'fs_type': 'ext2',
'flags': ['bios_grub'],
'start': '0MB',
'end': '8.0MB',
},
{
'part_id': '/dev/md0p2',
'part_type': 'primary',
'fs_type': 'ext2',
'flags': None,
'start': '8.0MB',
'end': '100%',
},
],
},
})
if __name__ == '__main__':
unittest.main()
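# All the expected dictionaries above follow one offset rule: the first
# partition starts at the initial gap (or 0), each subsequent start is the
# running sum of the previous sizes, and a 'rest' partition ends at '100%'.
# A standalone sketch of that arithmetic (hypothetical helper, not the
# partmod implementation; sizes in MB):
def _offsets_sketch(sizes, initial_gap=0.0):
    offsets, cursor = [], float(initial_gap)
    for size in sizes:
        if size == 'rest':
            offsets.append(('%.1fMB' % cursor, '100%'))
            break
        start, cursor = cursor, cursor + float(size)
        offsets.append(('%.1fMB' % start, '%.1fMB' % cursor))
    return offsets

# _offsets_sketch([500, 10000, 5000]) reproduces the 0 -> 500.0 -> 10500.0
# -> 15500.0 MB boundary sequence asserted in the EFI test above (the
# module itself formats the very first start as '0MB').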
==== File: quadpy/pyramid/__init__.py (Python, 329 bytes) | repo: dariusarnold/quadpy @ 9dc7c1ebff99d15ae57ed9195cde94d97a599be8 | license: MIT ====
from ._felippa import (
felippa_1,
felippa_2,
felippa_3,
felippa_4,
felippa_5,
felippa_6,
felippa_7,
felippa_8,
felippa_9,
)
__all__ = [
"felippa_1",
"felippa_2",
"felippa_3",
"felippa_4",
"felippa_5",
"felippa_6",
"felippa_7",
"felippa_8",
"felippa_9",
]
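# The import list and __all__ above enumerate the nine Felippa schemes by
# hand, which keeps the names greppable. An equivalent programmatic form
# (a sketch; the literal list is what the package actually ships):
_felippa_names = ["felippa_%d" % k for k in range(1, 10)]
assert _felippa_names == __all__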
==== File: tests/test_questionnaireresponse.py (Python, 69,631 bytes) | repo: glichtner/fhir.resources @ 94896d8f8a0b7dd69253762aab968f4fd6eb69a0 | license: BSD-3-Clause ====
# -*- coding: utf-8 -*-
"""
Profile: http://hl7.org/fhir/StructureDefinition/QuestionnaireResponse
Release: R5
Version: 4.5.0
Build ID: 0d95498
Last updated: 2021-04-03T00:34:11.075+00:00
"""
from pydantic.validators import bytes_validator # noqa: F401
from fhir.resources import fhirtypes # noqa: F401
from fhir.resources import questionnaireresponse
def impl_questionnaireresponse_1(inst):
assert inst.author.reference == "Practitioner/f201"
assert inst.authored == fhirtypes.DateTime.validate("2013-06-18T00:00:00+01:00")
assert inst.id == "f201"
assert inst.item[0].answer[0].valueBoolean is True
assert inst.item[0].linkId == "1"
assert inst.item[0].text == "Do you have allergies?"
assert inst.item[1].item[0].answer[0].valueString == "Male"
assert inst.item[1].item[0].linkId == "2.1"
assert inst.item[1].item[0].text == "What is your gender?"
assert inst.item[1].item[1].answer[0].valueDate == fhirtypes.Date.validate(
"1960-03-13"
)
assert inst.item[1].item[1].linkId == "2.2"
assert inst.item[1].item[1].text == "What is your date of birth?"
assert inst.item[1].item[2].answer[0].valueString == "The Netherlands"
assert inst.item[1].item[2].linkId == "2.3"
assert inst.item[1].item[2].text == "What is your country of birth?"
assert inst.item[1].item[3].answer[0].valueString == "married"
assert inst.item[1].item[3].linkId == "2.4"
assert inst.item[1].item[3].text == "What is your marital status?"
assert inst.item[1].linkId == "2"
assert inst.item[1].text == "General questions"
assert inst.item[2].item[0].answer[0].valueBoolean is False
assert inst.item[2].item[0].linkId == "3.1"
assert inst.item[2].item[0].text == "Do you smoke?"
assert inst.item[2].item[1].answer[0].valueBoolean is False
assert inst.item[2].item[1].linkId == "3.2"
assert inst.item[2].item[1].text == "Do you drink alchohol?"
assert inst.item[2].linkId == "3"
assert inst.item[2].text == "Intoxications"
assert inst.meta.tag[0].code == "HTEST"
assert inst.meta.tag[0].display == "test health data"
assert (
inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
)
assert inst.source.reference == "Practitioner/f201"
assert inst.status == "completed"
assert inst.subject.display == "Roel"
assert inst.subject.reference == "Patient/f201"
assert inst.text.div == (
"<div " 'xmlns="http://www.w3.org/1999/xhtml"><div>todo</div></div>'
)
assert inst.text.status == "generated"
def test_questionnaireresponse_1(base_settings):
"""No. 1 tests collection for QuestionnaireResponse.
Test File: questionnaireresponse-example-f201-lifelines.json
"""
filename = (
base_settings["unittest_data_dir"]
/ "questionnaireresponse-example-f201-lifelines.json"
)
inst = questionnaireresponse.QuestionnaireResponse.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "QuestionnaireResponse" == inst.resource_type
impl_questionnaireresponse_1(inst)
# Round-trip check: regenerate data from the instance itself and create it again.
data = inst.dict()
assert "QuestionnaireResponse" == data["resourceType"]
inst2 = questionnaireresponse.QuestionnaireResponse(**data)
impl_questionnaireresponse_1(inst2)
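# The test above is a round-trip check: parse the JSON fixture, serialize
# with .dict(), rebuild the model from that dict, and re-run the same
# assertions, so any field dropped in serialization fails loudly. A minimal
# generic sketch of the pattern (hypothetical helper name):
def _roundtrip_sketch(model_cls, inst):
    data = inst.dict()          # pydantic v1-style serialization
    return model_cls(**data)    # rebuild; validation re-runs here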
def impl_questionnaireresponse_2(inst):
assert inst.author.reference == "#questauth"
assert inst.authored == fhirtypes.DateTime.validate("2013-02-19T14:15:00-05:00")
assert inst.basedOn[0].reference == "#order"
assert inst.contained[0].id == "patsub"
assert inst.contained[1].id == "order"
assert inst.contained[2].id == "questauth"
assert inst.encounter.reference == "Encounter/example"
assert inst.id == "3141"
assert (
inst.identifier.system
== "http://example.org/fhir/NamingSystem/questionnaire-ids"
)
assert inst.identifier.value == "Q12349876"
assert (
inst.item[0].item[0].answer[0].item[0].item[0].answer[0].valueCoding.code == "1"
)
assert (
inst.item[0].item[0].answer[0].item[0].item[0].answer[0].valueCoding.system
== "http://cancer.questionnaire.org/system/code/yesno"
)
assert inst.item[0].item[0].answer[0].item[0].item[0].linkId == "1.1.1.1"
assert (
inst.item[0].item[0].answer[0].item[0].item[1].answer[0].valueCoding.code == "1"
)
assert (
inst.item[0].item[0].answer[0].item[0].item[1].answer[0].valueCoding.system
== "http://cancer.questionnaire.org/system/code/yesno"
)
assert inst.item[0].item[0].answer[0].item[0].item[1].linkId == "1.1.1.2"
assert (
inst.item[0].item[0].answer[0].item[0].item[2].answer[0].valueCoding.code == "0"
)
assert (
inst.item[0].item[0].answer[0].item[0].item[2].answer[0].valueCoding.system
== "http://cancer.questionnaire.org/system/code/yesno"
)
assert inst.item[0].item[0].answer[0].item[0].item[2].linkId == "1.1.1.3"
assert inst.item[0].item[0].answer[0].item[0].linkId == "1.1.1"
assert inst.item[0].item[0].answer[0].valueCoding.code == "1"
assert inst.item[0].item[0].answer[0].valueCoding.display == "Yes"
assert (
inst.item[0].item[0].answer[0].valueCoding.system
== "http://cancer.questionnaire.org/system/code/yesno"
)
assert inst.item[0].item[0].linkId == "1.1"
assert inst.item[0].linkId == "1"
assert inst.meta.tag[0].code == "HTEST"
assert inst.meta.tag[0].display == "test health data"
assert (
inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
)
assert inst.partOf[0].reference == "Procedure/f201"
assert inst.status == "completed"
assert inst.subject.reference == "#patsub"
assert inst.text.status == "generated"
def test_questionnaireresponse_2(base_settings):
"""No. 2 tests collection for QuestionnaireResponse.
Test File: questionnaireresponse-example.json
"""
filename = base_settings["unittest_data_dir"] / "questionnaireresponse-example.json"
inst = questionnaireresponse.QuestionnaireResponse.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "QuestionnaireResponse" == inst.resource_type
impl_questionnaireresponse_2(inst)
# Round-trip check: regenerate data from the instance itself and create it again.
data = inst.dict()
assert "QuestionnaireResponse" == data["resourceType"]
inst2 = questionnaireresponse.QuestionnaireResponse(**data)
impl_questionnaireresponse_2(inst2)
def impl_questionnaireresponse_3(inst):
assert inst.authored == fhirtypes.DateTime.validate("2008-01-17")
assert inst.id == "ussg-fht-answers"
assert inst.item[0].item[0].answer[0].valueDate == fhirtypes.Date.validate(
"2008-01-17"
)
assert inst.item[0].item[0].linkId == "0.1"
assert inst.item[0].item[0].text == "Date Done"
assert inst.item[0].linkId == "0"
assert inst.item[1].definition == "http://loinc.org/fhir/DataElement/54126-8"
assert inst.item[1].item[0].item[0].answer[0].valueString == "Annie Proband"
assert (
inst.item[1].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54125-0"
)
assert inst.item[1].item[0].item[0].linkId == "1.1.1"
assert inst.item[1].item[0].item[0].text == "Name"
assert inst.item[1].item[0].item[1].answer[0].valueCoding.code == "LA3-6"
assert inst.item[1].item[0].item[1].answer[0].valueCoding.display == "Female"
assert (
inst.item[1].item[0].item[1].answer[0].valueCoding.system == "http://loinc.org"
)
assert (
inst.item[1].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54131-8"
)
assert inst.item[1].item[0].item[1].linkId == "1.1.2"
assert inst.item[1].item[0].item[1].text == "Gender"
assert inst.item[1].item[0].item[2].answer[0].valueDate == fhirtypes.Date.validate(
"1966-04-04"
)
assert (
inst.item[1].item[0].item[2].definition
== "http://loinc.org/fhir/DataElement/21112-8"
)
assert inst.item[1].item[0].item[2].linkId == "1.1.3"
assert inst.item[1].item[0].item[2].text == "Date of Birth"
assert inst.item[1].item[0].item[3].answer[0].valueCoding.code == "LA32-8"
assert inst.item[1].item[0].item[3].answer[0].valueCoding.display == "No"
assert (
inst.item[1].item[0].item[3].answer[0].valueCoding.system == "http://loinc.org"
)
assert (
inst.item[1].item[0].item[3].definition
== "http://loinc.org/fhir/DataElement/54132-6"
)
assert inst.item[1].item[0].item[3].linkId == "1.1.4"
assert inst.item[1].item[0].item[3].text == "Were you born a twin?"
assert inst.item[1].item[0].item[4].answer[0].valueCoding.code == "LA32-8"
assert inst.item[1].item[0].item[4].answer[0].valueCoding.display == "No"
assert (
inst.item[1].item[0].item[4].answer[0].valueCoding.system == "http://loinc.org"
)
assert (
inst.item[1].item[0].item[4].definition
== "http://loinc.org/fhir/DataElement/54128-4"
)
assert inst.item[1].item[0].item[4].linkId == "1.1.5"
assert inst.item[1].item[0].item[4].text == "Were you adopted?"
assert inst.item[1].item[0].item[5].answer[0].valueCoding.code == "LA32-8"
assert inst.item[1].item[0].item[5].answer[0].valueCoding.display == "No"
assert (
inst.item[1].item[0].item[5].answer[0].valueCoding.system == "http://loinc.org"
)
assert (
inst.item[1].item[0].item[5].definition
== "http://loinc.org/fhir/DataElement/54135-9"
)
assert inst.item[1].item[0].item[5].linkId == "1.1.6"
assert inst.item[1].item[0].item[5].text == (
"Are your parents related to each other in any way other than" " marriage?"
)
assert (
inst.item[1]
.item[0]
.item[6]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.code
== "[in_i]"
)
assert (
inst.item[1]
.item[0]
.item[6]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.display
== "inches"
)
assert (
inst.item[1]
.item[0]
.item[6]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.system
== "http://unitsofmeasure.org"
)
assert inst.item[1].item[0].item[6].answer[0].item[0].item[0].linkId == "1.1.7.1.1"
assert inst.item[1].item[0].item[6].answer[0].item[0].item[0].text == "Units"
assert inst.item[1].item[0].item[6].answer[0].item[0].linkId == "1.1.7.1"
assert float(inst.item[1].item[0].item[6].answer[0].valueDecimal) == float(63)
assert (
inst.item[1].item[0].item[6].definition
== "http://loinc.org/fhir/DataElement/8302-2"
)
assert inst.item[1].item[0].item[6].linkId == "1.1.7"
assert inst.item[1].item[0].item[6].text == "Height"
assert (
inst.item[1]
.item[0]
.item[7]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.code
== "[lb_av]"
)
assert (
inst.item[1]
.item[0]
.item[7]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.display
== "pounds"
)
assert (
inst.item[1]
.item[0]
.item[7]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.system
== "http://unitsofmeasure.org"
)
assert inst.item[1].item[0].item[7].answer[0].item[0].item[0].linkId == "1.1.8.1.1"
assert inst.item[1].item[0].item[7].answer[0].item[0].item[0].text == "Units"
assert inst.item[1].item[0].item[7].answer[0].item[0].linkId == "1.1.8.1"
assert float(inst.item[1].item[0].item[7].answer[0].valueDecimal) == float(127)
assert (
inst.item[1].item[0].item[7].definition
== "http://loinc.org/fhir/DataElement/29463-7"
)
assert inst.item[1].item[0].item[7].linkId == "1.1.8"
assert inst.item[1].item[0].item[7].text == "Weight"
assert float(inst.item[1].item[0].item[8].answer[0].valueDecimal) == float(22.5)
assert (
inst.item[1].item[0].item[8].definition
== "http://loinc.org/fhir/DataElement/39156-5"
)
assert inst.item[1].item[0].item[8].linkId == "1.1.9"
assert inst.item[1].item[0].item[8].text == "Body mass index (BMI) [Ratio]"
assert inst.item[1].item[0].item[9].answer[0].valueCoding.code == "LA4457-3"
assert inst.item[1].item[0].item[9].answer[0].valueCoding.display == "White"
assert (
inst.item[1].item[0].item[9].answer[0].valueCoding.system == "http://loinc.org"
)
assert (
inst.item[1].item[0].item[9].definition
== "http://loinc.org/fhir/DataElement/54134-2"
)
assert inst.item[1].item[0].item[9].linkId == "1.1.10"
assert inst.item[1].item[0].item[9].text == "Race"
assert inst.item[1].item[0].linkId == "1.1"
assert inst.item[1].linkId == "1"
assert inst.item[1].text == "Your health information"
assert inst.item[2].definition == "http://loinc.org/fhir/DataElement/54114-4"
assert inst.item[2].item[0].item[0].answer[0].valueCoding.code == "LA10405-1"
assert inst.item[2].item[0].item[0].answer[0].valueCoding.display == "Daughter"
assert (
inst.item[2].item[0].item[0].answer[0].valueCoding.system == "http://loinc.org"
)
assert (
inst.item[2].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54136-7"
)
assert inst.item[2].item[0].item[0].linkId == "2.1.1.1"
assert inst.item[2].item[0].item[0].text == "Relationship to you"
assert inst.item[2].item[0].item[1].answer[0].valueString == "Susan"
assert (
inst.item[2].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54138-3"
)
assert inst.item[2].item[0].item[1].linkId == "2.1.1.2"
assert inst.item[2].item[0].item[1].text == "Name"
assert inst.item[2].item[0].item[2].answer[0].valueCoding.code == "LA3-6"
assert inst.item[2].item[0].item[2].answer[0].valueCoding.display == "Female"
assert (
inst.item[2].item[0].item[2].answer[0].valueCoding.system == "http://loinc.org"
)
assert (
inst.item[2].item[0].item[2].definition
== "http://loinc.org/fhir/DataElement/54123-5"
)
assert inst.item[2].item[0].item[2].linkId == "2.1.1.3"
assert inst.item[2].item[0].item[2].text == "Gender"
assert float(
inst.item[2].item[0].item[3].answer[0].item[0].item[0].answer[0].valueDecimal
) == float(17)
assert (
inst.item[2].item[0].item[3].answer[0].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54141-7"
)
assert (
inst.item[2].item[0].item[3].answer[0].item[0].item[0].linkId == "2.1.1.4.2.2"
)
assert inst.item[2].item[0].item[3].answer[0].item[0].item[0].text == "Age"
assert inst.item[2].item[0].item[3].answer[0].item[0].linkId == "2.1.1.4.2"
assert inst.item[2].item[0].item[3].answer[0].valueCoding.code == "LA33-6"
assert inst.item[2].item[0].item[3].answer[0].valueCoding.display == "Yes"
assert (
inst.item[2].item[0].item[3].answer[0].valueCoding.system == "http://loinc.org"
)
assert (
inst.item[2].item[0].item[3].definition
== "http://loinc.org/fhir/DataElement/54139-1"
)
assert inst.item[2].item[0].item[3].linkId == "2.1.1.4"
assert inst.item[2].item[0].item[3].text == "Living?"
assert inst.item[2].item[0].item[4].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[0].item[4].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[0].item[4].answer[0].valueCoding.system == "http://loinc.org"
)
assert (
inst.item[2].item[0].item[4].definition
== "http://loinc.org/fhir/DataElement/54121-9"
)
assert inst.item[2].item[0].item[4].linkId == "2.1.1.5"
assert inst.item[2].item[0].item[4].text == "Was this person born a twin?"
assert inst.item[2].item[0].item[5].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[0].item[5].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[0].item[5].answer[0].valueCoding.system == "http://loinc.org"
)
assert (
inst.item[2].item[0].item[5].definition
== "http://loinc.org/fhir/DataElement/54122-7"
)
assert inst.item[2].item[0].item[5].linkId == "2.1.1.6"
assert inst.item[2].item[0].item[5].text == "Was this person adopted?"
assert inst.item[2].item[0].linkId == "2.1"
assert (
inst.item[2].item[1].item[0].item[0].answer[0].valueCoding.code == "LA10415-0"
)
assert (
inst.item[2].item[1].item[0].item[0].answer[0].valueCoding.display == "Brother"
)
assert (
inst.item[2].item[1].item[0].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[1].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54136-7"
)
assert inst.item[2].item[1].item[0].item[0].linkId == "2.1.1.1"
assert inst.item[2].item[1].item[0].item[0].text == "Relationship to you"
assert inst.item[2].item[1].item[0].item[1].answer[0].valueString == "Brian"
assert (
inst.item[2].item[1].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54138-3"
)
assert inst.item[2].item[1].item[0].item[1].linkId == "2.1.1.2"
assert inst.item[2].item[1].item[0].item[1].text == "Name"
assert inst.item[2].item[1].item[0].item[2].answer[0].valueCoding.code == "LA2-8"
assert inst.item[2].item[1].item[0].item[2].answer[0].valueCoding.display == "Male"
assert (
inst.item[2].item[1].item[0].item[2].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[1].item[0].item[2].definition
== "http://loinc.org/fhir/DataElement/54123-5"
)
assert inst.item[2].item[1].item[0].item[2].linkId == "2.1.1.3"
assert inst.item[2].item[1].item[0].item[2].text == "Gender"
assert float(
inst.item[2]
.item[1]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueDecimal
) == float(32)
assert (
inst.item[2].item[1].item[0].item[3].answer[0].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54141-7"
)
assert (
inst.item[2].item[1].item[0].item[3].answer[0].item[0].item[0].linkId
== "2.1.1.4.2.2"
)
assert inst.item[2].item[1].item[0].item[3].answer[0].item[0].item[0].text == "Age"
assert inst.item[2].item[1].item[0].item[3].answer[0].item[0].linkId == "2.1.1.4.2"
assert inst.item[2].item[1].item[0].item[3].answer[0].valueCoding.code == "LA33-6"
assert inst.item[2].item[1].item[0].item[3].answer[0].valueCoding.display == "Yes"
assert (
inst.item[2].item[1].item[0].item[3].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[1].item[0].item[3].definition
== "http://loinc.org/fhir/DataElement/54139-1"
)
assert inst.item[2].item[1].item[0].item[3].linkId == "2.1.1.4"
assert inst.item[2].item[1].item[0].item[3].text == "Living?"
assert inst.item[2].item[1].item[0].item[4].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[1].item[0].item[4].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[1].item[0].item[4].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[1].item[0].item[4].definition
== "http://loinc.org/fhir/DataElement/54121-9"
)
assert inst.item[2].item[1].item[0].item[4].linkId == "2.1.1.5"
assert inst.item[2].item[1].item[0].item[4].text == "Was this person born a twin?"
assert inst.item[2].item[1].item[0].item[5].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[1].item[0].item[5].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[1].item[0].item[5].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[1].item[0].item[5].definition
== "http://loinc.org/fhir/DataElement/54122-7"
)
assert inst.item[2].item[1].item[0].item[5].linkId == "2.1.1.6"
assert inst.item[2].item[1].item[0].item[5].text == "Was this person adopted?"
assert inst.item[2].item[1].item[0].linkId == "2.1.1"
assert (
inst.item[2].item[1].item[1].item[0].answer[0].valueCoding.code == "LA10550-4"
)
assert (
inst.item[2].item[1].item[1].item[0].answer[0].valueCoding.display
== "-- Other Cancer"
)
assert (
inst.item[2].item[1].item[1].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert inst.item[2].item[1].item[1].item[0].linkId == "2.1.2.1"
assert inst.item[2].item[1].item[1].item[0].text == "Disease or Condition"
assert (
inst.item[2].item[1].item[1].item[1].answer[0].valueCoding.code == "LA10397-0"
)
assert inst.item[2].item[1].item[1].item[1].answer[0].valueCoding.display == "30-39"
assert (
inst.item[2].item[1].item[1].item[1].answer[0].valueCoding.system
== "http://loinc.org"
)
assert inst.item[2].item[1].item[1].item[1].linkId == "2.1.2.2"
assert inst.item[2].item[1].item[1].item[1].text == "Age at Diagnosis"
assert inst.item[2].item[1].item[1].linkId == "2.1.2"
assert (
inst.item[2].item[1].item[1].text == "This family member's history of disease"
)
assert inst.item[2].item[1].linkId == "2.1"
assert (
inst.item[2].item[2].item[0].item[0].answer[0].valueCoding.code == "LA10418-4"
)
assert (
inst.item[2].item[2].item[0].item[0].answer[0].valueCoding.display == "Sister"
)
assert (
inst.item[2].item[2].item[0].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[2].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54136-7"
)
assert inst.item[2].item[2].item[0].item[0].linkId == "2.1.1.1"
assert inst.item[2].item[2].item[0].item[0].text == "Relationship to you"
assert inst.item[2].item[2].item[0].item[1].answer[0].valueString == "Janet"
assert (
inst.item[2].item[2].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54138-3"
)
assert inst.item[2].item[2].item[0].item[1].linkId == "2.1.1.2"
assert inst.item[2].item[2].item[0].item[1].text == "Name"
assert inst.item[2].item[2].item[0].item[2].answer[0].valueCoding.code == "LA3-6"
assert (
inst.item[2].item[2].item[0].item[2].answer[0].valueCoding.display == "Female"
)
assert (
inst.item[2].item[2].item[0].item[2].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[2].item[0].item[2].definition
== "http://loinc.org/fhir/DataElement/54123-5"
)
assert inst.item[2].item[2].item[0].item[2].linkId == "2.1.1.3"
assert inst.item[2].item[2].item[0].item[2].text == "Gender"
assert float(
inst.item[2]
.item[2]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueDecimal
) == float(36)
assert (
inst.item[2].item[2].item[0].item[3].answer[0].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54141-7"
)
assert (
inst.item[2].item[2].item[0].item[3].answer[0].item[0].item[0].linkId
== "2.1.1.4.2.2"
)
assert inst.item[2].item[2].item[0].item[3].answer[0].item[0].item[0].text == "Age"
assert inst.item[2].item[2].item[0].item[3].answer[0].item[0].linkId == "2.1.1.4.2"
assert inst.item[2].item[2].item[0].item[3].answer[0].valueCoding.code == "LA33-6"
assert inst.item[2].item[2].item[0].item[3].answer[0].valueCoding.display == "Yes"
assert (
inst.item[2].item[2].item[0].item[3].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[2].item[0].item[3].definition
== "http://loinc.org/fhir/DataElement/54139-1"
)
assert inst.item[2].item[2].item[0].item[3].linkId == "2.1.1.4"
assert inst.item[2].item[2].item[0].item[3].text == "Living?"
assert inst.item[2].item[2].item[0].item[4].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[2].item[0].item[4].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[2].item[0].item[4].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[2].item[0].item[4].definition
== "http://loinc.org/fhir/DataElement/54121-9"
)
assert inst.item[2].item[2].item[0].item[4].linkId == "2.1.1.5"
assert inst.item[2].item[2].item[0].item[4].text == "Was this person born a twin?"
assert inst.item[2].item[2].item[0].item[5].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[2].item[0].item[5].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[2].item[0].item[5].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[2].item[0].item[5].definition
== "http://loinc.org/fhir/DataElement/54122-7"
)
assert inst.item[2].item[2].item[0].item[5].linkId == "2.1.1.6"
assert inst.item[2].item[2].item[0].item[5].text == "Was this person adopted?"
assert inst.item[2].item[2].item[0].linkId == "2.1.1"
assert (
inst.item[2].item[2].item[1].item[0].answer[0].valueCoding.code == "LA10536-3"
)
assert (
inst.item[2].item[2].item[1].item[0].answer[0].valueCoding.display
== "-- Breast Cancer"
)
assert (
inst.item[2].item[2].item[1].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert inst.item[2].item[2].item[1].item[0].linkId == "2.1.2.1"
assert inst.item[2].item[2].item[1].item[0].text == "Disease or Condition"
assert (
inst.item[2].item[2].item[1].item[1].answer[0].valueCoding.code == "LA10397-0"
)
assert inst.item[2].item[2].item[1].item[1].answer[0].valueCoding.display == "30-39"
assert (
inst.item[2].item[2].item[1].item[1].answer[0].valueCoding.system
== "http://loinc.org"
)
assert inst.item[2].item[2].item[1].item[1].linkId == "2.1.2.2"
assert inst.item[2].item[2].item[1].item[1].text == "Age at Diagnosis"
assert inst.item[2].item[2].item[1].linkId == "2.1.2"
assert (
inst.item[2].item[2].item[1].text == "This family member's history of disease"
)
assert inst.item[2].item[2].linkId == "2.1"
assert (
inst.item[2].item[3].item[0].item[0].answer[0].valueCoding.code == "LA10419-2"
)
assert (
inst.item[2].item[3].item[0].item[0].answer[0].valueCoding.display == "Nephew"
)
assert (
inst.item[2].item[3].item[0].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[3].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54136-7"
)
assert inst.item[2].item[3].item[0].item[0].linkId == "2.1.1.1"
assert inst.item[2].item[3].item[0].item[0].text == "Relationship to you"
assert inst.item[2].item[3].item[0].item[1].answer[0].valueString == "Ian"
assert (
inst.item[2].item[3].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54138-3"
)
assert inst.item[2].item[3].item[0].item[1].linkId == "2.1.1.2"
assert inst.item[2].item[3].item[0].item[1].text == "Name"
assert inst.item[2].item[3].item[0].item[2].answer[0].valueCoding.code == "LA2-8"
assert inst.item[2].item[3].item[0].item[2].answer[0].valueCoding.display == "Male"
assert (
inst.item[2].item[3].item[0].item[2].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[3].item[0].item[2].definition
== "http://loinc.org/fhir/DataElement/54123-5"
)
assert inst.item[2].item[3].item[0].item[2].linkId == "2.1.1.3"
assert inst.item[2].item[3].item[0].item[2].text == "Gender"
assert float(
inst.item[2]
.item[3]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueDecimal
) == float(16)
assert (
inst.item[2].item[3].item[0].item[3].answer[0].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54141-7"
)
assert (
inst.item[2].item[3].item[0].item[3].answer[0].item[0].item[0].linkId
== "2.1.1.4.2.2"
)
assert inst.item[2].item[3].item[0].item[3].answer[0].item[0].item[0].text == "Age"
assert inst.item[2].item[3].item[0].item[3].answer[0].item[0].linkId == "2.1.1.4.2"
assert inst.item[2].item[3].item[0].item[3].answer[0].valueCoding.code == "LA33-6"
assert inst.item[2].item[3].item[0].item[3].answer[0].valueCoding.display == "Yes"
assert (
inst.item[2].item[3].item[0].item[3].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[3].item[0].item[3].definition
== "http://loinc.org/fhir/DataElement/54139-1"
)
assert inst.item[2].item[3].item[0].item[3].linkId == "2.1.1.4"
assert inst.item[2].item[3].item[0].item[3].text == "Living?"
assert inst.item[2].item[3].item[0].item[4].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[3].item[0].item[4].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[3].item[0].item[4].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[3].item[0].item[4].definition
== "http://loinc.org/fhir/DataElement/54121-9"
)
assert inst.item[2].item[3].item[0].item[4].linkId == "2.1.1.5"
assert inst.item[2].item[3].item[0].item[4].text == "Was this person born a twin?"
assert inst.item[2].item[3].item[0].item[5].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[3].item[0].item[5].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[3].item[0].item[5].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[3].item[0].item[5].definition
== "http://loinc.org/fhir/DataElement/54122-7"
)
assert inst.item[2].item[3].item[0].item[5].linkId == "2.1.1.6"
assert inst.item[2].item[3].item[0].item[5].text == "Was this person adopted?"
assert inst.item[2].item[3].item[0].linkId == "2.1.1"
assert inst.item[2].item[3].linkId == "2.1"
assert (
inst.item[2].item[4].item[0].item[0].answer[0].valueCoding.code == "LA10420-0"
)
assert inst.item[2].item[4].item[0].item[0].answer[0].valueCoding.display == "Niece"
assert (
inst.item[2].item[4].item[0].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[4].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54136-7"
)
assert inst.item[2].item[4].item[0].item[0].linkId == "2.1.1.1"
assert inst.item[2].item[4].item[0].item[0].text == "Relationship to you"
assert inst.item[2].item[4].item[0].item[1].answer[0].valueString == "Helen"
assert (
inst.item[2].item[4].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54138-3"
)
assert inst.item[2].item[4].item[0].item[1].linkId == "2.1.1.2"
assert inst.item[2].item[4].item[0].item[1].text == "Name"
assert inst.item[2].item[4].item[0].item[2].answer[0].valueCoding.code == "LA3-6"
assert (
inst.item[2].item[4].item[0].item[2].answer[0].valueCoding.display == "Female"
)
assert (
inst.item[2].item[4].item[0].item[2].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[4].item[0].item[2].definition
== "http://loinc.org/fhir/DataElement/54123-5"
)
assert inst.item[2].item[4].item[0].item[2].linkId == "2.1.1.3"
assert inst.item[2].item[4].item[0].item[2].text == "Gender"
assert float(
inst.item[2]
.item[4]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueDecimal
) == float(15)
assert (
inst.item[2].item[4].item[0].item[3].answer[0].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54141-7"
)
assert (
inst.item[2].item[4].item[0].item[3].answer[0].item[0].item[0].linkId
== "2.1.1.4.2.2"
)
assert inst.item[2].item[4].item[0].item[3].answer[0].item[0].item[0].text == "Age"
assert inst.item[2].item[4].item[0].item[3].answer[0].item[0].linkId == "2.1.1.4.2"
assert inst.item[2].item[4].item[0].item[3].answer[0].valueCoding.code == "LA33-6"
assert inst.item[2].item[4].item[0].item[3].answer[0].valueCoding.display == "Yes"
assert (
inst.item[2].item[4].item[0].item[3].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[4].item[0].item[3].definition
== "http://loinc.org/fhir/DataElement/54139-1"
)
assert inst.item[2].item[4].item[0].item[3].linkId == "2.1.1.4"
assert inst.item[2].item[4].item[0].item[3].text == "Living?"
assert inst.item[2].item[4].item[0].item[4].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[4].item[0].item[4].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[4].item[0].item[4].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[4].item[0].item[4].definition
== "http://loinc.org/fhir/DataElement/54121-9"
)
assert inst.item[2].item[4].item[0].item[4].linkId == "2.1.1.5"
assert inst.item[2].item[4].item[0].item[4].text == "Was this person born a twin?"
assert inst.item[2].item[4].item[0].item[5].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[4].item[0].item[5].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[4].item[0].item[5].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[4].item[0].item[5].definition
== "http://loinc.org/fhir/DataElement/54122-7"
)
assert inst.item[2].item[4].item[0].item[5].linkId == "2.1.1.6"
assert inst.item[2].item[4].item[0].item[5].text == "Was this person adopted?"
assert inst.item[2].item[4].item[0].linkId == "2.1.1"
assert inst.item[2].item[4].linkId == "2.1"
assert (
inst.item[2].item[5].item[0].item[0].answer[0].valueCoding.code == "LA10416-8"
)
assert (
inst.item[2].item[5].item[0].item[0].answer[0].valueCoding.display == "Father"
)
assert (
inst.item[2].item[5].item[0].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[5].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54136-7"
)
assert inst.item[2].item[5].item[0].item[0].linkId == "2.1.1.1"
assert inst.item[2].item[5].item[0].item[0].text == "Relationship to you"
assert inst.item[2].item[5].item[0].item[1].answer[0].valueString == "Donald"
assert (
inst.item[2].item[5].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54138-3"
)
assert inst.item[2].item[5].item[0].item[1].linkId == "2.1.1.2"
assert inst.item[2].item[5].item[0].item[1].text == "Name"
assert inst.item[2].item[5].item[0].item[2].answer[0].valueCoding.code == "LA2-8"
assert inst.item[2].item[5].item[0].item[2].answer[0].valueCoding.display == "Male"
assert (
inst.item[2].item[5].item[0].item[2].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[5].item[0].item[2].definition
== "http://loinc.org/fhir/DataElement/54123-5"
)
assert inst.item[2].item[5].item[0].item[2].linkId == "2.1.1.3"
assert inst.item[2].item[5].item[0].item[2].text == "Gender"
assert float(
inst.item[2]
.item[5]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueDecimal
) == float(52)
assert (
inst.item[2].item[5].item[0].item[3].answer[0].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54141-7"
)
assert (
inst.item[2].item[5].item[0].item[3].answer[0].item[0].item[0].linkId
== "2.1.1.4.2.2"
)
assert inst.item[2].item[5].item[0].item[3].answer[0].item[0].item[0].text == "Age"
assert inst.item[2].item[5].item[0].item[3].answer[0].item[0].linkId == "2.1.1.4.2"
assert inst.item[2].item[5].item[0].item[3].answer[0].valueCoding.code == "LA33-6"
assert inst.item[2].item[5].item[0].item[3].answer[0].valueCoding.display == "Yes"
assert (
inst.item[2].item[5].item[0].item[3].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[5].item[0].item[3].definition
== "http://loinc.org/fhir/DataElement/54139-1"
)
assert inst.item[2].item[5].item[0].item[3].linkId == "2.1.1.4"
assert inst.item[2].item[5].item[0].item[3].text == "Living?"
assert inst.item[2].item[5].item[0].item[4].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[5].item[0].item[4].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[5].item[0].item[4].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[5].item[0].item[4].definition
== "http://loinc.org/fhir/DataElement/54121-9"
)
assert inst.item[2].item[5].item[0].item[4].linkId == "2.1.1.5"
assert inst.item[2].item[5].item[0].item[4].text == "Was this person born a twin?"
assert inst.item[2].item[5].item[0].item[5].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[5].item[0].item[5].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[5].item[0].item[5].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[5].item[0].item[5].definition
== "http://loinc.org/fhir/DataElement/54122-7"
)
assert inst.item[2].item[5].item[0].item[5].linkId == "2.1.1.6"
assert inst.item[2].item[5].item[0].item[5].text == "Was this person adopted?"
assert inst.item[2].item[5].item[0].linkId == "2.1.1"
assert inst.item[2].item[5].linkId == "2.1"
assert (
inst.item[2].item[6].item[0].item[0].answer[0].valueCoding.code == "LA10425-9"
)
assert (
inst.item[2].item[6].item[0].item[0].answer[0].valueCoding.display
== "Paternal Uncle"
)
assert (
inst.item[2].item[6].item[0].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[6].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54136-7"
)
assert inst.item[2].item[6].item[0].item[0].linkId == "2.1.1.1"
assert inst.item[2].item[6].item[0].item[0].text == "Relationship to you"
assert inst.item[2].item[6].item[0].item[1].answer[0].valueString == "Eric"
assert (
inst.item[2].item[6].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54138-3"
)
assert inst.item[2].item[6].item[0].item[1].linkId == "2.1.1.2"
assert inst.item[2].item[6].item[0].item[1].text == "Name"
assert inst.item[2].item[6].item[0].item[2].answer[0].valueCoding.code == "LA2-8"
assert inst.item[2].item[6].item[0].item[2].answer[0].valueCoding.display == "Male"
assert (
inst.item[2].item[6].item[0].item[2].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[6].item[0].item[2].definition
== "http://loinc.org/fhir/DataElement/54123-5"
)
assert inst.item[2].item[6].item[0].item[2].linkId == "2.1.1.3"
assert inst.item[2].item[6].item[0].item[2].text == "Gender"
assert float(
inst.item[2]
.item[6]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueDecimal
) == float(56)
assert (
inst.item[2].item[6].item[0].item[3].answer[0].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54141-7"
)
assert (
inst.item[2].item[6].item[0].item[3].answer[0].item[0].item[0].linkId
== "2.1.1.4.2.2"
)
assert inst.item[2].item[6].item[0].item[3].answer[0].item[0].item[0].text == "Age"
assert inst.item[2].item[6].item[0].item[3].answer[0].item[0].linkId == "2.1.1.4.2"
assert inst.item[2].item[6].item[0].item[3].answer[0].valueCoding.code == "LA33-6"
assert inst.item[2].item[6].item[0].item[3].answer[0].valueCoding.display == "Yes"
assert (
inst.item[2].item[6].item[0].item[3].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[6].item[0].item[3].definition
== "http://loinc.org/fhir/DataElement/54139-1"
)
assert inst.item[2].item[6].item[0].item[3].linkId == "2.1.1.4"
assert inst.item[2].item[6].item[0].item[3].text == "Living?"
assert inst.item[2].item[6].item[0].item[4].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[6].item[0].item[4].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[6].item[0].item[4].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[6].item[0].item[4].definition
== "http://loinc.org/fhir/DataElement/54121-9"
)
assert inst.item[2].item[6].item[0].item[4].linkId == "2.1.1.5"
assert inst.item[2].item[6].item[0].item[4].text == "Was this person born a twin?"
assert inst.item[2].item[6].item[0].item[5].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[6].item[0].item[5].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[6].item[0].item[5].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[6].item[0].item[5].definition
== "http://loinc.org/fhir/DataElement/54122-7"
)
assert inst.item[2].item[6].item[0].item[5].linkId == "2.1.1.6"
assert inst.item[2].item[6].item[0].item[5].text == "Was this person adopted?"
assert inst.item[2].item[6].item[0].linkId == "2.1.1"
assert inst.item[2].item[6].linkId == "2.1"
assert (
inst.item[2].item[7].item[0].item[0].answer[0].valueCoding.code == "LA10421-8"
)
assert (
inst.item[2].item[7].item[0].item[0].answer[0].valueCoding.display
== "Paternal Aunt"
)
assert (
inst.item[2].item[7].item[0].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[7].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54136-7"
)
assert inst.item[2].item[7].item[0].item[0].linkId == "2.1.1.1"
assert inst.item[2].item[7].item[0].item[0].text == "Relationship to you"
assert inst.item[2].item[7].item[0].item[1].answer[0].valueString == "Fiona"
assert (
inst.item[2].item[7].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54138-3"
)
assert inst.item[2].item[7].item[0].item[1].linkId == "2.1.1.2"
assert inst.item[2].item[7].item[0].item[1].text == "Name"
assert inst.item[2].item[7].item[0].item[2].answer[0].valueCoding.code == "LA3-6"
assert (
inst.item[2].item[7].item[0].item[2].answer[0].valueCoding.display == "Female"
)
assert (
inst.item[2].item[7].item[0].item[2].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[7].item[0].item[2].definition
== "http://loinc.org/fhir/DataElement/54123-5"
)
assert inst.item[2].item[7].item[0].item[2].linkId == "2.1.1.3"
assert inst.item[2].item[7].item[0].item[2].text == "Gender"
assert float(
inst.item[2]
.item[7]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueDecimal
) == float(57)
assert (
inst.item[2].item[7].item[0].item[3].answer[0].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54141-7"
)
assert (
inst.item[2].item[7].item[0].item[3].answer[0].item[0].item[0].linkId
== "2.1.1.4.2.2"
)
assert inst.item[2].item[7].item[0].item[3].answer[0].item[0].item[0].text == "Age"
assert inst.item[2].item[7].item[0].item[3].answer[0].item[0].linkId == "2.1.1.4.2"
assert inst.item[2].item[7].item[0].item[3].answer[0].valueCoding.code == "LA33-6"
assert inst.item[2].item[7].item[0].item[3].answer[0].valueCoding.display == "Yes"
assert (
inst.item[2].item[7].item[0].item[3].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[7].item[0].item[3].definition
== "http://loinc.org/fhir/DataElement/54139-1"
)
assert inst.item[2].item[7].item[0].item[3].linkId == "2.1.1.4"
assert inst.item[2].item[7].item[0].item[3].text == "Living?"
assert inst.item[2].item[7].item[0].item[4].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[7].item[0].item[4].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[7].item[0].item[4].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[7].item[0].item[4].definition
== "http://loinc.org/fhir/DataElement/54121-9"
)
assert inst.item[2].item[7].item[0].item[4].linkId == "2.1.1.5"
assert inst.item[2].item[7].item[0].item[4].text == "Was this person born a twin?"
assert inst.item[2].item[7].item[0].item[5].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[7].item[0].item[5].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[7].item[0].item[5].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[7].item[0].item[5].definition
== "http://loinc.org/fhir/DataElement/54122-7"
)
assert inst.item[2].item[7].item[0].item[5].linkId == "2.1.1.6"
assert inst.item[2].item[7].item[0].item[5].text == "Was this person adopted?"
assert inst.item[2].item[7].item[0].linkId == "2.1.1"
assert (
inst.item[2].item[7].item[1].item[0].answer[0].valueCoding.code == "LA10543-9"
)
assert (
inst.item[2].item[7].item[1].item[0].answer[0].valueCoding.display
== "-- Skin Cancer"
)
assert (
inst.item[2].item[7].item[1].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert inst.item[2].item[7].item[1].item[0].linkId == "2.1.2.1"
assert inst.item[2].item[7].item[1].item[0].text == "Disease or Condition"
assert inst.item[2].item[7].item[1].linkId == "2.1.2"
assert (
inst.item[2].item[7].item[1].text == "This family member's history of disease"
)
assert inst.item[2].item[7].linkId == "2.1"
assert (
inst.item[2].item[8].item[0].item[0].answer[0].valueCoding.code == "LA10423-4"
)
assert (
inst.item[2].item[8].item[0].item[0].answer[0].valueCoding.display
== "Paternal Grandfather"
)
assert (
inst.item[2].item[8].item[0].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[8].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54136-7"
)
assert inst.item[2].item[8].item[0].item[0].linkId == "2.1.1.1"
assert inst.item[2].item[8].item[0].item[0].text == "Relationship to you"
assert inst.item[2].item[8].item[0].item[1].answer[0].valueString == "Bob"
assert (
inst.item[2].item[8].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54138-3"
)
assert inst.item[2].item[8].item[0].item[1].linkId == "2.1.1.2"
assert inst.item[2].item[8].item[0].item[1].text == "Name"
assert inst.item[2].item[8].item[0].item[2].answer[0].valueCoding.code == "LA2-8"
assert inst.item[2].item[8].item[0].item[2].answer[0].valueCoding.display == "Male"
assert (
inst.item[2].item[8].item[0].item[2].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[8].item[0].item[2].definition
== "http://loinc.org/fhir/DataElement/54123-5"
)
assert inst.item[2].item[8].item[0].item[2].linkId == "2.1.1.3"
assert inst.item[2].item[8].item[0].item[2].text == "Gender"
assert (
inst.item[2]
.item[8]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.code
== "LA10537-1"
)
assert (
inst.item[2]
.item[8]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.display
== "-- Colon Cancer"
)
assert (
inst.item[2]
.item[8]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[8].item[0].item[3].answer[0].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54112-8"
)
assert (
inst.item[2].item[8].item[0].item[3].answer[0].item[0].item[0].linkId
== "2.1.1.4.1.1"
)
assert (
inst.item[2].item[8].item[0].item[3].answer[0].item[0].item[0].text
== "Cause of Death"
)
assert (
inst.item[2]
.item[8]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[1]
.answer[0]
.valueCoding.code
== "LA10400-2"
)
assert (
inst.item[2]
.item[8]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[1]
.answer[0]
.valueCoding.display
== "OVER 60"
)
assert (
inst.item[2]
.item[8]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[1]
.answer[0]
.valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[8].item[0].item[3].answer[0].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54113-6"
)
assert (
inst.item[2].item[8].item[0].item[3].answer[0].item[0].item[1].linkId
== "2.1.1.4.1.2"
)
assert (
inst.item[2].item[8].item[0].item[3].answer[0].item[0].item[1].text
== "Age at Death"
)
assert inst.item[2].item[8].item[0].item[3].answer[0].item[0].linkId == "2.1.1.4.1"
assert inst.item[2].item[8].item[0].item[3].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[8].item[0].item[3].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[8].item[0].item[3].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[8].item[0].item[3].definition
== "http://loinc.org/fhir/DataElement/54139-1"
)
assert inst.item[2].item[8].item[0].item[3].linkId == "2.1.1.4"
assert inst.item[2].item[8].item[0].item[3].text == "Living?"
assert inst.item[2].item[8].item[0].item[4].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[8].item[0].item[4].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[8].item[0].item[4].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[8].item[0].item[4].definition
== "http://loinc.org/fhir/DataElement/54121-9"
)
assert inst.item[2].item[8].item[0].item[4].linkId == "2.1.1.5"
assert inst.item[2].item[8].item[0].item[4].text == "Was this person born a twin?"
assert inst.item[2].item[8].item[0].item[5].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[8].item[0].item[5].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[8].item[0].item[5].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[8].item[0].item[5].definition
== "http://loinc.org/fhir/DataElement/54122-7"
)
assert inst.item[2].item[8].item[0].item[5].linkId == "2.1.1.6"
assert inst.item[2].item[8].item[0].item[5].text == "Was this person adopted?"
assert inst.item[2].item[8].item[0].linkId == "2.1.1"
assert (
inst.item[2].item[8].item[1].item[0].answer[0].valueCoding.code == "LA10537-1"
)
assert (
inst.item[2].item[8].item[1].item[0].answer[0].valueCoding.display
== "-- Colon Cancer"
)
assert (
inst.item[2].item[8].item[1].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert inst.item[2].item[8].item[1].item[0].linkId == "2.1.2.1"
assert inst.item[2].item[8].item[1].item[0].text == "Disease or Condition"
assert (
inst.item[2].item[8].item[1].item[1].answer[0].valueCoding.code == "LA10400-2"
)
assert (
inst.item[2].item[8].item[1].item[1].answer[0].valueCoding.display == "OVER 60"
)
assert (
inst.item[2].item[8].item[1].item[1].answer[0].valueCoding.system
== "http://loinc.org"
)
assert inst.item[2].item[8].item[1].item[1].linkId == "2.1.2.2"
assert inst.item[2].item[8].item[1].item[1].text == "Age at Diagnosis"
assert inst.item[2].item[8].item[1].linkId == "2.1.2"
assert (
inst.item[2].item[8].item[1].text == "This family member's history of disease"
)
assert inst.item[2].item[8].linkId == "2.1"
assert (
inst.item[2].item[9].item[0].item[0].answer[0].valueCoding.code == "LA10424-2"
)
assert (
inst.item[2].item[9].item[0].item[0].answer[0].valueCoding.display
== "Paternal Grandmother"
)
assert (
inst.item[2].item[9].item[0].item[0].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[9].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54136-7"
)
assert inst.item[2].item[9].item[0].item[0].linkId == "2.1.1.1"
assert inst.item[2].item[9].item[0].item[0].text == "Relationship to you"
assert inst.item[2].item[9].item[0].item[1].answer[0].valueString == "Claire"
assert (
inst.item[2].item[9].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54138-3"
)
assert inst.item[2].item[9].item[0].item[1].linkId == "2.1.1.2"
assert inst.item[2].item[9].item[0].item[1].text == "Name"
assert inst.item[2].item[9].item[0].item[2].answer[0].valueCoding.code == "LA3-6"
assert (
inst.item[2].item[9].item[0].item[2].answer[0].valueCoding.display == "Female"
)
assert (
inst.item[2].item[9].item[0].item[2].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[9].item[0].item[2].definition
== "http://loinc.org/fhir/DataElement/54123-5"
)
assert inst.item[2].item[9].item[0].item[2].linkId == "2.1.1.3"
assert inst.item[2].item[9].item[0].item[2].text == "Gender"
assert (
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.item[0]
.answer[0]
.valueString
== "Lou Gehrigs"
)
assert (
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.item[0]
.linkId
== "2.1.1.4.1.1.1"
)
assert (
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.item[0]
.text
== "Please specify"
)
assert (
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.code
== "LA10589-2"
)
assert (
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.display
== "-- Other/Unexpected"
)
assert (
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[0]
.answer[0]
.valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[9].item[0].item[3].answer[0].item[0].item[0].definition
== "http://loinc.org/fhir/DataElement/54112-8"
)
assert (
inst.item[2].item[9].item[0].item[3].answer[0].item[0].item[0].linkId
== "2.1.1.4.1.1"
)
assert (
inst.item[2].item[9].item[0].item[3].answer[0].item[0].item[0].text
== "Cause of Death"
)
assert (
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[1]
.answer[0]
.valueCoding.code
== "LA10400-2"
)
assert (
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[1]
.answer[0]
.valueCoding.display
== "OVER 60"
)
assert (
inst.item[2]
.item[9]
.item[0]
.item[3]
.answer[0]
.item[0]
.item[1]
.answer[0]
.valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[9].item[0].item[3].answer[0].item[0].item[1].definition
== "http://loinc.org/fhir/DataElement/54113-6"
)
assert (
inst.item[2].item[9].item[0].item[3].answer[0].item[0].item[1].linkId
== "2.1.1.4.1.2"
)
assert (
inst.item[2].item[9].item[0].item[3].answer[0].item[0].item[1].text
== "Age at Death"
)
assert inst.item[2].item[9].item[0].item[3].answer[0].item[0].linkId == "2.1.1.4.1"
assert inst.item[2].item[9].item[0].item[3].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[9].item[0].item[3].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[9].item[0].item[3].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[9].item[0].item[3].definition
== "http://loinc.org/fhir/DataElement/54139-1"
)
assert inst.item[2].item[9].item[0].item[3].linkId == "2.1.1.4"
assert inst.item[2].item[9].item[0].item[3].text == "Living?"
assert inst.item[2].item[9].item[0].item[4].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[9].item[0].item[4].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[9].item[0].item[4].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[9].item[0].item[4].definition
== "http://loinc.org/fhir/DataElement/54121-9"
)
assert inst.item[2].item[9].item[0].item[4].linkId == "2.1.1.5"
assert inst.item[2].item[9].item[0].item[4].text == "Was this person born a twin?"
assert inst.item[2].item[9].item[0].item[5].answer[0].valueCoding.code == "LA32-8"
assert inst.item[2].item[9].item[0].item[5].answer[0].valueCoding.display == "No"
assert (
inst.item[2].item[9].item[0].item[5].answer[0].valueCoding.system
== "http://loinc.org"
)
assert (
inst.item[2].item[9].item[0].item[5].definition
== "http://loinc.org/fhir/DataElement/54122-7"
)
assert inst.item[2].item[9].item[0].item[5].linkId == "2.1.1.6"
assert inst.item[2].item[9].item[0].item[5].text == "Was this person adopted?"
assert inst.item[2].item[9].item[0].linkId == "2.1.1"
assert inst.item[2].item[9].linkId == "2.1"
assert inst.item[2].linkId == "2"
assert inst.item[2].text == "Family member health information"
assert inst.meta.tag[0].code == "HTEST"
assert inst.meta.tag[0].display == "test health data"
assert (
inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
)
assert inst.questionnaire == "http://hl7.org/fhir/Questionnaire/ussg-fht"
assert inst.status == "in-progress"
assert inst.subject.reference == "http://hl7.org/fhir/Patient/proband"
assert inst.subject.type == "Patient"
assert inst.text.div == (
"<div " 'xmlns="http://www.w3.org/1999/xhtml"><div>todo</div></div>'
)
assert inst.text.status == "generated"
def test_questionnaireresponse_3(base_settings):
"""No. 3 tests collection for QuestionnaireResponse.
Test File: questionnaireresponse-example-ussg-fht-answers.json
"""
filename = (
base_settings["unittest_data_dir"]
/ "questionnaireresponse-example-ussg-fht-answers.json"
)
inst = questionnaireresponse.QuestionnaireResponse.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "QuestionnaireResponse" == inst.resource_type
impl_questionnaireresponse_3(inst)
# Round-trip: regenerate the data from the instance and construct it again.
data = inst.dict()
assert "QuestionnaireResponse" == data["resourceType"]
inst2 = questionnaireresponse.QuestionnaireResponse(**data)
impl_questionnaireresponse_3(inst2)
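# The tests in this module all repeat one parse/round-trip pattern. A minimal
# sketch of that pattern as a reusable helper, assuming only the
# fhir.resources API already used above (parse_file, dict); "roundtrip" is a
# hypothetical name, not part of this generated suite:
def roundtrip(resource_cls, filename, impl):
    inst = resource_cls.parse_file(
        filename, content_type="application/json", encoding="utf-8"
    )
    assert resource_cls.__name__ == inst.resource_type
    impl(inst)  # assertions against the freshly parsed instance
    data = inst.dict()  # serialize back to a plain dict ...
    impl(resource_cls(**data))  # ... and re-validate after reconstruction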
def impl_questionnaireresponse_4(inst):
assert inst.authored == fhirtypes.DateTime.validate("2014-12-11T04:44:16Z")
assert inst.id == "gcs"
assert inst.item[0].answer[0].valueCoding.code == "LA6560-2"
assert inst.item[0].answer[0].valueCoding.display == "Confused"
assert (
inst.item[0].answer[0].valueCoding.extension[0].url
== "http://hl7.org/fhir/StructureDefinition/ordinalValue"
)
assert float(inst.item[0].answer[0].valueCoding.extension[0].valueDecimal) == float(
4
)
assert inst.item[0].answer[0].valueCoding.system == "http://loinc.org"
assert inst.item[0].linkId == "1.1"
assert inst.item[1].answer[0].valueCoding.code == "LA6566-9"
assert inst.item[1].answer[0].valueCoding.display == "Localizing pain"
assert (
inst.item[1].answer[0].valueCoding.extension[0].url
== "http://hl7.org/fhir/StructureDefinition/ordinalValue"
)
assert float(inst.item[1].answer[0].valueCoding.extension[0].valueDecimal) == float(
5
)
assert inst.item[1].answer[0].valueCoding.system == "http://loinc.org"
assert inst.item[1].linkId == "1.2"
assert inst.item[2].answer[0].valueCoding.code == "LA6556-0"
assert inst.item[2].answer[0].valueCoding.display == "Eyes open spontaneously"
assert (
inst.item[2].answer[0].valueCoding.extension[0].url
== "http://hl7.org/fhir/StructureDefinition/ordinalValue"
)
assert float(inst.item[2].answer[0].valueCoding.extension[0].valueDecimal) == float(
4
)
assert inst.item[2].answer[0].valueCoding.system == "http://loinc.org"
assert inst.item[2].linkId == "1.3"
assert inst.meta.tag[0].code == "HTEST"
assert inst.meta.tag[0].display == "test health data"
assert (
inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
)
assert inst.questionnaire == "http://hl7.org/fhir/Questionnaire/gcs"
assert inst.source.reference == "Practitioner/f007"
assert inst.status == "completed"
assert inst.subject.display == "Peter James Chalmers"
assert inst.subject.reference == "Patient/example"
assert inst.text.div == (
"<div " 'xmlns="http://www.w3.org/1999/xhtml"><div>todo</div></div>'
)
assert inst.text.status == "generated"
def test_questionnaireresponse_4(base_settings):
"""No. 4 tests collection for QuestionnaireResponse.
Test File: questionnaireresponse-example-gcs.json
"""
filename = (
base_settings["unittest_data_dir"] / "questionnaireresponse-example-gcs.json"
)
inst = questionnaireresponse.QuestionnaireResponse.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "QuestionnaireResponse" == inst.resource_type
impl_questionnaireresponse_4(inst)
# Round-trip: regenerate the data from the instance and construct it again.
data = inst.dict()
assert "QuestionnaireResponse" == data["resourceType"]
inst2 = questionnaireresponse.QuestionnaireResponse(**data)
impl_questionnaireresponse_4(inst2)
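# The ordinalValue extensions asserted above (4, 5, 4) are the per-item scores
# of the Glasgow Coma Scale, so their sum for the "gcs" example is 13. A
# hedged sketch; "gcs_total" is a hypothetical helper, not part of this suite:
def gcs_total(inst):
    url = "http://hl7.org/fhir/StructureDefinition/ordinalValue"
    return sum(
        float(ext.valueDecimal)
        for item in inst.item
        for ext in item.answer[0].valueCoding.extension
        if ext.url == url
    )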
def impl_questionnaireresponse_5(inst):
assert inst.author.reference == "http://hl7.org/fhir/Practitioner/example"
assert inst.author.type == "Practitioner"
assert inst.authored == fhirtypes.DateTime.validate("2013-02-19T14:15:00+10:00")
assert inst.id == "bb"
assert inst.item[0].item[0].item[0].answer[0].valueString == "Cathy Jones"
assert inst.item[0].item[0].item[0].linkId == "nameOfChild"
assert inst.item[0].item[0].item[0].text == "Name of child"
assert inst.item[0].item[0].item[1].answer[0].valueCoding.code == "F"
assert inst.item[0].item[0].item[1].linkId == "sex"
assert inst.item[0].item[0].item[1].text == "Sex"
assert inst.item[0].item[0].linkId == "group"
assert float(inst.item[0].item[1].item[0].answer[0].valueDecimal) == float(3.25)
assert inst.item[0].item[1].item[0].linkId == "birthWeight"
assert inst.item[0].item[1].item[0].text == "Birth weight (kg)"
assert float(inst.item[0].item[1].item[1].answer[0].valueDecimal) == float(44.3)
assert inst.item[0].item[1].item[1].linkId == "birthLength"
assert inst.item[0].item[1].item[1].text == "Birth length (cm)"
assert inst.item[0].item[1].item[2].answer[0].item[0].item[0].answer[
0
].valueDate == fhirtypes.Date.validate("1972-11-30")
assert (
inst.item[0].item[1].item[2].answer[0].item[0].item[0].linkId == "vitaminKDose1"
)
assert inst.item[0].item[1].item[2].answer[0].item[0].item[0].text == "1st dose"
assert inst.item[0].item[1].item[2].answer[0].item[0].item[1].answer[
0
].valueDate == fhirtypes.Date.validate("1972-12-11")
assert (
inst.item[0].item[1].item[2].answer[0].item[0].item[1].linkId == "vitaminKDose2"
)
assert inst.item[0].item[1].item[2].answer[0].item[0].item[1].text == "2nd dose"
assert inst.item[0].item[1].item[2].answer[0].item[0].linkId == "vitaminKgivenDoses"
assert inst.item[0].item[1].item[2].answer[0].valueCoding.code == "INJECTION"
assert inst.item[0].item[1].item[2].linkId == "vitaminKgiven"
assert inst.item[0].item[1].item[2].text == "Vitamin K given"
assert inst.item[0].item[1].item[3].answer[0].item[0].answer[
0
].valueDate == fhirtypes.Date.validate("1972-12-04")
assert inst.item[0].item[1].item[3].answer[0].item[0].linkId == "hepBgivenDate"
assert inst.item[0].item[1].item[3].answer[0].item[0].text == "Date given"
assert inst.item[0].item[1].item[3].answer[0].valueBoolean is True
assert inst.item[0].item[1].item[3].linkId == "hepBgiven"
assert inst.item[0].item[1].item[3].text == "Hep B given y / n"
assert (
inst.item[0].item[1].item[4].answer[0].valueString
== "Already able to speak Chinese"
)
assert inst.item[0].item[1].item[4].linkId == "abnormalitiesAtBirth"
assert inst.item[0].item[1].item[4].text == "Abnormalities noted at birth"
assert inst.item[0].item[1].linkId == "neonatalInformation"
assert inst.item[0].item[1].text == "Neonatal Information"
assert inst.item[0].linkId == "birthDetails"
assert inst.item[0].text == "Birth details - To be completed by health professional"
assert inst.meta.tag[0].code == "HTEST"
assert inst.meta.tag[0].display == "test health data"
assert (
inst.meta.tag[0].system == "http://terminology.hl7.org/CodeSystem/v3-ActReason"
)
assert inst.status == "completed"
assert inst.subject.reference == "http://hl7.org/fhir/Patient/1"
assert inst.subject.type == "Patient"
assert inst.text.status == "generated"
def test_questionnaireresponse_5(base_settings):
"""No. 5 tests collection for QuestionnaireResponse.
Test File: questionnaireresponse-example-bluebook.json
"""
filename = (
base_settings["unittest_data_dir"]
/ "questionnaireresponse-example-bluebook.json"
)
inst = questionnaireresponse.QuestionnaireResponse.parse_file(
filename, content_type="application/json", encoding="utf-8"
)
assert "QuestionnaireResponse" == inst.resource_type
impl_questionnaireresponse_5(inst)
# Round-trip: regenerate the data from the instance and construct it again.
data = inst.dict()
assert "QuestionnaireResponse" == data["resourceType"]
inst2 = questionnaireresponse.QuestionnaireResponse(**data)
impl_questionnaireresponse_5(inst2)
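# Every test above takes a "base_settings" fixture exposing
# "unittest_data_dir". A minimal sketch of such a fixture, assuming the
# example JSON files live in a sibling "data" directory (only the fixture
# name and key come from the tests; the body is an assumption):
import pathlib
import pytest

@pytest.fixture
def base_settings():
    return {"unittest_data_dir": pathlib.Path(__file__).parent / "data"}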
avg_line_length: 39.473356 | max_line_length: 88 | alphanum_fraction: 0.589766
[per-record quality-signal columns elided]
hexsha: 9ae35fd2f861db90faa296b2c3cb168bd8403548 | size: 6842 | ext: py | lang: Python
max_stars_repo_path: RasPi_Dev/ros_ws/build/vslam/rtabmap_ros/catkin_generated/pkg.installspace.context.pc.py
max_stars_repo_name: QianheYu/xtark_driver_dev | max_stars_repo_head_hexsha: 1708888161cf20c0d1f45c99d0da4467d69c26c8
max_stars_repo_licenses: ["BSD-3-Clause"] | max_stars_count: 1 | stars_event: 2022-03-11T03:31:15.000Z
max_issues_repo_path / max_forks_repo_path: RasPi_Dev/ros_ws/build/vslam/rtabmap_ros/catkin_generated/pkg.installspace.context.pc.py
max_issues_repo_name / max_forks_repo_name: bravetree/xtark_driver_dev | head_hexsha: 1708888161cf20c0d1f45c99d0da4467d69c26c8 | licenses: ["BSD-3-Clause"] | counts/dates: null
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "/home/xtark/ros_ws/install/include;/home/xtark/ros_ws/devel/lib/rtabmap-0.19/../../include/rtabmap-0.19;/opt/ros/kinetic/include/opencv-3.3.1-dev;/opt/ros/kinetic/include/opencv-3.3.1-dev/opencv".split(';') if "/home/xtark/ros_ws/install/include;/home/xtark/ros_ws/devel/lib/rtabmap-0.19/../../include/rtabmap-0.19;/opt/ros/kinetic/include/opencv-3.3.1-dev;/opt/ros/kinetic/include/opencv-3.3.1-dev/opencv" != "" else []
PROJECT_CATKIN_DEPENDS = "cv_bridge;roscpp;rospy;sensor_msgs;std_msgs;std_srvs;nav_msgs;geometry_msgs;visualization_msgs;image_transport;tf;tf_conversions;tf2_ros;eigen_conversions;laser_geometry;pcl_conversions;pcl_ros;nodelet;dynamic_reconfigure;message_filters;class_loader;rosgraph_msgs;stereo_msgs;move_base_msgs;image_geometry;costmap_2d;rviz".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-lrtabmap_ros;/home/xtark/ros_ws/devel/lib/librtabmap_core.so;/home/xtark/ros_ws/devel/lib/librtabmap_utilite.so;/home/xtark/ros_ws/devel/lib/librtabmap_gui.so;/usr/lib/arm-linux-gnueabihf/libz.so;/usr/local/lib/libg2o_core.so;/usr/local/lib/libg2o_types_slam2d.so;/usr/local/lib/libg2o_types_slam3d.so;/usr/local/lib/libg2o_types_sba.so;/usr/local/lib/libg2o_stuff.so;/usr/local/lib/libg2o_solver_csparse.so;/usr/local/lib/libg2o_csparse_extension.so;/usr/lib/arm-linux-gnueabihf/libcxsparse.so;/usr/local/lib/libg2o_solver_cholmod.so;/usr/lib/arm-linux-gnueabihf/libcholmod.so;/usr/lib/libOpenNI2.so;/opt/ros/kinetic/lib/liboctomap.so;/opt/ros/kinetic/lib/liboctomath.so;/opt/ros/kinetic/lib/libopencv_calib3d3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_core3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_dnn3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_features2d3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_flann3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_highgui3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_imgcodecs3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_imgproc3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_ml3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_objdetect3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_photo3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_shape3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_stitching3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_superres3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_video3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_videoio3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_videostab3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_viz3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_aruco3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_bgsegm3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_bioinspired3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_ccalib3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_cvv3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_datasets3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_dpm3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_face3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_fuzzy3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_hdf3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_img_hash3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_line_descriptor3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_optflow3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_phase_unwrapping3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_plot3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_reg3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_rgbd3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_saliency3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_stereo3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_structured_light3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_surface_matching3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_text3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_tracking3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_xfeatures2d3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_ximgproc3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_xobjdetect3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_xphoto3.so.3.3.1".split(';') if 
"-lrtabmap_ros;/home/xtark/ros_ws/devel/lib/librtabmap_core.so;/home/xtark/ros_ws/devel/lib/librtabmap_utilite.so;/home/xtark/ros_ws/devel/lib/librtabmap_gui.so;/usr/lib/arm-linux-gnueabihf/libz.so;/usr/local/lib/libg2o_core.so;/usr/local/lib/libg2o_types_slam2d.so;/usr/local/lib/libg2o_types_slam3d.so;/usr/local/lib/libg2o_types_sba.so;/usr/local/lib/libg2o_stuff.so;/usr/local/lib/libg2o_solver_csparse.so;/usr/local/lib/libg2o_csparse_extension.so;/usr/lib/arm-linux-gnueabihf/libcxsparse.so;/usr/local/lib/libg2o_solver_cholmod.so;/usr/lib/arm-linux-gnueabihf/libcholmod.so;/usr/lib/libOpenNI2.so;/opt/ros/kinetic/lib/liboctomap.so;/opt/ros/kinetic/lib/liboctomath.so;/opt/ros/kinetic/lib/libopencv_calib3d3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_core3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_dnn3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_features2d3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_flann3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_highgui3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_imgcodecs3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_imgproc3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_ml3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_objdetect3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_photo3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_shape3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_stitching3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_superres3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_video3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_videoio3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_videostab3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_viz3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_aruco3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_bgsegm3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_bioinspired3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_ccalib3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_cvv3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_datasets3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_dpm3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_face3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_fuzzy3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_hdf3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_img_hash3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_line_descriptor3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_optflow3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_phase_unwrapping3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_plot3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_reg3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_rgbd3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_saliency3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_stereo3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_structured_light3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_surface_matching3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_text3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_tracking3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_xfeatures2d3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_ximgproc3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_xobjdetect3.so.3.3.1;/opt/ros/kinetic/lib/libopencv_xphoto3.so.3.3.1" != "" else []
PROJECT_NAME = "rtabmap_ros"
PROJECT_SPACE_DIR = "/home/xtark/ros_ws/install"
PROJECT_VERSION = "0.19.3"
avg_line_length: 760.222222 | max_line_length: 5828 | alphanum_fraction: 0.80152
[per-record quality-signal columns elided]
hexsha: 9af25c3737a21381d7ab5162b400d6fcb877fb70 | size: 60552 | ext: py | lang: Python
max_stars_repo_path: stubs.min/System/Windows/Documents/__init___parts/Typography.py
max_stars_repo_name: hdm-dt-fb/ironpython-stubs | max_stars_repo_head_hexsha: 4d2b405eda3ceed186e8adca55dd97c332c6f49d
max_stars_repo_licenses: ["MIT"] | max_stars_count: 1 | stars_event: 2017-07-25T14:30:18.000Z
max_issues_repo_path / max_forks_repo_path: stubs.min/System/Windows/Documents/__init___parts/Typography.py
max_issues_repo_name / max_forks_repo_name: hdm-dt-fb/ironpython-stubs | head_hexsha: 4d2b405eda3ceed186e8adca55dd97c332c6f49d | licenses: ["MIT"] | counts/dates: null
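The record below is an IronPython stub for WPF's System.Windows.Documents.Typography attached-property accessors. A minimal usage sketch from IronPython, assuming a WPF runtime is available (the clr references and the TextBlock target are assumptions, not taken from the stub itself):

import clr
clr.AddReference("PresentationCore")       # System.Windows.Documents.Typography
clr.AddReference("PresentationFramework")  # System.Windows.Controls.TextBlock
from System.Windows.Controls import TextBlock
from System.Windows.Documents import Typography

tb = TextBlock(Text="0 fi 1/2")
Typography.SetKerning(tb, True)      # enable OpenType kerning
Typography.SetSlashedZero(tb, True)  # render zero with a slash
assert Typography.GetKerning(tb) is True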
class Typography(object):
""" Provides access to a rich set of OpenType typography properties. """
@staticmethod
def GetAnnotationAlternates(element):
"""
GetAnnotationAlternates(element: DependencyObject) -> int
Returns the value of the
System.Windows.Documents.Typography.AnnotationAlternates attached property for
a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.AnnotationAlternates property.
Returns: The current value of the System.Windows.Documents.Typography.AnnotationAlternates
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetCapitals(element):
"""
GetCapitals(element: DependencyObject) -> FontCapitals
Returns the value of the System.Windows.Documents.Typography.Capitals attached
property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.Capitals property.
Returns: The current value of the System.Windows.Documents.Typography.Capitals attached
property on the specified dependency object.
"""
pass
@staticmethod
def GetCapitalSpacing(element):
"""
GetCapitalSpacing(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.CapitalSpacing
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.CapitalSpacing property.
Returns: The current value of the System.Windows.Documents.Typography.CapitalSpacing
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetCaseSensitiveForms(element):
"""
GetCaseSensitiveForms(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.CaseSensitiveForms
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.CaseSensitiveForms property.
Returns: The current value of the System.Windows.Documents.Typography.CaseSensitiveForms
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetContextualAlternates(element):
"""
GetContextualAlternates(element: DependencyObject) -> bool
Returns the value of the
System.Windows.Documents.Typography.ContextualAlternates attached property for
a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.ContextualAlternates property.
Returns: The current value of the
System.Windows.Documents.Typography.ContextualAlternates attached property on
the specified dependency object.
"""
pass
@staticmethod
def GetContextualLigatures(element):
"""
GetContextualLigatures(element: DependencyObject) -> bool
Returns the value of the
System.Windows.Documents.Typography.ContextualLigatures attached property for a
specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.ContextualLigatures property.
Returns: The current value of the
System.Windows.Documents.Typography.ContextualLigatures attached property on
the specified dependency object.
"""
pass
@staticmethod
def GetContextualSwashes(element):
"""
GetContextualSwashes(element: DependencyObject) -> int
Returns the value of the System.Windows.Documents.Typography.ContextualSwashes
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.ContextualSwashes property.
Returns: The current value of the System.Windows.Documents.Typography.ContextualSwashes
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetDiscretionaryLigatures(element):
"""
GetDiscretionaryLigatures(element: DependencyObject) -> bool
Returns the value of the
System.Windows.Documents.Typography.DiscretionaryLigatures attached property
for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.DiscretionaryLigatures property.
Returns: The current value of the
System.Windows.Documents.Typography.DiscretionaryLigatures attached property on
the specified dependency object.
"""
pass
@staticmethod
def GetEastAsianExpertForms(element):
"""
GetEastAsianExpertForms(element: DependencyObject) -> bool
Returns the value of the
System.Windows.Documents.Typography.EastAsianExpertForms attached property for
a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.EastAsianExpertForms property.
Returns: The current value of the
System.Windows.Documents.Typography.EastAsianExpertForms attached property on
the specified dependency object.
"""
pass
@staticmethod
def GetEastAsianLanguage(element):
"""
GetEastAsianLanguage(element: DependencyObject) -> FontEastAsianLanguage
Returns the value of the System.Windows.Documents.Typography.EastAsianLanguage
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.EastAsianLanguage property.
Returns: The current value of the System.Windows.Documents.Typography.EastAsianLanguage
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetEastAsianWidths(element):
"""
GetEastAsianWidths(element: DependencyObject) -> FontEastAsianWidths
Returns the value of the System.Windows.Documents.Typography.EastAsianWidths
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.EastAsianWidths property.
Returns: The current value of the System.Windows.Documents.Typography.EastAsianWidths
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetFraction(element):
"""
GetFraction(element: DependencyObject) -> FontFraction
Returns the value of the System.Windows.Documents.Typography.Fraction attached
property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.Fraction property.
Returns: The current value of the System.Windows.Documents.Typography.Fraction attached
property on the specified dependency object.
"""
pass
@staticmethod
def GetHistoricalForms(element):
"""
GetHistoricalForms(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.HistoricalForms
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.HistoricalForms property.
Returns: The current value of the System.Windows.Documents.Typography.HistoricalForms
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetHistoricalLigatures(element):
"""
GetHistoricalLigatures(element: DependencyObject) -> bool
Returns the value of the
System.Windows.Documents.Typography.HistoricalLigatures attached property for a
specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.HistoricalLigatures property.
Returns: The current value of the
System.Windows.Documents.Typography.HistoricalLigatures attached property on
the specified dependency object.
"""
pass
@staticmethod
def GetKerning(element):
"""
GetKerning(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.Kerning attached
property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.Kerning property.
Returns: The current value of the System.Windows.Documents.Typography.Kerning attached
property on the specified dependency object.
"""
pass
@staticmethod
def GetMathematicalGreek(element):
"""
GetMathematicalGreek(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.MathematicalGreek
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.MathematicalGreek property.
Returns: The current value of the System.Windows.Documents.Typography.MathematicalGreek
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetNumeralAlignment(element):
"""
GetNumeralAlignment(element: DependencyObject) -> FontNumeralAlignment
Returns the value of the System.Windows.Documents.Typography.NumeralAlignment
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.NumeralAlignment property.
Returns: The current value of the System.Windows.Documents.Typography.NumeralAlignment
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetNumeralStyle(element):
"""
GetNumeralStyle(element: DependencyObject) -> FontNumeralStyle
Returns the value of the System.Windows.Documents.Typography.NumeralStyle
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.NumeralStyle property.
Returns: The current value of the System.Windows.Documents.Typography.NumeralStyle
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetSlashedZero(element):
"""
GetSlashedZero(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.SlashedZero
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.SlashedZero property.
Returns: The current value of the System.Windows.Documents.Typography.SlashedZero
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetStandardLigatures(element):
"""
GetStandardLigatures(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.StandardLigatures
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.StandardLigatures property.
Returns: The current value of the System.Windows.Documents.Typography.StandardLigatures
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetStandardSwashes(element):
"""
GetStandardSwashes(element: DependencyObject) -> int
Returns the value of the System.Windows.Documents.Typography.StandardSwashes
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.StandardSwashes property.
Returns: The current value of the System.Windows.Documents.Typography.StandardSwashes
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetStylisticAlternates(element):
"""
GetStylisticAlternates(element: DependencyObject) -> int
Returns the value of the
System.Windows.Documents.Typography.StylisticAlternates attached property for a
specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.StylisticAlternates property.
Returns: The current value of the
System.Windows.Documents.Typography.StylisticAlternates attached property on
the specified dependency object.
"""
pass
@staticmethod
def GetStylisticSet1(element):
"""
GetStylisticSet1(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.StylisticSet1
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.StylisticSet1 property.
Returns: The current value of the System.Windows.Documents.Typography.StylisticSet1
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetStylisticSet10(element):
"""
GetStylisticSet10(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.StylisticSet10
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.StylisticSet10 property.
Returns: The current value of the System.Windows.Documents.Typography.StylisticSet10
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetStylisticSet11(element):
"""
GetStylisticSet11(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.StylisticSet11
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.StylisticSet11 property.
Returns: The current value of the System.Windows.Documents.Typography.StylisticSet11
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetStylisticSet12(element):
"""
GetStylisticSet12(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.StylisticSet12
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.StylisticSet12 property.
Returns: The current value of the System.Windows.Documents.Typography.StylisticSet12
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetStylisticSet13(element):
"""
GetStylisticSet13(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.StylisticSet13
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.StylisticSet13 property.
Returns: The current value of the System.Windows.Documents.Typography.StylisticSet13
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetStylisticSet14(element):
"""
GetStylisticSet14(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.StylisticSet14
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.StylisticSet14 property.
Returns: The current value of the System.Windows.Documents.Typography.StylisticSet14
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetStylisticSet15(element):
"""
GetStylisticSet15(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.StylisticSet15
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.StylisticSet15 property.
Returns: The current value of the System.Windows.Documents.Typography.StylisticSet15
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetStylisticSet16(element):
"""
GetStylisticSet16(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.StylisticSet16
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.StylisticSet16 property.
Returns: The current value of the System.Windows.Documents.Typography.StylisticSet16
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetStylisticSet17(element):
"""
GetStylisticSet17(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.StylisticSet17
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.StylisticSet17 property.
Returns: The current value of the System.Windows.Documents.Typography.StylisticSet17
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetStylisticSet18(element):
"""
GetStylisticSet18(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.StylisticSet18
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.StylisticSet18 property.
Returns: The current value of the System.Windows.Documents.Typography.StylisticSet18
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetStylisticSet19(element):
"""
GetStylisticSet19(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.StylisticSet19
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.StylisticSet19 property.
Returns: The current value of the System.Windows.Documents.Typography.StylisticSet19
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetStylisticSet2(element):
"""
GetStylisticSet2(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.StylisticSet2
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.StylisticSet2 property.
Returns: The current value of the System.Windows.Documents.Typography.StylisticSet2
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetStylisticSet20(element):
"""
GetStylisticSet20(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.StylisticSet20
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.StylisticSet20 property.
Returns: The current value of the System.Windows.Documents.Typography.StylisticSet20
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetStylisticSet3(element):
"""
GetStylisticSet3(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.StylisticSet3
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.StylisticSet3 property.
Returns: The current value of the System.Windows.Documents.Typography.StylisticSet3
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetStylisticSet4(element):
"""
GetStylisticSet4(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.StylisticSet4
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.StylisticSet4 property.
Returns: The current value of the System.Windows.Documents.Typography.StylisticSet4
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetStylisticSet5(element):
"""
GetStylisticSet5(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.StylisticSet5
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.StylisticSet5 property.
Returns: The current value of the System.Windows.Documents.Typography.StylisticSet5
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetStylisticSet6(element):
"""
GetStylisticSet6(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.StylisticSet6
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.StylisticSet6 property.
Returns: The current value of the System.Windows.Documents.Typography.StylisticSet6
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetStylisticSet7(element):
"""
GetStylisticSet7(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.StylisticSet7
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.StylisticSet7 property.
Returns: The current value of the System.Windows.Documents.Typography.StylisticSet7
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetStylisticSet8(element):
"""
GetStylisticSet8(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.StylisticSet8
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.StylisticSet8 property.
Returns: The current value of the System.Windows.Documents.Typography.StylisticSet8
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetStylisticSet9(element):
"""
GetStylisticSet9(element: DependencyObject) -> bool
Returns the value of the System.Windows.Documents.Typography.StylisticSet9
attached property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.StylisticSet9 property.
Returns: The current value of the System.Windows.Documents.Typography.StylisticSet9
attached property on the specified dependency object.
"""
pass
@staticmethod
def GetVariants(element):
"""
GetVariants(element: DependencyObject) -> FontVariants
Returns the value of the System.Windows.Documents.Typography.Variants attached
property for a specified dependency object.
element: The dependency object for which to retrieve the value of the
System.Windows.Documents.Typography.Variants property.
Returns: The current value of the System.Windows.Documents.Typography.Variants attached
property on the specified dependency object.
"""
pass
@staticmethod
def SetAnnotationAlternates(element,value):
"""
SetAnnotationAlternates(element: DependencyObject,value: int)
Sets the value of the System.Windows.Documents.Typography.AnnotationAlternates
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.AnnotationAlternates property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetCapitals(element,value):
"""
SetCapitals(element: DependencyObject,value: FontCapitals)
Sets the value of the System.Windows.Documents.Typography.Capitals attached
property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.Capitals property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetCapitalSpacing(element,value):
"""
SetCapitalSpacing(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.CapitalSpacing
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.CapitalSpacing property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetCaseSensitiveForms(element,value):
"""
SetCaseSensitiveForms(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.CaseSensitiveForms
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.CaseSensitiveForms property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetContextualAlternates(element,value):
"""
SetContextualAlternates(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.ContextualAlternates
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.ContextualAlternates property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetContextualLigatures(element,value):
"""
SetContextualLigatures(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.ContextualLigatures
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.ContextualLigatures property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetContextualSwashes(element,value):
"""
SetContextualSwashes(element: DependencyObject,value: int)
Sets the value of the System.Windows.Documents.Typography.ContextualSwashes
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.ContextualSwashes property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetDiscretionaryLigatures(element,value):
"""
SetDiscretionaryLigatures(element: DependencyObject,value: bool)
Sets the value of the
System.Windows.Documents.Typography.DiscretionaryLigatures attached property
for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.DiscretionaryLigatures property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetEastAsianExpertForms(element,value):
"""
SetEastAsianExpertForms(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.EastAsianExpertForms
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.EastAsianExpertForms property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetEastAsianLanguage(element,value):
"""
SetEastAsianLanguage(element: DependencyObject,value: FontEastAsianLanguage)
Sets the value of the System.Windows.Documents.Typography.EastAsianLanguage
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.EastAsianLanguage property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetEastAsianWidths(element,value):
"""
SetEastAsianWidths(element: DependencyObject,value: FontEastAsianWidths)
Sets the value of the System.Windows.Documents.Typography.EastAsianWidths
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.EastAsianWidths property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetFraction(element,value):
"""
SetFraction(element: DependencyObject,value: FontFraction)
Sets the value of the System.Windows.Documents.Typography.Fraction attached
property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.Fraction property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetHistoricalForms(element,value):
"""
SetHistoricalForms(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.HistoricalForms
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.HistoricalForms property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetHistoricalLigatures(element,value):
"""
SetHistoricalLigatures(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.HistoricalLigatures
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.HistoricalLigatures property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetKerning(element,value):
"""
SetKerning(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.Kerning attached
property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.Kerning property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetMathematicalGreek(element,value):
"""
SetMathematicalGreek(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.MathematicalGreek
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.MathematicalGreek property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetNumeralAlignment(element,value):
"""
SetNumeralAlignment(element: DependencyObject,value: FontNumeralAlignment)
Sets the value of the System.Windows.Documents.Typography.NumeralAlignment
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.NumeralAlignment property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetNumeralStyle(element,value):
"""
SetNumeralStyle(element: DependencyObject,value: FontNumeralStyle)
Sets the value of the System.Windows.Documents.Typography.NumeralStyle attached
property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.NumeralStyle property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetSlashedZero(element,value):
"""
SetSlashedZero(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.SlashedZero attached
property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.SlashedZero property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetStandardLigatures(element,value):
"""
SetStandardLigatures(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.StandardLigatures
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.StandardLigatures property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetStandardSwashes(element,value):
"""
SetStandardSwashes(element: DependencyObject,value: int)
Sets the value of the System.Windows.Documents.Typography.StandardSwashes
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.StandardSwashes property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetStylisticAlternates(element,value):
"""
SetStylisticAlternates(element: DependencyObject,value: int)
Sets the value of the System.Windows.Documents.Typography.StylisticAlternates
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.StylisticAlternates property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetStylisticSet1(element,value):
"""
SetStylisticSet1(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.StylisticSet1
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.StylisticSet1 property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetStylisticSet10(element,value):
"""
SetStylisticSet10(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.StylisticSet10
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.StylisticSet10 property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetStylisticSet11(element,value):
"""
SetStylisticSet11(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.StylisticSet11
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.StylisticSet11 property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetStylisticSet12(element,value):
"""
SetStylisticSet12(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.StylisticSet12
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.StylisticSet12 property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetStylisticSet13(element,value):
"""
SetStylisticSet13(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.StylisticSet13
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.StylisticSet13 property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetStylisticSet14(element,value):
"""
SetStylisticSet14(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.StylisticSet14
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.StylisticSet14 property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetStylisticSet15(element,value):
"""
SetStylisticSet15(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.StylisticSet15
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.StylisticSet15 property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetStylisticSet16(element,value):
"""
SetStylisticSet16(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.StylisticSet16
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.StylisticSet16 property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetStylisticSet17(element,value):
"""
SetStylisticSet17(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.StylisticSet17
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.StylisticSet17 property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetStylisticSet18(element,value):
"""
SetStylisticSet18(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.StylisticSet18
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.StylisticSet18 property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetStylisticSet19(element,value):
"""
SetStylisticSet19(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.StylisticSet19
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.StylisticSet19 property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetStylisticSet2(element,value):
"""
SetStylisticSet2(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.StylisticSet2
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.StylisticSet2 property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetStylisticSet20(element,value):
"""
SetStylisticSet20(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.StylisticSet20
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.StylisticSet20 property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetStylisticSet3(element,value):
"""
SetStylisticSet3(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.StylisticSet3
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.StylisticSet3 property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetStylisticSet4(element,value):
"""
SetStylisticSet4(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.StylisticSet4
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.StylisticSet4 property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetStylisticSet5(element,value):
"""
SetStylisticSet5(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.StylisticSet5
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.StylisticSet5 property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetStylisticSet6(element,value):
"""
SetStylisticSet6(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.StylisticSet6
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.StylisticSet6 property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetStylisticSet7(element,value):
"""
SetStylisticSet7(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.StylisticSet7
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.StylisticSet7 property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetStylisticSet8(element,value):
"""
SetStylisticSet8(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.StylisticSet8
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.StylisticSet8 property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetStylisticSet9(element,value):
"""
SetStylisticSet9(element: DependencyObject,value: bool)
Sets the value of the System.Windows.Documents.Typography.StylisticSet9
attached property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.StylisticSet9 property.
value: The new value to set the property to.
"""
pass
@staticmethod
def SetVariants(element,value):
"""
SetVariants(element: DependencyObject,value: FontVariants)
Sets the value of the System.Windows.Documents.Typography.Variants attached
property for a specified dependency object.
element: The dependency object for which to set the value of the
System.Windows.Documents.Typography.Variants property.
value: The new value to set the property to.
"""
pass
AnnotationAlternates=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that specifies the index of an alternate annotation form.
Get: AnnotationAlternates(self: Typography) -> int
Set: AnnotationAlternates(self: Typography)=value
"""
Capitals=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a System.Windows.FontCapitals enumerated value that indicates the capital form of the selected font.
Get: Capitals(self: Typography) -> FontCapitals
Set: Capitals(self: Typography)=value
"""
CapitalSpacing=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that determines whether inter-glyph spacing for all-capital text is globally adjusted to improve readability.
Get: CapitalSpacing(self: Typography) -> bool
Set: CapitalSpacing(self: Typography)=value
"""
CaseSensitiveForms=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that determines whether glyphs adjust their vertical position to better align with uppercase glyphs.
Get: CaseSensitiveForms(self: Typography) -> bool
Set: CaseSensitiveForms(self: Typography)=value
"""
ContextualAlternates=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that determines whether custom glyph forms can be used based upon the context of the text being rendered.
Get: ContextualAlternates(self: Typography) -> bool
Set: ContextualAlternates(self: Typography)=value
"""
ContextualLigatures=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that determines whether contextual ligatures are enabled.
Get: ContextualLigatures(self: Typography) -> bool
Set: ContextualLigatures(self: Typography)=value
"""
ContextualSwashes=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that specifies the index of a contextual swashes form.
Get: ContextualSwashes(self: Typography) -> int
Set: ContextualSwashes(self: Typography)=value
"""
DiscretionaryLigatures=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that determines whether discretionary ligatures are enabled.
Get: DiscretionaryLigatures(self: Typography) -> bool
Set: DiscretionaryLigatures(self: Typography)=value
"""
EastAsianExpertForms=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that determines whether the standard Japanese font forms have been replaced with the corresponding preferred typographic forms.
Get: EastAsianExpertForms(self: Typography) -> bool
Set: EastAsianExpertForms(self: Typography)=value
"""
EastAsianLanguage=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a System.Windows.FontEastAsianLanguage enumerated value that indicates the version of glyphs to be used for a specific writing system or language.
Get: EastAsianLanguage(self: Typography) -> FontEastAsianLanguage
Set: EastAsianLanguage(self: Typography)=value
"""
EastAsianWidths=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a System.Windows.FontEastAsianWidths enumerated value that indicates the proportional width to be used for Latin characters in an East Asian font.
Get: EastAsianWidths(self: Typography) -> FontEastAsianWidths
Set: EastAsianWidths(self: Typography)=value
"""
Fraction=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a System.Windows.FontFraction enumerated value that indicates the fraction style.
Get: Fraction(self: Typography) -> FontFraction
Set: Fraction(self: Typography)=value
"""
HistoricalForms=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that determines whether historical forms are enabled.
Get: HistoricalForms(self: Typography) -> bool
Set: HistoricalForms(self: Typography)=value
"""
HistoricalLigatures=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether historical ligatures are enabled.
Get: HistoricalLigatures(self: Typography) -> bool
Set: HistoricalLigatures(self: Typography)=value
"""
Kerning=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether kerning is enabled.
Get: Kerning(self: Typography) -> bool
Set: Kerning(self: Typography)=value
"""
MathematicalGreek=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether standard typographic font forms of Greek glyphs have been replaced with corresponding font forms commonly used in mathematical notation.
Get: MathematicalGreek(self: Typography) -> bool
Set: MathematicalGreek(self: Typography)=value
"""
NumeralAlignment=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a System.Windows.FontNumeralAlignment enumerated value that indicates the alighnment of widths when using numerals.
Get: NumeralAlignment(self: Typography) -> FontNumeralAlignment
Set: NumeralAlignment(self: Typography)=value
"""
NumeralStyle=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a System.Windows.FontNumeralStyle enumerated value that determines the set of glyphs that are used to render numeric alternate font forms.
Get: NumeralStyle(self: Typography) -> FontNumeralStyle
Set: NumeralStyle(self: Typography)=value
"""
SlashedZero=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether a nominal zero font form should be replaced with a slashed zero.
Get: SlashedZero(self: Typography) -> bool
Set: SlashedZero(self: Typography)=value
"""
StandardLigatures=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether standard ligatures are enabled.
Get: StandardLigatures(self: Typography) -> bool
Set: StandardLigatures(self: Typography)=value
"""
StandardSwashes=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that specifies the index of a standard swashes form.
Get: StandardSwashes(self: Typography) -> int
Set: StandardSwashes(self: Typography)=value
"""
StylisticAlternates=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that specifies the index of a stylistic alternates form.
Get: StylisticAlternates(self: Typography) -> int
Set: StylisticAlternates(self: Typography)=value
"""
StylisticSet1=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether a stylistic set of a font form is enabled.
Get: StylisticSet1(self: Typography) -> bool
Set: StylisticSet1(self: Typography)=value
"""
StylisticSet10=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether a stylistic set of a font form is enabled.
Get: StylisticSet10(self: Typography) -> bool
Set: StylisticSet10(self: Typography)=value
"""
StylisticSet11=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether a stylistic set of a font form is enabled.
Get: StylisticSet11(self: Typography) -> bool
Set: StylisticSet11(self: Typography)=value
"""
StylisticSet12=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether a stylistic set of a font form is enabled.
Get: StylisticSet12(self: Typography) -> bool
Set: StylisticSet12(self: Typography)=value
"""
StylisticSet13=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether a stylistic set of a font form is enabled.
Get: StylisticSet13(self: Typography) -> bool
Set: StylisticSet13(self: Typography)=value
"""
StylisticSet14=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether a stylistic set of a font form is enabled.
Get: StylisticSet14(self: Typography) -> bool
Set: StylisticSet14(self: Typography)=value
"""
StylisticSet15=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether a stylistic set of a font form is enabled.
Get: StylisticSet15(self: Typography) -> bool
Set: StylisticSet15(self: Typography)=value
"""
StylisticSet16=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether a stylistic set of a font form is enabled.
Get: StylisticSet16(self: Typography) -> bool
Set: StylisticSet16(self: Typography)=value
"""
StylisticSet17=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether a stylistic set of a font form is enabled.
Get: StylisticSet17(self: Typography) -> bool
Set: StylisticSet17(self: Typography)=value
"""
StylisticSet18=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether a stylistic set of a font form is enabled.
Get: StylisticSet18(self: Typography) -> bool
Set: StylisticSet18(self: Typography)=value
"""
StylisticSet19=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether a stylistic set of a font form is enabled.
Get: StylisticSet19(self: Typography) -> bool
Set: StylisticSet19(self: Typography)=value
"""
StylisticSet2=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether a stylistic set of a font form is enabled.
Get: StylisticSet2(self: Typography) -> bool
Set: StylisticSet2(self: Typography)=value
"""
StylisticSet20=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether a stylistic set of a font form is enabled.
Get: StylisticSet20(self: Typography) -> bool
Set: StylisticSet20(self: Typography)=value
"""
StylisticSet3=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether a stylistic set of a font form is enabled.
Get: StylisticSet3(self: Typography) -> bool
Set: StylisticSet3(self: Typography)=value
"""
StylisticSet4=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether a stylistic set of a font form is enabled.
Get: StylisticSet4(self: Typography) -> bool
Set: StylisticSet4(self: Typography)=value
"""
StylisticSet5=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether a stylistic set of a font form is enabled.
Get: StylisticSet5(self: Typography) -> bool
Set: StylisticSet5(self: Typography)=value
"""
StylisticSet6=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether a stylistic set of a font form is enabled.
Get: StylisticSet6(self: Typography) -> bool
Set: StylisticSet6(self: Typography)=value
"""
StylisticSet7=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether a stylistic set of a font form is enabled.
Get: StylisticSet7(self: Typography) -> bool
Set: StylisticSet7(self: Typography)=value
"""
StylisticSet8=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether a stylistic set of a font form is enabled.
Get: StylisticSet8(self: Typography) -> bool
Set: StylisticSet8(self: Typography)=value
"""
StylisticSet9=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a value that indicates whether a stylistic set of a font form is enabled.
Get: StylisticSet9(self: Typography) -> bool
Set: StylisticSet9(self: Typography)=value
"""
Variants=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets or sets a System.Windows.FontVariants enumerated value that indicates a variation of the standard typographic form to be used.
Get: Variants(self: Typography) -> FontVariants
Set: Variants(self: Typography)=value
"""
AnnotationAlternatesProperty=None
CapitalSpacingProperty=None
CapitalsProperty=None
CaseSensitiveFormsProperty=None
ContextualAlternatesProperty=None
ContextualLigaturesProperty=None
ContextualSwashesProperty=None
DiscretionaryLigaturesProperty=None
EastAsianExpertFormsProperty=None
EastAsianLanguageProperty=None
EastAsianWidthsProperty=None
FractionProperty=None
HistoricalFormsProperty=None
HistoricalLigaturesProperty=None
KerningProperty=None
MathematicalGreekProperty=None
NumeralAlignmentProperty=None
NumeralStyleProperty=None
SlashedZeroProperty=None
StandardLigaturesProperty=None
StandardSwashesProperty=None
StylisticAlternatesProperty=None
StylisticSet10Property=None
StylisticSet11Property=None
StylisticSet12Property=None
StylisticSet13Property=None
StylisticSet14Property=None
StylisticSet15Property=None
StylisticSet16Property=None
StylisticSet17Property=None
StylisticSet18Property=None
StylisticSet19Property=None
StylisticSet1Property=None
StylisticSet20Property=None
StylisticSet2Property=None
StylisticSet3Property=None
StylisticSet4Property=None
StylisticSet5Property=None
StylisticSet6Property=None
StylisticSet7Property=None
StylisticSet8Property=None
StylisticSet9Property=None
VariantsProperty=None
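# Usage sketch (IronPython + WPF; assumes the WPF assemblies are loaded and
# 'text_block' is a System.Windows.Controls.TextBlock whose font supports the
# requested OpenType features):
#
#   from System.Windows.Documents import Typography
#
#   Typography.SetKerning(text_block, True)          # attached-property setter
#   Typography.SetStylisticSet1(text_block, True)
#   Typography.GetStylisticSet1(text_block)          # -> True
#
#   # Equivalent instance form, via the element's Typography object:
#   text_block.Typography.SlashedZero = True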
# ---- app/test_celery/__init__.py (newbieof410/dockerize-flask-celery, MIT) ----
from flask import Blueprint
test_celery = Blueprint('test_celery', __name__)
from app.test_celery import views
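# A minimal sketch of how this blueprint would be registered by an application
# factory elsewhere in the package (the create_app shown here is hypothetical):
#
#   from flask import Flask
#   from app.test_celery import test_celery
#
#   def create_app():
#       app = Flask(__name__)
#       app.register_blueprint(test_celery, url_prefix='/test_celery')
#       return app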
# ---- core/extensions/passive/__init__.py (Grass-CLP/kungfu, Apache-2.0) ----
from . import ${PROJECT_NAME} as ext  # ${PROJECT_NAME} is a template placeholder, substituted before this file is used
from extensions import EXTENSION_REGISTRY_MD, EXTENSION_REGISTRY_TD
EXTENSION_REGISTRY_MD.register_extension('passive', ext.MD)
EXTENSION_REGISTRY_TD.register_extension('passive', ext.TD)
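# For the registrations above to work, the generated module must expose MD and
# TD classes (market-data and trade gateways, in kungfu's convention). A
# hypothetical sketch of the expected shape:
#
#   class MD:   # market-data gateway for the 'passive' extension
#       ...
#   class TD:   # trade gateway for the 'passive' extension
#       ...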
# ---- tests/test_pypale.py (anthonycorletti/pypale, MIT) ----
import time
from pypale import Pypale
from tests.factories import test_email
def test_pypale_valid_token(_example_dot_com_1s_token: Pypale) -> None:
email = test_email()
token = _example_dot_com_1s_token.generate_token(email=email)
assert _example_dot_com_1s_token.valid_token(return_token=token, return_email=email)
def test_pypale_invalid_token_wrong_user(_example_dot_com_1s_token: Pypale) -> None:
email = test_email(name="userA")
token = _example_dot_com_1s_token.generate_token(email=email)
assert not _example_dot_com_1s_token.valid_token(
return_token=token, return_email=test_email(name="userB")
)
def test_pypale_token_issue_expiry(_example_dot_com_1s_token: Pypale) -> None:
email = test_email()
token = _example_dot_com_1s_token.generate_token(email=email)
time.sleep(2)
assert not _example_dot_com_1s_token.valid_token(
return_token=token, return_email=email
)
def test_pypale_invalid_token_no_user(_example_dot_com_1s_token: Pypale) -> None:
email = test_email(name="user")
token = _example_dot_com_1s_token.generate_token(email=email)
assert not _example_dot_com_1s_token.valid_token(
return_token=token, return_email=None
)
def test_pypale_invalid_token_no_return_token(
_example_dot_com_1s_token: Pypale,
) -> None:
email = test_email(name="user")
assert not _example_dot_com_1s_token.valid_token(
return_token=None, return_email=email
)
def test_pypale_invalid_token_binascii(
_example_dot_com_1s_token: Pypale,
) -> None:
email = test_email(name="user")
assert not _example_dot_com_1s_token.valid_token(
return_token="notvalidmultipleof4", return_email=email
)
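# The '_example_dot_com_1s_token' fixture used above is defined outside this
# file (typically tests/conftest.py). A hypothetical sketch; the constructor
# arguments shown are assumptions, not pypale's documented signature:
#
#   import pytest
#   from pypale import Pypale
#
#   @pytest.fixture
#   def _example_dot_com_1s_token() -> Pypale:
#       # "1s" suggests a one-second issue window, consistent with the
#       # time.sleep(2) expiry test above.
#       return Pypale(base_url="example.com", secret_key="not-a-real-secret",
#                     token_ttl_minutes=1, token_issue_ttl_seconds=1)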
# ---- whitehat/images.py (EterNomm/Whitehat, Apache-2.0) ----
from .errors import *
end_hex = b"\x00\x00\x00\x00\x49\x45\x4e\x44\xae\x42\x60\x82"
class PNG:
"""
Class for PNG steganography (hiding and revealing messages or programs).
-
-----
Parameter :
- image_path: `str` | Set PNG file to modify
-----
Methods :
- hide_message
- hide_program
- reveal_message
- reveal_program
"""
def __init__(self, image_path:str):
if image_path is None:
raise CannotNone("image_path")
self.image_path = image_path
def hide_message(self, message:str):
"""
Method to hide message in PNG file.
-----
Parameter :
- message: `str` | Messages that want to hide
"""
if message is None:
raise CannotNone("message")
encoded_msg = str.encode(message)
with open(self.image_path, 'ab') as f:
f.write(encoded_msg)
print("Success!")
def hide_program(self, program_path:str):
"""
Method to hide program (`.exe`) in PNG file.
-----
Parameter :
- program_path: `str` | Program that want to hide
"""
if program_path is None:
raise CannotNone("program_path")
if ".exe" in program_path:
pass
else:
program_path = program_path + ".exe"
with open(self.image_path, 'ab') as f, open(program_path, 'rb') as p:
f.write(p.read())
print("Success!")
def reveal_message(self, encoding:str="UTF-8"):
"""
Method to reveal secret message in PNG file.
-----
Parameter :
- encoding: `str` | Default: `UTF-8`
"""
with open(self.image_path, 'rb') as f:
content = f.read()
offset = content.index(end_hex)
f.seek(offset + len(end_hex))
return f.read().decode(encoding)
def reveal_program(self, new_name:str):
"""
Method to reveal secret program and create new file (`.exe`) from inside PNG file.
-
Parameter :
- new_name: `str` | Set name for files from PNG
"""
if new_name is None:
raise CannotNone("new_name")
if ".exe" in new_name:
pass
else:
new_name = new_name + ".exe"
with open(self.image_path, 'rb') as f:
content = f.read()
offset = content.index(end_hex)
f.seek(offset + len(end_hex))
new_name = str(new_name)
with open(new_name, 'wb') as p:
p.write(f.read())
print("Success!")
class JPG:
"""
Class for JPG/JPEG steganography (hiding and revealing messages or programs).
-
-----
Parameter :
- image_path: `str` | Set JPG/JPEG file to modify
-----
Methods :
- hide_message
- hide_program
- reveal_message
- reveal_program
"""
def __init__(self, image_path:str):
if image_path is None:
raise CannotNone("image_path")
self.image_path = image_path
def hide_message(self, message:str):
"""
Method to hide message in JPG/JPEG file.
-----
Parameter :
- message: `str` | Messages that want to hide
"""
if message is None:
raise CannotNone("message")
encoded_msg = str.encode(message)
with open(self.image_path, 'ab') as f:
f.write(encoded_msg)
print("Success!")
def hide_program(self, program_path:str):
"""
Method to hide program (`.exe`) in JPG/JPEG file.
-----
Parameter :
- program_path: `str` | Program that want to hide
"""
if program_path is None:
raise CannotNone("program_path")
if ".exe" in program_path:
pass
else:
program_path = program_path + ".exe"
with open(self.image_path, 'ab') as f, open(program_path, 'rb') as p:
f.write(p.read())
print("Success!")
def reveal_message(self, encoding:str="UTF-8"):
"""
Method to reveal secret message in JPG/JPEG file.
-----
Parameter :
- encoding: `str` | Default: `UTF-8`
"""
with open(self.image_path, 'rb') as f:
content = f.read()
offset = content.index(bytes.fromhex('FFD9'))
f.seek(offset + 2)
return f.read().decode(encoding)
def reveal_program(self, new_name:str):
"""
Method to reveal secret program and create new file (`.exe`) from inside JPG/JPEG file.
-
Parameter :
- new_name: `str` | Set name for files from JPG/JPEG
"""
if new_name is None:
raise CannotNone("new_name")
if ".exe" in new_name:
pass
else:
new_name = new_name + ".exe"
with open(self.image_path, 'rb') as f:
content = f.read()
offset = content.index(bytes.fromhex('FFD9'))
f.seek(offset + 2)
new_name = str(new_name)
with open(new_name, 'wb') as p:
p.write(f.read())
print("Success!")
# ---- lg_attract_loop/src/lg_attract_loop/__init__.py (carlosvquezada/lg_ros_nodes, Apache-2.0) ----
from attract_loop import AttractLoop
from attract_loop import DirectorAPIProxy
# ---- loadModelTest.py (low2by/holofan, MIT) ----
import tensorflow as tf
import numpy as np
from PIL import Image
import matplotlib.pyplot as plt
x_test = np.load("face_landmarks_cleaned/x_test.npy")/255
model_layers = [
tf.keras.layers.SeparableConv2D( 128 , input_shape=( 96 , 96 , 1 ) , kernel_size=( 5 , 5 ) , strides=1 ),
tf.keras.layers.BatchNormalization(),
tf.keras.layers.Activation( 'relu' ) ,
tf.keras.layers.SeparableConv2D( 128 , kernel_size=( 5 , 5 ) , strides=1 ),
tf.keras.layers.BatchNormalization(),
tf.keras.layers.Activation( 'relu' ) ,
tf.keras.layers.SeparableConv2D( 128 , kernel_size=( 5 , 5 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 128 , kernel_size=( 5 , 5 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 128 , kernel_size=( 5 , 5 ) , strides=1 ),
tf.keras.layers.BatchNormalization(),
tf.keras.layers.Activation( 'relu' ) ,
tf.keras.layers.SeparableConv2D( 128 , kernel_size=( 5 , 5 ) , strides=1 ),
tf.keras.layers.BatchNormalization(),
tf.keras.layers.Activation( 'relu' ) ,
tf.keras.layers.SeparableConv2D( 128 , kernel_size=( 5 , 5 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 128 , kernel_size=( 5 , 5 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 128 , kernel_size=( 5 , 5 ) , strides=1 ),
tf.keras.layers.BatchNormalization(),
tf.keras.layers.Activation( 'relu' ) ,
tf.keras.layers.SeparableConv2D( 128 , kernel_size=( 5 , 5 ) , strides=1 ),
tf.keras.layers.BatchNormalization(),
tf.keras.layers.Activation( 'relu' ) ,
tf.keras.layers.SeparableConv2D( 64 , kernel_size=( 5 , 5 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 64 , kernel_size=( 5 , 5 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 64 , kernel_size=( 5 , 5 ) , strides=1),
tf.keras.layers.BatchNormalization(),
tf.keras.layers.Activation( 'relu' ) ,
tf.keras.layers.SeparableConv2D( 64 , kernel_size=( 5 , 5 ) , strides=1),
tf.keras.layers.BatchNormalization(),
tf.keras.layers.Activation( 'relu' ) ,
tf.keras.layers.SeparableConv2D( 64 , kernel_size=( 3 , 3 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 64 , kernel_size=( 3 , 3 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 64 , kernel_size=( 3 , 3 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 64 , kernel_size=( 3 , 3 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 64 , kernel_size=( 3 , 3 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 64 , kernel_size=( 3 , 3 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 32 , kernel_size=( 3 , 3 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 32 , kernel_size=( 3 , 3 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 32 , kernel_size=( 3 , 3 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 32 , kernel_size=( 3 , 3 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 32 , kernel_size=( 3 , 3 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 32 , kernel_size=( 3 , 3 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 32 , kernel_size=( 3 , 3 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 32 , kernel_size=( 3 , 3 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 32 , kernel_size=( 3 , 3 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 32 , kernel_size=( 3 , 3 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 32 , kernel_size=( 3 , 3 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 32 , kernel_size=( 2 , 2 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 30 , kernel_size=( 2 , 2 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 30 , kernel_size=( 2 , 2 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 30 , kernel_size=( 2 , 2 ) , strides=1 , activation='relu' ),
tf.keras.layers.SeparableConv2D( 4 , kernel_size=( 2 , 2 ) , strides=1 , activation='sigmoid' ),
]
model = tf.keras.Sequential( model_layers )
model.compile( loss=tf.keras.losses.mean_squared_error , optimizer=tf.keras.optimizers.Adam( learning_rate=0.0001 ) , metrics=[ 'mse' ] )
model.load_weights('best_weights_3.29.2021.hdf5')
# x_train = np.load( "face_landmarks_cleaned/x_train.npy" ) / 255
# print(x_train)
i = 100
im1 = np.array(Image.open('face_landmarks_cleaned/me3.jpg').convert('L')).reshape((1, 96, 96, 1))/255
im1 = im1.T
im2 = x_test[i:i+1]
pred1 = model.predict(im1) * 96
pred1 = pred1.astype(np.int32)
pred1 = np.reshape(pred1[0, 0, 0], (2, 2))
pred2 = model.predict(im2) * 96
pred2 = pred2.astype(np.int32)
pred2 = np.reshape(pred2[0, 0, 0], (2, 2))
fig = plt.figure(figsize=(50, 50))
fig.add_subplot(1, 2, 1)
plt.imshow(im1.reshape((96,96)).T.astype( float )*255 , cmap='gray' )  # np.float was removed in NumPy 1.24
plt.scatter(pred1[:, 0], pred1[:, 1], c='yellow')
fig.add_subplot(1, 2, 2)
plt.imshow(im2.reshape((96,96)).T.astype( float )*255 , cmap='gray' )
plt.scatter(pred2[:, 0], pred2[:, 1], c='yellow')
plt.show()
# ---- models.py (Niuyuhang03/pyGAT, MIT) ----
import torch.nn as nn
import torch
import torch.nn.functional as F
from layers import GraphAttentionLayer, GraphAttentionLayer_rel, StructuralFingerprintLayer, RWRLayer, GraphAttentionLayer_all
class GAT(nn.Module):
def __init__(self, nfeat, nhid, nclass, dropout, alpha, nheads, dataset, experiment):
super(GAT, self).__init__()
self.dropout = dropout
self.dataset = dataset
self.experiment = experiment
self.attentions = [GraphAttentionLayer(nfeat, nhid, dropout=dropout, alpha=alpha, concat=True) for _ in range(nheads)]
for i, attention in enumerate(self.attentions):
self.add_module('attention_{}'.format(i), attention)
self.out_att = GraphAttentionLayer(nhid * nheads, nfeat, dropout=dropout, alpha=alpha, concat=False)
self.linear_att = nn.Linear(nfeat, nclass)
def forward(self, x, adj, names=None, print_flag=False):
x = F.dropout(x, self.dropout, training=self.training)
x = torch.cat([att(x, adj) for att in self.attentions], dim=1)
x = F.dropout(x, self.dropout, training=self.training)
x = self.out_att(x, adj)
if print_flag:
with open("./{}/GAT_{}_output.txt".format(self.experiment, self.dataset), "w") as output_f:
x_array = x.cpu().detach().numpy()
for idx in range(len(x_array)):
line = names[idx].split('\t')
output_f.write(str(line[0]))
for i in x_array[idx]:
output_f.write('\t' + str(i))
output_f.write('\n')
# add an extra fully-connected layer
x = F.elu(self.linear_att(x))
return F.log_softmax(x, dim=1)
class GAT_rel(nn.Module):
def __init__(self, nfeat, nhid, nrel, nclass, dropout, alpha, nheads, dataset, experiment):
super(GAT_rel, self).__init__()
self.dropout = dropout
self.dataset = dataset
self.experiment = experiment
self.attentions = [GraphAttentionLayer_rel(nfeat, nhid, nrel=nrel, dropout=dropout, alpha=alpha, concat=True) for _ in range(nheads)]
for i, attention in enumerate(self.attentions):
self.add_module('attention_{}'.format(i), attention)
self.out_att = GraphAttentionLayer_rel(nhid * nheads, nfeat, nrel=nrel, dropout=dropout, alpha=alpha, concat=False)
self.linear_att = nn.Linear(nfeat, nclass)
def forward(self, x, rel, rel_dict, adj, names=None, print_flag=False):
x = F.dropout(x, self.dropout, training=self.training)
x = torch.cat([att(x, rel, rel_dict, adj) for att in self.attentions], dim=1)
x = F.dropout(x, self.dropout, training=self.training)
x = self.out_att(x, rel, rel_dict, adj)
if print_flag:
with open("./{}/GAT_{}_output.txt".format(self.experiment, self.dataset), "w") as output_f:
x_array = x.cpu().detach().numpy()
for idx in range(len(x_array)):
line = names[idx].split('\t')
output_f.write(str(line[0]))
for i in x_array[idx]:
output_f.write('\t' + str(i))
output_f.write('\n')
# add an extra fully-connected layer
x = F.elu(self.linear_att(x))
return F.log_softmax(x, dim=1)
class RWR_process(nn.Module):
def __init__(self, nfeat, nhid, nclass, dropout, alpha, nheads, dataset_str):
"""version of RWR_process."""
super(RWR_process, self).__init__()
self.dropout = dropout
self.attentions = [RWRLayer(nfeat, nhid, dropout=dropout, alpha=alpha, dataset_str=dataset_str, concat=True) for _ in range(nheads)]
for i, attention in enumerate(self.attentions):
self.add_module('attention_{}'.format(i), attention)
self.out_att = RWRLayer(nhid * nheads, nclass, dropout=dropout, alpha=alpha, dataset_str=dataset_str, concat=False)
def forward(self, x, adj, adj_ad, names=None, print_flag=False):
x = F.dropout(x, self.dropout, training=self.training)
x = torch.cat([att(x, adj, adj_ad) for att in self.attentions], dim=1)
x = F.dropout(x, self.dropout, training=self.training)
x = F.elu(self.out_att(x, adj, adj_ad))
return F.log_softmax(x, dim=1)
class ADSF(nn.Module):
def __init__(self, nfeat, nhid, nclass, dropout, alpha, nheads, dataset, experiment):
"""version of ADSF."""
super(ADSF, self).__init__()
self.dropout = dropout
self.dataset = dataset
self.experiment = experiment
self.attentions = [StructuralFingerprintLayer(nfeat, nhid, dropout=dropout, alpha=alpha, concat=True) for _ in range(nheads)]
for i, attention in enumerate(self.attentions):
self.add_module('attention_{}'.format(i), attention)  # registered under the name attention_i; apparently never referenced by that name
self.out_att = StructuralFingerprintLayer(nhid * nheads, nfeat, dropout=dropout, alpha=alpha, concat=False)
self.linear_att = nn.Linear(nfeat, nclass)
def forward(self, x, adj, adj_ad, names=None, print_flag=False):
x = F.dropout(x, self.dropout, training=self.training)
x = torch.cat([att(x, adj, adj_ad) for att in self.attentions], dim=1)
x = F.dropout(x, self.dropout, training=self.training)
x = F.elu(self.out_att(x, adj, adj_ad))
if print_flag:
with open("./{}/GAT_{}_output.txt".format(self.experiment, self.dataset), "w") as output_f:
x_array = x.cpu().detach().numpy()
for idx in range(len(x_array)):
line = names[idx].split('\t')
output_f.write(str(line[0]))
for i in x_array[idx]:
output_f.write('\t' + str(i))
output_f.write('\n')
x = F.elu(self.linear_att(x))
return F.log_softmax(x, dim=1)
class GAT_all(nn.Module):
def __init__(self, nfeat, nhid, nrel, nclass, dropout, alpha, nheads, dataset, experiment):
super(GAT_all, self).__init__()
self.dropout = dropout
self.dataset = dataset
self.experiment = experiment
self.attentions = [GraphAttentionLayer_all(nfeat, nhid, nrel, dropout=dropout, alpha=alpha, concat=True) for _ in range(nheads)] # nfeat -> nfeat
for i, attention in enumerate(self.attentions):
self.add_module('attention_{}'.format(i), attention)
self.out_att = GraphAttentionLayer_all(nhid * nheads, nfeat, nrel, dropout=dropout, alpha=alpha, concat=False) # nfeat * nheads -> nfeat * nheads
self.linear_att = nn.Linear(nfeat, nclass)
def forward(self, x, rel, rel_dict, adj, adj_ad, names=None, print_flag=False):
x = F.dropout(x, self.dropout, training=self.training)
x = torch.cat([att(x, rel, rel_dict, adj, adj_ad) for att in self.attentions], dim=1)
x = F.dropout(x, self.dropout, training=self.training)
x = self.out_att(x, rel, rel_dict, adj, adj_ad)
if print_flag:
with open("./{}/GAT_{}_output.txt".format(self.experiment, self.dataset), "w") as output_f:
x_array = x.cpu().detach().numpy()
for idx in range(len(x_array)):
line = names[idx].split('\t')
output_f.write(str(line[0]))
for i in x_array[idx]:
output_f.write('\t' + str(i))
output_f.write('\n')
# 增加一个全连接层
x = F.elu(self.linear_att(x))
return F.log_softmax(x, dim=1)
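
# A minimal smoke test for the ADSF head above. This is an illustrative sketch only:
# the sizes, dropout/alpha values, and the dense identity matrices standing in for
# `adj`/`adj_ad` are made-up placeholders, and StructuralFingerprintLayer is assumed
# to be the layer defined earlier in this file.
if __name__ == "__main__":
    n_nodes, nfeat, nhid, nclass, nheads = 100, 1433, 8, 7, 8
    model = ADSF(nfeat, nhid, nclass, dropout=0.6, alpha=0.2, nheads=nheads,
                 dataset='cora', experiment='exp1')
    x = torch.randn(n_nodes, nfeat)   # random node features
    adj = torch.eye(n_nodes)          # placeholder dense adjacency
    adj_ad = torch.eye(n_nodes)       # placeholder structural-fingerprint matrix
    out = model(x, adj, adj_ad)       # log-probabilities, shape (n_nodes, nclass)
    print(out.shape)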
| 51.265306 | 154 | 0.611863 | 1,006 | 7,536 | 4.427435 | 0.088469 | 0.007185 | 0.03233 | 0.053884 | 0.891109 | 0.891109 | 0.882353 | 0.870903 | 0.863044 | 0.84194 | 0 | 0.002494 | 0.255042 | 7,536 | 146 | 155 | 51.616438 | 0.79088 | 0.018976 | 0 | 0.730159 | 0 | 0 | 0.023858 | 0.011929 | 0 | 0 | 0 | 0 | 0 | 1 | 0.079365 | false | 0 | 0.031746 | 0 | 0.190476 | 0.095238 | 0 | 0 | 0 | null | 0 | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7 |
499bf5800cdea979d18b6095f0573fa39bb27f87 | 101 | py | Python | models/__init__.py | qixuxiang/Pytorch_Lightweight_Network | 25fd3148b7c635cb6cbe6dc184dbed04d6f96282 | ["MIT"] | 82 | 2019-06-17T06:00:09.000Z | 2021-11-24T09:27:23.000Z | models/__init__.py | qixuxiang/Pytorch_Lightweight_Network | 25fd3148b7c635cb6cbe6dc184dbed04d6f96282 | ["MIT"] | 4 | 2019-06-20T11:29:19.000Z | 2021-07-28T03:31:20.000Z | models/__init__.py | qixuxiang/Pytorch_Lightweight_Network | 25fd3148b7c635cb6cbe6dc184dbed04d6f96282 | ["MIT"] | 17 | 2019-06-20T11:22:34.000Z | 2021-03-16T12:37:41.000Z |
from models.defaults import set_default_activation
from models.defaults import set_default_norm_layer
| 50.5 | 50 | 0.910891 | 15 | 101 | 5.8 | 0.6 | 0.229885 | 0.413793 | 0.551724 | 0.781609 | 0.781609 | 0 | 0 | 0 | 0 | 0 | 0 | 0.069307 | 101 | 2 | 51 | 50.5 | 0.925532 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 0 | 0 | 8 |
49a0524fabf8517a5e4eb23f2f91a4d9d61d8c6c | 98 | py | Python | pymtl3/passes/tracing/__init__.py | kevinyuan/pymtl3 | 5949e6a4acc625c0ccbbb25be3af1d0db683df3c | ["BSD-3-Clause"] | 152 | 2020-06-03T02:34:11.000Z | 2022-03-30T04:16:45.000Z | pymtl3/passes/tracing/__init__.py | kevinyuan/pymtl3 | 5949e6a4acc625c0ccbbb25be3af1d0db683df3c | ["BSD-3-Clause"] | 139 | 2019-05-29T00:37:09.000Z | 2020-05-17T16:49:26.000Z | pymtl3/passes/tracing/__init__.py | kevinyuan/pymtl3 | 5949e6a4acc625c0ccbbb25be3af1d0db683df3c | ["BSD-3-Clause"] | 22 | 2020-05-18T13:42:05.000Z | 2022-03-11T08:37:51.000Z |
from .PrintTextWavePass import PrintTextWavePass
from .VcdGenerationPass import VcdGenerationPass
| 32.666667 | 48 | 0.897959 | 8 | 98 | 11 | 0.5 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.081633 | 98 | 2 | 49 | 49 | 0.977778 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 1 | 1 | 0 | 1 | 0 | 1 | 0 | 1 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 1 | 0 | 1 | 0 | 0 | 7 |
771059365b71b1191c20b6fcc0e1f53e97d9c9f3 | 136 | py | Python | src/app/database/__init__.py | william920429/repair_system | 27b5f4612a48a326943d23c2acce326070134228 | ["MIT"] | 1 | 2021-05-18T17:28:09.000Z | 2021-05-18T17:28:09.000Z | src/app/database/__init__.py | SiriusKoan/repair_system | 6524e7a8d5954b6e16b34b9d88db9d455ef3dd1d | ["MIT"] | 3 | 2021-05-02T06:00:47.000Z | 2021-05-05T11:42:58.000Z | src/app/database/__init__.py | SiriusKoan/repair_system | 6524e7a8d5954b6e16b34b9d88db9d455ef3dd1d | ["MIT"] | 3 | 2021-04-15T13:03:36.000Z | 2021-05-06T03:13:59.000Z |
from . import backup_helper, db_helper
from .common import cache
from .model import db

# `__all__` must hold names (strings), not the objects themselves,
# for `from app.database import *` to work.
__all__ = ["db", "backup_helper", "db_helper", "cache"]
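
# Why the string form matters: with module objects in `__all__`, a wildcard
# import raises a TypeError (items in `__all__` must be str); with strings,
# `from app.database import *` re-exports db, backup_helper, db_helper and cache.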
| 22.666667 | 47 | 0.779412 | 21 | 136 | 4.666667 | 0.428571 | 0.244898 | 0.285714 | 0.408163 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.147059 | 136 | 5 | 48 | 27.2 | 0.844828 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | false | 0 | 0.75 | 0 | 0.75 | 0 | 1 | 0 | 0 | null | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 0 | 7 |
7740e3afc264edef397b1dea1673c417a3649aa1 | 163,234 | py | Python | src/oci/devops/devops_client.py | Manny27nyc/oci-python-sdk | de60b04e07a99826254f7255e992f41772902df7 | ["Apache-2.0", "BSD-3-Clause"] | null | null | null | src/oci/devops/devops_client.py | Manny27nyc/oci-python-sdk | de60b04e07a99826254f7255e992f41772902df7 | ["Apache-2.0", "BSD-3-Clause"] | null | null | null | src/oci/devops/devops_client.py | Manny27nyc/oci-python-sdk | de60b04e07a99826254f7255e992f41772902df7 | ["Apache-2.0", "BSD-3-Clause"] | null | null | null |
# coding: utf-8
# Copyright (c) 2016, 2021, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.

from __future__ import absolute_import

from oci._vendor import requests  # noqa: F401
from oci._vendor import six

from oci import retry  # noqa: F401
from oci.base_client import BaseClient
from oci.config import get_config_value_or_default, validate_config
from oci.signer import Signer
from oci.util import Sentinel, get_signer_from_authentication_type, AUTHENTICATION_TYPE_FIELD_NAME
from .models import devops_type_mapping

missing = Sentinel("Missing")


class DevopsClient(object):
    """
    Use the DevOps APIs to create a DevOps project to group the pipelines, add reference to target deployment environments, add artifacts to deploy, and create deployment pipelines needed to deploy your software.
    """

    def __init__(self, config, **kwargs):
        """
        Creates a new service client

        :param dict config:
            Configuration keys and values as per `SDK and Tool Configuration <https://docs.cloud.oracle.com/Content/API/Concepts/sdkconfig.htm>`__.
            The :py:meth:`~oci.config.from_file` method can be used to load configuration from a file. Alternatively, a ``dict`` can be passed. You can validate_config
            the dict using :py:meth:`~oci.config.validate_config`
        :param str service_endpoint: (optional)
            The endpoint of the service to call using this client. For example ``https://iaas.us-ashburn-1.oraclecloud.com``. If this keyword argument is
            not provided then it will be derived using the region in the config parameter. You should only provide this keyword argument if you have an explicit
            need to specify a service endpoint.
        :param timeout: (optional)
            The connection and read timeouts for the client. The default values are connection timeout 10 seconds and read timeout 60 seconds. This keyword argument can be provided
            as a single float, in which case the value provided is used for both the read and connection timeouts, or as a tuple of two floats. If
            a tuple is provided then the first value is used as the connection timeout and the second value as the read timeout.
        :type timeout: float or tuple(float, float)
        :param signer: (optional)
            The signer to use when signing requests made by the service client. The default is to use a :py:class:`~oci.signer.Signer` based on the values
            provided in the config parameter.
            One use case for this parameter is for `Instance Principals authentication <https://docs.cloud.oracle.com/Content/Identity/Tasks/callingservicesfrominstances.htm>`__
            by passing an instance of :py:class:`~oci.auth.signers.InstancePrincipalsSecurityTokenSigner` as the value for this keyword argument
        :type signer: :py:class:`~oci.signer.AbstractBaseSigner`
        :param obj retry_strategy: (optional)
            A retry strategy to apply to all calls made by this service client (i.e. at the client level). There is no retry strategy applied by default.
            Retry strategies can also be applied at the operation level by passing a ``retry_strategy`` keyword argument as part of calling the operation.
            Any value provided at the operation level will override whatever is specified at the client level.
            This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
            is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
        """
        validate_config(config, signer=kwargs.get('signer'))
        if 'signer' in kwargs:
            signer = kwargs['signer']
        elif AUTHENTICATION_TYPE_FIELD_NAME in config:
            signer = get_signer_from_authentication_type(config)
        else:
            signer = Signer(
                tenancy=config["tenancy"],
                user=config["user"],
                fingerprint=config["fingerprint"],
                private_key_file_location=config.get("key_file"),
                pass_phrase=get_config_value_or_default(config, "pass_phrase"),
                private_key_content=config.get("key_content")
            )

        base_client_init_kwargs = {
            'regional_client': True,
            'service_endpoint': kwargs.get('service_endpoint'),
            'base_path': '/20210630',
            'service_endpoint_template': 'https://devops.{region}.oci.{secondLevelDomain}',
            'skip_deserialization': kwargs.get('skip_deserialization', False)
        }
        if 'timeout' in kwargs:
            base_client_init_kwargs['timeout'] = kwargs.get('timeout')
        self.base_client = BaseClient("devops", config, signer, devops_type_mapping, **base_client_init_kwargs)
        self.retry_strategy = kwargs.get('retry_strategy')
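
    # A construction sketch (illustrative, not part of the generated client):
    # the config path, profile name, and timeout values below are placeholders.
    #
    #     import oci
    #     config = oci.config.from_file("~/.oci/config", "DEFAULT")
    #     devops_client = oci.devops.DevopsClient(
    #         config,
    #         timeout=(10, 60),                                # (connection, read) seconds
    #         retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY  # client-level default
    #     )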
    def approve_deployment(self, deployment_id, approve_deployment_details, **kwargs):
        """
        Submit stage approval.

        :param str deployment_id: (required)
            Unique deployment identifier.
        :param oci.devops.models.ApproveDeploymentDetails approve_deployment_details: (required)
            The stage information for approval.
        :param str if_match: (optional)
            For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.
        :param str opc_request_id: (optional)
            Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
        :param str opc_retry_token: (optional)
            A token that uniquely identifies a request so it can be retried in case of a timeout or server error without risk of executing that same action again. Retry tokens expire after 24 hours, but can be invalidated earlier due to conflicting operations. For example, if a resource has been deleted and purged from the system, then a retry of the original creation request might be rejected.
        :param obj retry_strategy: (optional)
            A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
            This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
            is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
            To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

        :return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.Deployment`
        :rtype: :class:`~oci.response.Response`

        :example:
            Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/approve_deployment.py.html>`__ to see an example of how to use approve_deployment API.
        """
        resource_path = "/deployments/{deploymentId}/actions/approve"
        method = "POST"

        # Don't accept unknown kwargs
        expected_kwargs = [
            "retry_strategy",
            "if_match",
            "opc_request_id",
            "opc_retry_token"
        ]
        extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
        if extra_kwargs:
            raise ValueError(
                "approve_deployment got unknown kwargs: {!r}".format(extra_kwargs))

        path_params = {
            "deploymentId": deployment_id
        }
        path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
        for (k, v) in six.iteritems(path_params):
            if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
                raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

        header_params = {
            "accept": "application/json",
            "content-type": "application/json",
            "if-match": kwargs.get("if_match", missing),
            "opc-request-id": kwargs.get("opc_request_id", missing),
            "opc-retry-token": kwargs.get("opc_retry_token", missing)
        }
        header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

        retry_strategy = self.retry_strategy
        if kwargs.get('retry_strategy'):
            retry_strategy = kwargs.get('retry_strategy')

        if retry_strategy:
            if not isinstance(retry_strategy, retry.NoneRetryStrategy):
                self.base_client.add_opc_retry_token_if_needed(header_params)
            return retry_strategy.make_retrying_call(
                self.base_client.call_api,
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params,
                body=approve_deployment_details,
                response_type="Deployment")
        else:
            return self.base_client.call_api(
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params,
                body=approve_deployment_details,
                response_type="Deployment")
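
    # A hedged call sketch for approve_deployment. The OCIDs and token are
    # fabricated placeholders, and the ApproveDeploymentDetails fields are
    # assumptions based on the docstring, not a verified payload.
    #
    #     details = oci.devops.models.ApproveDeploymentDetails(
    #         deploy_stage_id="ocid1.devopsdeploystage.oc1..example",
    #         action="APPROVE")
    #     response = devops_client.approve_deployment(
    #         deployment_id="ocid1.devopsdeployment.oc1..example",
    #         approve_deployment_details=details,
    #         opc_retry_token="approve-token-123")  # makes a retried POST safe
    #     print(response.data.lifecycle_state)      # response.data is a Deployment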
    def cancel_deployment(self, deployment_id, cancel_deployment_details, **kwargs):
        """
        Cancels a deployment resource by identifier.

        :param str deployment_id: (required)
            Unique deployment identifier.
        :param oci.devops.models.CancelDeploymentDetails cancel_deployment_details: (required)
            The information regarding the deployment to be canceled.
        :param str if_match: (optional)
            For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.
        :param str opc_request_id: (optional)
            Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
        :param str opc_retry_token: (optional)
            A token that uniquely identifies a request so it can be retried in case of a timeout or server error without risk of executing that same action again. Retry tokens expire after 24 hours, but can be invalidated earlier due to conflicting operations. For example, if a resource has been deleted and purged from the system, then a retry of the original creation request might be rejected.
        :param obj retry_strategy: (optional)
            A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
            This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
            is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
            To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

        :return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.Deployment`
        :rtype: :class:`~oci.response.Response`

        :example:
            Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/cancel_deployment.py.html>`__ to see an example of how to use cancel_deployment API.
        """
        resource_path = "/deployments/{deploymentId}/actions/cancel"
        method = "POST"

        # Don't accept unknown kwargs
        expected_kwargs = [
            "retry_strategy",
            "if_match",
            "opc_request_id",
            "opc_retry_token"
        ]
        extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
        if extra_kwargs:
            raise ValueError(
                "cancel_deployment got unknown kwargs: {!r}".format(extra_kwargs))

        path_params = {
            "deploymentId": deployment_id
        }
        path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
        for (k, v) in six.iteritems(path_params):
            if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
                raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

        header_params = {
            "accept": "application/json",
            "content-type": "application/json",
            "if-match": kwargs.get("if_match", missing),
            "opc-request-id": kwargs.get("opc_request_id", missing),
            "opc-retry-token": kwargs.get("opc_retry_token", missing)
        }
        header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

        retry_strategy = self.retry_strategy
        if kwargs.get('retry_strategy'):
            retry_strategy = kwargs.get('retry_strategy')

        if retry_strategy:
            if not isinstance(retry_strategy, retry.NoneRetryStrategy):
                self.base_client.add_opc_retry_token_if_needed(header_params)
            return retry_strategy.make_retrying_call(
                self.base_client.call_api,
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params,
                body=cancel_deployment_details,
                response_type="Deployment")
        else:
            return self.base_client.call_api(
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params,
                body=cancel_deployment_details,
                response_type="Deployment")

    def change_project_compartment(self, project_id, change_project_compartment_details, **kwargs):
        """
        Moves a project resource from one compartment OCID to another.

        :param str project_id: (required)
            Unique project identifier.
        :param oci.devops.models.ChangeProjectCompartmentDetails change_project_compartment_details: (required)
            The information to be updated.
        :param str if_match: (optional)
            For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.
        :param str opc_request_id: (optional)
            Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
        :param str opc_retry_token: (optional)
            A token that uniquely identifies a request so it can be retried in case of a timeout or server error without risk of executing that same action again. Retry tokens expire after 24 hours, but can be invalidated earlier due to conflicting operations. For example, if a resource has been deleted and purged from the system, then a retry of the original creation request might be rejected.
        :param obj retry_strategy: (optional)
            A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
            This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
            is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
            To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

        :return: A :class:`~oci.response.Response` object with data of type None
        :rtype: :class:`~oci.response.Response`

        :example:
            Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/change_project_compartment.py.html>`__ to see an example of how to use change_project_compartment API.
        """
        resource_path = "/projects/{projectId}/actions/changeCompartment"
        method = "POST"

        # Don't accept unknown kwargs
        expected_kwargs = [
            "retry_strategy",
            "if_match",
            "opc_request_id",
            "opc_retry_token"
        ]
        extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
        if extra_kwargs:
            raise ValueError(
                "change_project_compartment got unknown kwargs: {!r}".format(extra_kwargs))

        path_params = {
            "projectId": project_id
        }
        path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
        for (k, v) in six.iteritems(path_params):
            if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
                raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

        header_params = {
            "accept": "application/json",
            "content-type": "application/json",
            "if-match": kwargs.get("if_match", missing),
            "opc-request-id": kwargs.get("opc_request_id", missing),
            "opc-retry-token": kwargs.get("opc_retry_token", missing)
        }
        header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

        retry_strategy = self.retry_strategy
        if kwargs.get('retry_strategy'):
            retry_strategy = kwargs.get('retry_strategy')

        if retry_strategy:
            if not isinstance(retry_strategy, retry.NoneRetryStrategy):
                self.base_client.add_opc_retry_token_if_needed(header_params)
            return retry_strategy.make_retrying_call(
                self.base_client.call_api,
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params,
                body=change_project_compartment_details)
        else:
            return self.base_client.call_api(
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params,
                body=change_project_compartment_details)
    def create_deploy_artifact(self, create_deploy_artifact_details, **kwargs):
        """
        Creates a new deployment artifact.

        :param oci.devops.models.CreateDeployArtifactDetails create_deploy_artifact_details: (required)
            Details for the new deployment artifact.
        :param str opc_retry_token: (optional)
            A token that uniquely identifies a request so it can be retried in case of a timeout or server error without risk of executing that same action again. Retry tokens expire after 24 hours, but can be invalidated earlier due to conflicting operations. For example, if a resource has been deleted and purged from the system, then a retry of the original creation request might be rejected.
        :param str opc_request_id: (optional)
            Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
        :param obj retry_strategy: (optional)
            A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
            This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
            is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
            To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

        :return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.DeployArtifact`
        :rtype: :class:`~oci.response.Response`

        :example:
            Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/create_deploy_artifact.py.html>`__ to see an example of how to use create_deploy_artifact API.
        """
        resource_path = "/deployArtifacts"
        method = "POST"

        # Don't accept unknown kwargs
        expected_kwargs = [
            "retry_strategy",
            "opc_retry_token",
            "opc_request_id"
        ]
        extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
        if extra_kwargs:
            raise ValueError(
                "create_deploy_artifact got unknown kwargs: {!r}".format(extra_kwargs))

        header_params = {
            "accept": "application/json",
            "content-type": "application/json",
            "opc-retry-token": kwargs.get("opc_retry_token", missing),
            "opc-request-id": kwargs.get("opc_request_id", missing)
        }
        header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

        retry_strategy = self.retry_strategy
        if kwargs.get('retry_strategy'):
            retry_strategy = kwargs.get('retry_strategy')

        if retry_strategy:
            if not isinstance(retry_strategy, retry.NoneRetryStrategy):
                self.base_client.add_opc_retry_token_if_needed(header_params)
            return retry_strategy.make_retrying_call(
                self.base_client.call_api,
                resource_path=resource_path,
                method=method,
                header_params=header_params,
                body=create_deploy_artifact_details,
                response_type="DeployArtifact")
        else:
            return self.base_client.call_api(
                resource_path=resource_path,
                method=method,
                header_params=header_params,
                body=create_deploy_artifact_details,
                response_type="DeployArtifact")
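
    # All create_* operations below share this shape, so one sketch covers the
    # pattern. The details payload is illustrative only, not a verified minimal
    # payload; the OCID and token are placeholders.
    #
    #     details = oci.devops.models.CreateDeployArtifactDetails(
    #         project_id="ocid1.devopsproject.oc1..example",
    #         deploy_artifact_type="GENERIC_FILE",
    #         argument_substitution_mode="NONE")
    #     response = devops_client.create_deploy_artifact(
    #         create_deploy_artifact_details=details,
    #         opc_retry_token="create-artifact-1")  # retried creates won't duplicate
    #     artifact_id = response.data.id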
    def create_deploy_environment(self, create_deploy_environment_details, **kwargs):
        """
        Creates a new deployment environment.

        :param oci.devops.models.CreateDeployEnvironmentDetails create_deploy_environment_details: (required)
            Details for the new deployment environment.
        :param str opc_retry_token: (optional)
            A token that uniquely identifies a request so it can be retried in case of a timeout or server error without risk of executing that same action again. Retry tokens expire after 24 hours, but can be invalidated earlier due to conflicting operations. For example, if a resource has been deleted and purged from the system, then a retry of the original creation request might be rejected.
        :param str opc_request_id: (optional)
            Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
        :param obj retry_strategy: (optional)
            A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
            This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
            is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
            To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

        :return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.DeployEnvironment`
        :rtype: :class:`~oci.response.Response`

        :example:
            Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/create_deploy_environment.py.html>`__ to see an example of how to use create_deploy_environment API.
        """
        resource_path = "/deployEnvironments"
        method = "POST"

        # Don't accept unknown kwargs
        expected_kwargs = [
            "retry_strategy",
            "opc_retry_token",
            "opc_request_id"
        ]
        extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
        if extra_kwargs:
            raise ValueError(
                "create_deploy_environment got unknown kwargs: {!r}".format(extra_kwargs))

        header_params = {
            "accept": "application/json",
            "content-type": "application/json",
            "opc-retry-token": kwargs.get("opc_retry_token", missing),
            "opc-request-id": kwargs.get("opc_request_id", missing)
        }
        header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

        retry_strategy = self.retry_strategy
        if kwargs.get('retry_strategy'):
            retry_strategy = kwargs.get('retry_strategy')

        if retry_strategy:
            if not isinstance(retry_strategy, retry.NoneRetryStrategy):
                self.base_client.add_opc_retry_token_if_needed(header_params)
            return retry_strategy.make_retrying_call(
                self.base_client.call_api,
                resource_path=resource_path,
                method=method,
                header_params=header_params,
                body=create_deploy_environment_details,
                response_type="DeployEnvironment")
        else:
            return self.base_client.call_api(
                resource_path=resource_path,
                method=method,
                header_params=header_params,
                body=create_deploy_environment_details,
                response_type="DeployEnvironment")

    def create_deploy_pipeline(self, create_deploy_pipeline_details, **kwargs):
        """
        Creates a new deployment pipeline.

        :param oci.devops.models.CreateDeployPipelineDetails create_deploy_pipeline_details: (required)
            Details for the new deployment pipeline.
        :param str opc_retry_token: (optional)
            A token that uniquely identifies a request so it can be retried in case of a timeout or server error without risk of executing that same action again. Retry tokens expire after 24 hours, but can be invalidated earlier due to conflicting operations. For example, if a resource has been deleted and purged from the system, then a retry of the original creation request might be rejected.
        :param str opc_request_id: (optional)
            Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
        :param obj retry_strategy: (optional)
            A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
            This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
            is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
            To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

        :return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.DeployPipeline`
        :rtype: :class:`~oci.response.Response`

        :example:
            Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/create_deploy_pipeline.py.html>`__ to see an example of how to use create_deploy_pipeline API.
        """
        resource_path = "/deployPipelines"
        method = "POST"

        # Don't accept unknown kwargs
        expected_kwargs = [
            "retry_strategy",
            "opc_retry_token",
            "opc_request_id"
        ]
        extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
        if extra_kwargs:
            raise ValueError(
                "create_deploy_pipeline got unknown kwargs: {!r}".format(extra_kwargs))

        header_params = {
            "accept": "application/json",
            "content-type": "application/json",
            "opc-retry-token": kwargs.get("opc_retry_token", missing),
            "opc-request-id": kwargs.get("opc_request_id", missing)
        }
        header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

        retry_strategy = self.retry_strategy
        if kwargs.get('retry_strategy'):
            retry_strategy = kwargs.get('retry_strategy')

        if retry_strategy:
            if not isinstance(retry_strategy, retry.NoneRetryStrategy):
                self.base_client.add_opc_retry_token_if_needed(header_params)
            return retry_strategy.make_retrying_call(
                self.base_client.call_api,
                resource_path=resource_path,
                method=method,
                header_params=header_params,
                body=create_deploy_pipeline_details,
                response_type="DeployPipeline")
        else:
            return self.base_client.call_api(
                resource_path=resource_path,
                method=method,
                header_params=header_params,
                body=create_deploy_pipeline_details,
                response_type="DeployPipeline")

    def create_deploy_stage(self, create_deploy_stage_details, **kwargs):
        """
        Creates a new deployment stage.

        :param oci.devops.models.CreateDeployStageDetails create_deploy_stage_details: (required)
            Details for the new DeployStage.
        :param str opc_retry_token: (optional)
            A token that uniquely identifies a request so it can be retried in case of a timeout or server error without risk of executing that same action again. Retry tokens expire after 24 hours, but can be invalidated earlier due to conflicting operations. For example, if a resource has been deleted and purged from the system, then a retry of the original creation request might be rejected.
        :param str opc_request_id: (optional)
            Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
        :param obj retry_strategy: (optional)
            A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
            This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
            is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
            To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

        :return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.DeployStage`
        :rtype: :class:`~oci.response.Response`

        :example:
            Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/create_deploy_stage.py.html>`__ to see an example of how to use create_deploy_stage API.
        """
        resource_path = "/deployStages"
        method = "POST"

        # Don't accept unknown kwargs
        expected_kwargs = [
            "retry_strategy",
            "opc_retry_token",
            "opc_request_id"
        ]
        extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
        if extra_kwargs:
            raise ValueError(
                "create_deploy_stage got unknown kwargs: {!r}".format(extra_kwargs))

        header_params = {
            "accept": "application/json",
            "content-type": "application/json",
            "opc-retry-token": kwargs.get("opc_retry_token", missing),
            "opc-request-id": kwargs.get("opc_request_id", missing)
        }
        header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

        retry_strategy = self.retry_strategy
        if kwargs.get('retry_strategy'):
            retry_strategy = kwargs.get('retry_strategy')

        if retry_strategy:
            if not isinstance(retry_strategy, retry.NoneRetryStrategy):
                self.base_client.add_opc_retry_token_if_needed(header_params)
            return retry_strategy.make_retrying_call(
                self.base_client.call_api,
                resource_path=resource_path,
                method=method,
                header_params=header_params,
                body=create_deploy_stage_details,
                response_type="DeployStage")
        else:
            return self.base_client.call_api(
                resource_path=resource_path,
                method=method,
                header_params=header_params,
                body=create_deploy_stage_details,
                response_type="DeployStage")

    def create_deployment(self, create_deployment_details, **kwargs):
        """
        Creates a new deployment.

        :param oci.devops.models.CreateDeploymentDetails create_deployment_details: (required)
            Details for the new deployment.
        :param str opc_retry_token: (optional)
            A token that uniquely identifies a request so it can be retried in case of a timeout or server error without risk of executing that same action again. Retry tokens expire after 24 hours, but can be invalidated earlier due to conflicting operations. For example, if a resource has been deleted and purged from the system, then a retry of the original creation request might be rejected.
        :param str opc_request_id: (optional)
            Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
        :param obj retry_strategy: (optional)
            A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
            This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
            is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
            To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

        :return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.Deployment`
        :rtype: :class:`~oci.response.Response`

        :example:
            Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/create_deployment.py.html>`__ to see an example of how to use create_deployment API.
        """
        resource_path = "/deployments"
        method = "POST"

        # Don't accept unknown kwargs
        expected_kwargs = [
            "retry_strategy",
            "opc_retry_token",
            "opc_request_id"
        ]
        extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
        if extra_kwargs:
            raise ValueError(
                "create_deployment got unknown kwargs: {!r}".format(extra_kwargs))

        header_params = {
            "accept": "application/json",
            "content-type": "application/json",
            "opc-retry-token": kwargs.get("opc_retry_token", missing),
            "opc-request-id": kwargs.get("opc_request_id", missing)
        }
        header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

        retry_strategy = self.retry_strategy
        if kwargs.get('retry_strategy'):
            retry_strategy = kwargs.get('retry_strategy')

        if retry_strategy:
            if not isinstance(retry_strategy, retry.NoneRetryStrategy):
                self.base_client.add_opc_retry_token_if_needed(header_params)
            return retry_strategy.make_retrying_call(
                self.base_client.call_api,
                resource_path=resource_path,
                method=method,
                header_params=header_params,
                body=create_deployment_details,
                response_type="Deployment")
        else:
            return self.base_client.call_api(
                resource_path=resource_path,
                method=method,
                header_params=header_params,
                body=create_deployment_details,
                response_type="Deployment")

    def create_project(self, create_project_details, **kwargs):
        """
        Creates a new project.

        :param oci.devops.models.CreateProjectDetails create_project_details: (required)
            Details for the new project.
        :param str opc_retry_token: (optional)
            A token that uniquely identifies a request so it can be retried in case of a timeout or server error without risk of executing that same action again. Retry tokens expire after 24 hours, but can be invalidated earlier due to conflicting operations. For example, if a resource has been deleted and purged from the system, then a retry of the original creation request might be rejected.
        :param str opc_request_id: (optional)
            Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
        :param obj retry_strategy: (optional)
            A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
            This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
            is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
            To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

        :return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.Project`
        :rtype: :class:`~oci.response.Response`

        :example:
            Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/create_project.py.html>`__ to see an example of how to use create_project API.
        """
        resource_path = "/projects"
        method = "POST"

        # Don't accept unknown kwargs
        expected_kwargs = [
            "retry_strategy",
            "opc_retry_token",
            "opc_request_id"
        ]
        extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
        if extra_kwargs:
            raise ValueError(
                "create_project got unknown kwargs: {!r}".format(extra_kwargs))

        header_params = {
            "accept": "application/json",
            "content-type": "application/json",
            "opc-retry-token": kwargs.get("opc_retry_token", missing),
            "opc-request-id": kwargs.get("opc_request_id", missing)
        }
        header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

        retry_strategy = self.retry_strategy
        if kwargs.get('retry_strategy'):
            retry_strategy = kwargs.get('retry_strategy')

        if retry_strategy:
            if not isinstance(retry_strategy, retry.NoneRetryStrategy):
                self.base_client.add_opc_retry_token_if_needed(header_params)
            return retry_strategy.make_retrying_call(
                self.base_client.call_api,
                resource_path=resource_path,
                method=method,
                header_params=header_params,
                body=create_project_details,
                response_type="Project")
        else:
            return self.base_client.call_api(
                resource_path=resource_path,
                method=method,
                header_params=header_params,
                body=create_project_details,
                response_type="Project")
    def delete_deploy_artifact(self, deploy_artifact_id, **kwargs):
        """
        Deletes a deployment artifact resource by identifier.

        :param str deploy_artifact_id: (required)
            Unique artifact identifier.
        :param str if_match: (optional)
            For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.
        :param str opc_request_id: (optional)
            Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
        :param obj retry_strategy: (optional)
            A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
            This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
            is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
            To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

        :return: A :class:`~oci.response.Response` object with data of type None
        :rtype: :class:`~oci.response.Response`

        :example:
            Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/delete_deploy_artifact.py.html>`__ to see an example of how to use delete_deploy_artifact API.
        """
        resource_path = "/deployArtifacts/{deployArtifactId}"
        method = "DELETE"

        # Don't accept unknown kwargs
        expected_kwargs = [
            "retry_strategy",
            "if_match",
            "opc_request_id"
        ]
        extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
        if extra_kwargs:
            raise ValueError(
                "delete_deploy_artifact got unknown kwargs: {!r}".format(extra_kwargs))

        path_params = {
            "deployArtifactId": deploy_artifact_id
        }
        path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
        for (k, v) in six.iteritems(path_params):
            if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
                raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

        header_params = {
            "accept": "application/json",
            "content-type": "application/json",
            "if-match": kwargs.get("if_match", missing),
            "opc-request-id": kwargs.get("opc_request_id", missing)
        }
        header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

        retry_strategy = self.retry_strategy
        if kwargs.get('retry_strategy'):
            retry_strategy = kwargs.get('retry_strategy')

        if retry_strategy:
            return retry_strategy.make_retrying_call(
                self.base_client.call_api,
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params)
        else:
            return self.base_client.call_api(
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params)
    def delete_deploy_environment(self, deploy_environment_id, **kwargs):
        """
        Deletes a deployment environment resource by identifier.

        :param str deploy_environment_id: (required)
            Unique environment identifier.
        :param str if_match: (optional)
            For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.
        :param str opc_request_id: (optional)
            Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
        :param obj retry_strategy: (optional)
            A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
            This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
            is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
            To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

        :return: A :class:`~oci.response.Response` object with data of type None
        :rtype: :class:`~oci.response.Response`

        :example:
            Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/delete_deploy_environment.py.html>`__ to see an example of how to use delete_deploy_environment API.
        """
        resource_path = "/deployEnvironments/{deployEnvironmentId}"
        method = "DELETE"

        # Don't accept unknown kwargs
        expected_kwargs = [
            "retry_strategy",
            "if_match",
            "opc_request_id"
        ]
        extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
        if extra_kwargs:
            raise ValueError(
                "delete_deploy_environment got unknown kwargs: {!r}".format(extra_kwargs))

        path_params = {
            "deployEnvironmentId": deploy_environment_id
        }
        path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
        for (k, v) in six.iteritems(path_params):
            if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
                raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

        header_params = {
            "accept": "application/json",
            "content-type": "application/json",
            "if-match": kwargs.get("if_match", missing),
            "opc-request-id": kwargs.get("opc_request_id", missing)
        }
        header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

        retry_strategy = self.retry_strategy
        if kwargs.get('retry_strategy'):
            retry_strategy = kwargs.get('retry_strategy')

        if retry_strategy:
            return retry_strategy.make_retrying_call(
                self.base_client.call_api,
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params)
        else:
            return self.base_client.call_api(
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params)

    def delete_deploy_pipeline(self, deploy_pipeline_id, **kwargs):
        """
        Deletes a deployment pipeline resource by identifier.

        :param str deploy_pipeline_id: (required)
            Unique pipeline identifier.
        :param str if_match: (optional)
            For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.
        :param str opc_request_id: (optional)
            Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
        :param obj retry_strategy: (optional)
            A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
            This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
            is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
            To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

        :return: A :class:`~oci.response.Response` object with data of type None
        :rtype: :class:`~oci.response.Response`

        :example:
            Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/delete_deploy_pipeline.py.html>`__ to see an example of how to use delete_deploy_pipeline API.
        """
        resource_path = "/deployPipelines/{deployPipelineId}"
        method = "DELETE"

        # Don't accept unknown kwargs
        expected_kwargs = [
            "retry_strategy",
            "if_match",
            "opc_request_id"
        ]
        extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
        if extra_kwargs:
            raise ValueError(
                "delete_deploy_pipeline got unknown kwargs: {!r}".format(extra_kwargs))

        path_params = {
            "deployPipelineId": deploy_pipeline_id
        }
        path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
        for (k, v) in six.iteritems(path_params):
            if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
                raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

        header_params = {
            "accept": "application/json",
            "content-type": "application/json",
            "if-match": kwargs.get("if_match", missing),
            "opc-request-id": kwargs.get("opc_request_id", missing)
        }
        header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

        retry_strategy = self.retry_strategy
        if kwargs.get('retry_strategy'):
            retry_strategy = kwargs.get('retry_strategy')

        if retry_strategy:
            return retry_strategy.make_retrying_call(
                self.base_client.call_api,
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params)
        else:
            return self.base_client.call_api(
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params)

    def delete_deploy_stage(self, deploy_stage_id, **kwargs):
        """
        Deletes a deployment stage resource by identifier.

        :param str deploy_stage_id: (required)
            Unique stage identifier.
        :param str if_match: (optional)
            For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.
        :param str opc_request_id: (optional)
            Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
        :param obj retry_strategy: (optional)
            A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
            This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
            is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
            To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

        :return: A :class:`~oci.response.Response` object with data of type None
        :rtype: :class:`~oci.response.Response`

        :example:
            Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/delete_deploy_stage.py.html>`__ to see an example of how to use delete_deploy_stage API.
        """
        resource_path = "/deployStages/{deployStageId}"
        method = "DELETE"

        # Don't accept unknown kwargs
        expected_kwargs = [
            "retry_strategy",
            "if_match",
            "opc_request_id"
        ]
        extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
        if extra_kwargs:
            raise ValueError(
                "delete_deploy_stage got unknown kwargs: {!r}".format(extra_kwargs))

        path_params = {
            "deployStageId": deploy_stage_id
        }
        path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
        for (k, v) in six.iteritems(path_params):
            if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
                raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

        header_params = {
            "accept": "application/json",
            "content-type": "application/json",
            "if-match": kwargs.get("if_match", missing),
            "opc-request-id": kwargs.get("opc_request_id", missing)
        }
        header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

        retry_strategy = self.retry_strategy
        if kwargs.get('retry_strategy'):
            retry_strategy = kwargs.get('retry_strategy')

        if retry_strategy:
            return retry_strategy.make_retrying_call(
                self.base_client.call_api,
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params)
        else:
            return self.base_client.call_api(
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params)

    def delete_project(self, project_id, **kwargs):
        """
        Deletes a project resource by identifier.

        :param str project_id: (required)
            Unique project identifier.
        :param str if_match: (optional)
            For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.
        :param str opc_request_id: (optional)
            Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
        :param obj retry_strategy: (optional)
            A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
            This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
            is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
            To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.

        :return: A :class:`~oci.response.Response` object with data of type None
        :rtype: :class:`~oci.response.Response`

        :example:
            Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/delete_project.py.html>`__ to see an example of how to use delete_project API.
        """
        resource_path = "/projects/{projectId}"
        method = "DELETE"

        # Don't accept unknown kwargs
        expected_kwargs = [
            "retry_strategy",
            "if_match",
            "opc_request_id"
        ]
        extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
        if extra_kwargs:
            raise ValueError(
                "delete_project got unknown kwargs: {!r}".format(extra_kwargs))

        path_params = {
            "projectId": project_id
        }
        path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
        for (k, v) in six.iteritems(path_params):
            if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
                raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))

        header_params = {
            "accept": "application/json",
            "content-type": "application/json",
            "if-match": kwargs.get("if_match", missing),
            "opc-request-id": kwargs.get("opc_request_id", missing)
        }
        header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}

        retry_strategy = self.retry_strategy
        if kwargs.get('retry_strategy'):
            retry_strategy = kwargs.get('retry_strategy')

        if retry_strategy:
            return retry_strategy.make_retrying_call(
                self.base_client.call_api,
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params)
        else:
            return self.base_client.call_api(
                resource_path=resource_path,
                method=method,
                path_params=path_params,
                header_params=header_params)
def get_deploy_artifact(self, deploy_artifact_id, **kwargs):
"""
Retrieves a deployment artifact by identifier.
:param str deploy_artifact_id: (required)
Unique artifact identifier.
:param str opc_request_id: (optional)
Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.DeployArtifact`
:rtype: :class:`~oci.response.Response`
:example:
Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/get_deploy_artifact.py.html>`__ to see an example of how to use get_deploy_artifact API.
"""
resource_path = "/deployArtifacts/{deployArtifactId}"
method = "GET"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"get_deploy_artifact got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"deployArtifactId": deploy_artifact_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
header_params = {
"accept": "application/json",
"content-type": "application/json",
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="DeployArtifact")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="DeployArtifact")
def get_deploy_environment(self, deploy_environment_id, **kwargs):
"""
Retrieves a deployment environment by identifier.
:param str deploy_environment_id: (required)
Unique environment identifier.
:param str opc_request_id: (optional)
Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.DeployEnvironment`
:rtype: :class:`~oci.response.Response`
:example:
Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/get_deploy_environment.py.html>`__ to see an example of how to use get_deploy_environment API.
"""
resource_path = "/deployEnvironments/{deployEnvironmentId}"
method = "GET"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"get_deploy_environment got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"deployEnvironmentId": deploy_environment_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
header_params = {
"accept": "application/json",
"content-type": "application/json",
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="DeployEnvironment")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="DeployEnvironment")
def get_deploy_pipeline(self, deploy_pipeline_id, **kwargs):
"""
Retrieves a deployment pipeline by identifier.
:param str deploy_pipeline_id: (required)
Unique pipeline identifier.
:param str opc_request_id: (optional)
Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.DeployPipeline`
:rtype: :class:`~oci.response.Response`
:example:
Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/get_deploy_pipeline.py.html>`__ to see an example of how to use get_deploy_pipeline API.
"""
resource_path = "/deployPipelines/{deployPipelineId}"
method = "GET"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"get_deploy_pipeline got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"deployPipelineId": deploy_pipeline_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
header_params = {
"accept": "application/json",
"content-type": "application/json",
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="DeployPipeline")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="DeployPipeline")
def get_deploy_stage(self, deploy_stage_id, **kwargs):
"""
Retrieves a deployment stage by identifier.
:param str deploy_stage_id: (required)
Unique stage identifier.
:param str opc_request_id: (optional)
Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.DeployStage`
:rtype: :class:`~oci.response.Response`
:example:
Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/get_deploy_stage.py.html>`__ to see an example of how to use get_deploy_stage API.
"""
resource_path = "/deployStages/{deployStageId}"
method = "GET"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"get_deploy_stage got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"deployStageId": deploy_stage_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
header_params = {
"accept": "application/json",
"content-type": "application/json",
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="DeployStage")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="DeployStage")
def get_deployment(self, deployment_id, **kwargs):
"""
Retrieves a deployment by identifier.
:param str deployment_id: (required)
Unique deployment identifier.
:param str opc_request_id: (optional)
Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.Deployment`
:rtype: :class:`~oci.response.Response`
:example:
Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/get_deployment.py.html>`__ to see an example of how to use get_deployment API.
"""
resource_path = "/deployments/{deploymentId}"
method = "GET"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"get_deployment got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"deploymentId": deployment_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
header_params = {
"accept": "application/json",
"content-type": "application/json",
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="Deployment")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="Deployment")
def get_project(self, project_id, **kwargs):
"""
Retrieves a project by identifier.
:param str project_id: (required)
Unique project identifier.
:param str opc_request_id: (optional)
Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.Project`
:rtype: :class:`~oci.response.Response`
:example:
Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/get_project.py.html>`__ to see an example of how to use get_project API.
"""
resource_path = "/projects/{projectId}"
method = "GET"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"get_project got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"projectId": project_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
header_params = {
"accept": "application/json",
"content-type": "application/json",
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="Project")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="Project")
def get_work_request(self, work_request_id, **kwargs):
"""
Retrieves the status of the work request with the given ID.
:param str work_request_id: (required)
The ID of the asynchronous work request.
:param str opc_request_id: (optional)
Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.WorkRequest`
:rtype: :class:`~oci.response.Response`
:example:
Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/get_work_request.py.html>`__ to see an example of how to use get_work_request API.
"""
resource_path = "/workRequests/{workRequestId}"
method = "GET"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"get_work_request got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"workRequestId": work_request_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
header_params = {
"accept": "application/json",
"content-type": "application/json",
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="WorkRequest")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
response_type="WorkRequest")
def list_deploy_artifacts(self, **kwargs):
"""
Returns a list of deployment artifacts.
:param str id: (optional)
Unique identifier or OCID for listing a single resource by ID.
:param str project_id: (optional)
Unique project identifier.
:param str compartment_id: (optional)
The OCID of the compartment in which to list resources.
:param str lifecycle_state: (optional)
A filter to return only DeployArtifacts that match the given lifecycleState.
Allowed values are: "CREATING", "UPDATING", "ACTIVE", "DELETING", "DELETED", "FAILED"
:param str display_name: (optional)
A filter to return only resources that match the entire display name given.
:param int limit: (optional)
The maximum number of items to return.
:param str page: (optional)
The page token representing the page at which to start retrieving results. This is usually retrieved from a previous list call.
:param str sort_order: (optional)
The sort order to use, either ascending (ASC) or descending (DESC).
Allowed values are: "ASC", "DESC"
:param str sort_by: (optional)
The field to sort by. Only one sort order may be provided. Default order for timeCreated is descending; default order for displayName is ascending. If no value is specified, timeCreated is used by default.
Allowed values are: "timeCreated", "displayName"
:param str opc_request_id: (optional)
Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.DeployArtifactCollection`
:rtype: :class:`~oci.response.Response`
:example:
Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/list_deploy_artifacts.py.html>`__ to see an example of how to use list_deploy_artifacts API.
"""
resource_path = "/deployArtifacts"
method = "GET"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"id",
"project_id",
"compartment_id",
"lifecycle_state",
"display_name",
"limit",
"page",
"sort_order",
"sort_by",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"list_deploy_artifacts got unknown kwargs: {!r}".format(extra_kwargs))
if 'lifecycle_state' in kwargs:
lifecycle_state_allowed_values = ["CREATING", "UPDATING", "ACTIVE", "DELETING", "DELETED", "FAILED"]
if kwargs['lifecycle_state'] not in lifecycle_state_allowed_values:
raise ValueError(
"Invalid value for `lifecycle_state`, must be one of {0}".format(lifecycle_state_allowed_values)
)
if 'sort_order' in kwargs:
sort_order_allowed_values = ["ASC", "DESC"]
if kwargs['sort_order'] not in sort_order_allowed_values:
raise ValueError(
"Invalid value for `sort_order`, must be one of {0}".format(sort_order_allowed_values)
)
if 'sort_by' in kwargs:
sort_by_allowed_values = ["timeCreated", "displayName"]
if kwargs['sort_by'] not in sort_by_allowed_values:
raise ValueError(
"Invalid value for `sort_by`, must be one of {0}".format(sort_by_allowed_values)
)
query_params = {
"id": kwargs.get("id", missing),
"projectId": kwargs.get("project_id", missing),
"compartmentId": kwargs.get("compartment_id", missing),
"lifecycleState": kwargs.get("lifecycle_state", missing),
"displayName": kwargs.get("display_name", missing),
"limit": kwargs.get("limit", missing),
"page": kwargs.get("page", missing),
"sortOrder": kwargs.get("sort_order", missing),
"sortBy": kwargs.get("sort_by", missing)
}
query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}
header_params = {
"accept": "application/json",
"content-type": "application/json",
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
query_params=query_params,
header_params=header_params,
response_type="DeployArtifactCollection")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
query_params=query_params,
header_params=header_params,
response_type="DeployArtifactCollection")
def list_deploy_environments(self, **kwargs):
"""
Returns a list of deployment environments.
:param str project_id: (optional)
Unique project identifier.
:param str compartment_id: (optional)
The OCID of the compartment in which to list resources.
:param str id: (optional)
Unique identifier or OCID for listing a single resource by ID.
:param str lifecycle_state: (optional)
A filter to return only DeployEnvironments that match the given lifecycleState.
Allowed values are: "CREATING", "UPDATING", "ACTIVE", "DELETING", "DELETED", "FAILED"
:param str display_name: (optional)
A filter to return only resources that match the entire display name given.
:param int limit: (optional)
The maximum number of items to return.
:param str page: (optional)
The page token representing the page at which to start retrieving results. This is usually retrieved from a previous list call.
:param str sort_order: (optional)
The sort order to use, either ascending (ASC) or descending (DESC).
Allowed values are: "ASC", "DESC"
:param str sort_by: (optional)
The field to sort by. Only one sort order may be provided. Default order for timeCreated is descending; default order for displayName is ascending. If no value is specified, timeCreated is used by default.
Allowed values are: "timeCreated", "displayName"
:param str opc_request_id: (optional)
Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.DeployEnvironmentCollection`
:rtype: :class:`~oci.response.Response`
:example:
Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/list_deploy_environments.py.html>`__ to see an example of how to use list_deploy_environments API.
"""
resource_path = "/deployEnvironments"
method = "GET"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"project_id",
"compartment_id",
"id",
"lifecycle_state",
"display_name",
"limit",
"page",
"sort_order",
"sort_by",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"list_deploy_environments got unknown kwargs: {!r}".format(extra_kwargs))
if 'lifecycle_state' in kwargs:
lifecycle_state_allowed_values = ["CREATING", "UPDATING", "ACTIVE", "DELETING", "DELETED", "FAILED"]
if kwargs['lifecycle_state'] not in lifecycle_state_allowed_values:
raise ValueError(
"Invalid value for `lifecycle_state`, must be one of {0}".format(lifecycle_state_allowed_values)
)
if 'sort_order' in kwargs:
sort_order_allowed_values = ["ASC", "DESC"]
if kwargs['sort_order'] not in sort_order_allowed_values:
raise ValueError(
"Invalid value for `sort_order`, must be one of {0}".format(sort_order_allowed_values)
)
if 'sort_by' in kwargs:
sort_by_allowed_values = ["timeCreated", "displayName"]
if kwargs['sort_by'] not in sort_by_allowed_values:
raise ValueError(
"Invalid value for `sort_by`, must be one of {0}".format(sort_by_allowed_values)
)
query_params = {
"projectId": kwargs.get("project_id", missing),
"compartmentId": kwargs.get("compartment_id", missing),
"id": kwargs.get("id", missing),
"lifecycleState": kwargs.get("lifecycle_state", missing),
"displayName": kwargs.get("display_name", missing),
"limit": kwargs.get("limit", missing),
"page": kwargs.get("page", missing),
"sortOrder": kwargs.get("sort_order", missing),
"sortBy": kwargs.get("sort_by", missing)
}
query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}
header_params = {
"accept": "application/json",
"content-type": "application/json",
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
query_params=query_params,
header_params=header_params,
response_type="DeployEnvironmentCollection")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
query_params=query_params,
header_params=header_params,
response_type="DeployEnvironmentCollection")
def list_deploy_pipelines(self, **kwargs):
"""
Returns a list of deployment pipelines.
:param str id: (optional)
Unique identifier or OCID for listing a single resource by ID.
:param str project_id: (optional)
Unique project identifier.
:param str compartment_id: (optional)
The OCID of the compartment in which to list resources.
:param str lifecycle_state: (optional)
A filter to return only DeployPipelines that match the given lifecycleState.
Allowed values are: "CREATING", "UPDATING", "ACTIVE", "INACTIVE", "DELETING", "DELETED", "FAILED"
:param str display_name: (optional)
A filter to return only resources that match the entire display name given.
:param int limit: (optional)
The maximum number of items to return.
:param str page: (optional)
The page token representing the page at which to start retrieving results. This is usually retrieved from a previous list call.
:param str sort_order: (optional)
The sort order to use, either ascending (ASC) or descending (DESC).
Allowed values are: "ASC", "DESC"
:param str sort_by: (optional)
The field to sort by. Only one sort order may be provided. Default order for timeCreated is descending; default order for displayName is ascending. If no value is specified, timeCreated is used by default.
Allowed values are: "timeCreated", "displayName"
:param str opc_request_id: (optional)
Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.DeployPipelineCollection`
:rtype: :class:`~oci.response.Response`
:example:
Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/list_deploy_pipelines.py.html>`__ to see an example of how to use list_deploy_pipelines API.
"""
resource_path = "/deployPipelines"
method = "GET"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"id",
"project_id",
"compartment_id",
"lifecycle_state",
"display_name",
"limit",
"page",
"sort_order",
"sort_by",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"list_deploy_pipelines got unknown kwargs: {!r}".format(extra_kwargs))
if 'lifecycle_state' in kwargs:
lifecycle_state_allowed_values = ["CREATING", "UPDATING", "ACTIVE", "INACTIVE", "DELETING", "DELETED", "FAILED"]
if kwargs['lifecycle_state'] not in lifecycle_state_allowed_values:
raise ValueError(
"Invalid value for `lifecycle_state`, must be one of {0}".format(lifecycle_state_allowed_values)
)
if 'sort_order' in kwargs:
sort_order_allowed_values = ["ASC", "DESC"]
if kwargs['sort_order'] not in sort_order_allowed_values:
raise ValueError(
"Invalid value for `sort_order`, must be one of {0}".format(sort_order_allowed_values)
)
if 'sort_by' in kwargs:
sort_by_allowed_values = ["timeCreated", "displayName"]
if kwargs['sort_by'] not in sort_by_allowed_values:
raise ValueError(
"Invalid value for `sort_by`, must be one of {0}".format(sort_by_allowed_values)
)
query_params = {
"id": kwargs.get("id", missing),
"projectId": kwargs.get("project_id", missing),
"compartmentId": kwargs.get("compartment_id", missing),
"lifecycleState": kwargs.get("lifecycle_state", missing),
"displayName": kwargs.get("display_name", missing),
"limit": kwargs.get("limit", missing),
"page": kwargs.get("page", missing),
"sortOrder": kwargs.get("sort_order", missing),
"sortBy": kwargs.get("sort_by", missing)
}
query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}
header_params = {
"accept": "application/json",
"content-type": "application/json",
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
query_params=query_params,
header_params=header_params,
response_type="DeployPipelineCollection")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
query_params=query_params,
header_params=header_params,
response_type="DeployPipelineCollection")
def list_deploy_stages(self, **kwargs):
"""
Returns a list of deployment stages.
:param str id: (optional)
Unique identifier or OCID for listing a single resource by ID.
:param str deploy_pipeline_id: (optional)
The ID of the parent pipeline.
:param str compartment_id: (optional)
The OCID of the compartment in which to list resources.
:param str lifecycle_state: (optional)
A filter to return only deployment stages that match the given lifecycle state.
Allowed values are: "CREATING", "UPDATING", "ACTIVE", "DELETING", "DELETED", "FAILED"
:param str display_name: (optional)
A filter to return only resources that match the entire display name given.
:param int limit: (optional)
The maximum number of items to return.
:param str page: (optional)
The page token representing the page at which to start retrieving results. This is usually retrieved from a previous list call.
:param str sort_order: (optional)
The sort order to use, either ascending (ASC) or descending (DESC).
Allowed values are: "ASC", "DESC"
:param str sort_by: (optional)
The field to sort by. Only one sort order may be provided. Default order for timeCreated is descending; default order for displayName is ascending. If no value is specified, timeCreated is used by default.
Allowed values are: "timeCreated", "displayName"
:param str opc_request_id: (optional)
Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.DeployStageCollection`
:rtype: :class:`~oci.response.Response`
:example:
Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/list_deploy_stages.py.html>`__ to see an example of how to use list_deploy_stages API.
"""
resource_path = "/deployStages"
method = "GET"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"id",
"deploy_pipeline_id",
"compartment_id",
"lifecycle_state",
"display_name",
"limit",
"page",
"sort_order",
"sort_by",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"list_deploy_stages got unknown kwargs: {!r}".format(extra_kwargs))
if 'lifecycle_state' in kwargs:
lifecycle_state_allowed_values = ["CREATING", "UPDATING", "ACTIVE", "DELETING", "DELETED", "FAILED"]
if kwargs['lifecycle_state'] not in lifecycle_state_allowed_values:
raise ValueError(
"Invalid value for `lifecycle_state`, must be one of {0}".format(lifecycle_state_allowed_values)
)
if 'sort_order' in kwargs:
sort_order_allowed_values = ["ASC", "DESC"]
if kwargs['sort_order'] not in sort_order_allowed_values:
raise ValueError(
"Invalid value for `sort_order`, must be one of {0}".format(sort_order_allowed_values)
)
if 'sort_by' in kwargs:
sort_by_allowed_values = ["timeCreated", "displayName"]
if kwargs['sort_by'] not in sort_by_allowed_values:
raise ValueError(
"Invalid value for `sort_by`, must be one of {0}".format(sort_by_allowed_values)
)
query_params = {
"id": kwargs.get("id", missing),
"deployPipelineId": kwargs.get("deploy_pipeline_id", missing),
"compartmentId": kwargs.get("compartment_id", missing),
"lifecycleState": kwargs.get("lifecycle_state", missing),
"displayName": kwargs.get("display_name", missing),
"limit": kwargs.get("limit", missing),
"page": kwargs.get("page", missing),
"sortOrder": kwargs.get("sort_order", missing),
"sortBy": kwargs.get("sort_by", missing)
}
query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}
header_params = {
"accept": "application/json",
"content-type": "application/json",
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
query_params=query_params,
header_params=header_params,
response_type="DeployStageCollection")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
query_params=query_params,
header_params=header_params,
response_type="DeployStageCollection")
def list_deployments(self, **kwargs):
"""
Returns a list of deployments.
:param str deploy_pipeline_id: (optional)
The ID of the parent pipeline.
:param str id: (optional)
Unique identifier or OCID for listing a single resource by ID.
:param str compartment_id: (optional)
The OCID of the compartment in which to list resources.
:param str project_id: (optional)
Unique project identifier.
:param str lifecycle_state: (optional)
A filter to return only Deployments that match the given lifecycleState.
Allowed values are: "ACCEPTED", "IN_PROGRESS", "FAILED", "SUCCEEDED", "CANCELING", "CANCELED"
:param str display_name: (optional)
A filter to return only resources that match the entire display name given.
:param int limit: (optional)
The maximum number of items to return.
:param str page: (optional)
The page token representing the page at which to start retrieving results. This is usually retrieved from a previous list call.
:param str sort_order: (optional)
The sort order to use, either ascending (ASC) or descending (DESC).
Allowed values are: "ASC", "DESC"
:param str sort_by: (optional)
The field to sort by. Only one sort order may be provided. Default order for timeCreated is descending; default order for displayName is ascending. If no value is specified, timeCreated is used by default.
Allowed values are: "timeCreated", "displayName"
:param str opc_request_id: (optional)
Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
:param datetime time_created_less_than: (optional)
Search for DevOps resources that were created before a specific date. Specifying this parameter, which corresponds to the `timeCreatedLessThan` query parameter, retrieves all deployments created before the given date, in \"YYYY-MM-ddThh:mmZ\" format with a Z offset, as defined by `RFC3339`__.
__ https://datatracker.ietf.org/doc/html/rfc3339
:param datetime time_created_greater_than_or_equal_to: (optional)
Search for DevOps resources that were created on or after a specific date. Specifying this parameter, which corresponds to the `timeCreatedGreaterThanOrEqualTo` query parameter, retrieves all deployments created on or after the given date, in \"YYYY-MM-ddThh:mmZ\" format with a Z offset, as defined by `RFC3339`__.
__ https://datatracker.ietf.org/doc/html/rfc3339
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.DeploymentCollection`
:rtype: :class:`~oci.response.Response`
:example:
Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/list_deployments.py.html>`__ to see an example of how to use list_deployments API.
"""
resource_path = "/deployments"
method = "GET"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"deploy_pipeline_id",
"id",
"compartment_id",
"project_id",
"lifecycle_state",
"display_name",
"limit",
"page",
"sort_order",
"sort_by",
"opc_request_id",
"time_created_less_than",
"time_created_greater_than_or_equal_to"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"list_deployments got unknown kwargs: {!r}".format(extra_kwargs))
if 'lifecycle_state' in kwargs:
lifecycle_state_allowed_values = ["ACCEPTED", "IN_PROGRESS", "FAILED", "SUCCEEDED", "CANCELING", "CANCELED"]
if kwargs['lifecycle_state'] not in lifecycle_state_allowed_values:
raise ValueError(
"Invalid value for `lifecycle_state`, must be one of {0}".format(lifecycle_state_allowed_values)
)
if 'sort_order' in kwargs:
sort_order_allowed_values = ["ASC", "DESC"]
if kwargs['sort_order'] not in sort_order_allowed_values:
raise ValueError(
"Invalid value for `sort_order`, must be one of {0}".format(sort_order_allowed_values)
)
if 'sort_by' in kwargs:
sort_by_allowed_values = ["timeCreated", "displayName"]
if kwargs['sort_by'] not in sort_by_allowed_values:
raise ValueError(
"Invalid value for `sort_by`, must be one of {0}".format(sort_by_allowed_values)
)
query_params = {
"deployPipelineId": kwargs.get("deploy_pipeline_id", missing),
"id": kwargs.get("id", missing),
"compartmentId": kwargs.get("compartment_id", missing),
"projectId": kwargs.get("project_id", missing),
"lifecycleState": kwargs.get("lifecycle_state", missing),
"displayName": kwargs.get("display_name", missing),
"limit": kwargs.get("limit", missing),
"page": kwargs.get("page", missing),
"sortOrder": kwargs.get("sort_order", missing),
"sortBy": kwargs.get("sort_by", missing),
"timeCreatedLessThan": kwargs.get("time_created_less_than", missing),
"timeCreatedGreaterThanOrEqualTo": kwargs.get("time_created_greater_than_or_equal_to", missing)
}
query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}
header_params = {
"accept": "application/json",
"content-type": "application/json",
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
query_params=query_params,
header_params=header_params,
response_type="DeploymentCollection")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
query_params=query_params,
header_params=header_params,
response_type="DeploymentCollection")
def list_projects(self, compartment_id, **kwargs):
"""
Returns a list of projects.
:param str compartment_id: (required)
The OCID of the compartment in which to list resources.
:param str id: (optional)
Unique identifier or OCID for listing a single resource by ID.
:param str lifecycle_state: (optional)
A filter to return only Projects that match the given lifecycleState.
Allowed values are: "CREATING", "UPDATING", "ACTIVE", "DELETING", "DELETED", "FAILED"
:param str name: (optional)
A filter to return only resources that match the entire name given.
:param int limit: (optional)
The maximum number of items to return.
:param str page: (optional)
The page token representing the page at which to start retrieving results. This is usually retrieved from a previous list call.
:param str sort_order: (optional)
The sort order to use, either ascending (ASC) or descending (DESC).
Allowed values are: "ASC", "DESC"
:param str sort_by: (optional)
The field to sort by. Only one sort order may be provided. Default order for timeCreated is descending; default order for displayName is ascending. If no value is specified, timeCreated is used by default.
Allowed values are: "timeCreated", "displayName"
:param str opc_request_id: (optional)
Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.ProjectCollection`
:rtype: :class:`~oci.response.Response`
:example:
Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/list_projects.py.html>`__ to see an example of how to use list_projects API.
"""
resource_path = "/projects"
method = "GET"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"id",
"lifecycle_state",
"name",
"limit",
"page",
"sort_order",
"sort_by",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"list_projects got unknown kwargs: {!r}".format(extra_kwargs))
if 'lifecycle_state' in kwargs:
lifecycle_state_allowed_values = ["CREATING", "UPDATING", "ACTIVE", "DELETING", "DELETED", "FAILED"]
if kwargs['lifecycle_state'] not in lifecycle_state_allowed_values:
raise ValueError(
"Invalid value for `lifecycle_state`, must be one of {0}".format(lifecycle_state_allowed_values)
)
if 'sort_order' in kwargs:
sort_order_allowed_values = ["ASC", "DESC"]
if kwargs['sort_order'] not in sort_order_allowed_values:
raise ValueError(
"Invalid value for `sort_order`, must be one of {0}".format(sort_order_allowed_values)
)
if 'sort_by' in kwargs:
sort_by_allowed_values = ["timeCreated", "displayName"]
if kwargs['sort_by'] not in sort_by_allowed_values:
raise ValueError(
"Invalid value for `sort_by`, must be one of {0}".format(sort_by_allowed_values)
)
query_params = {
"id": kwargs.get("id", missing),
"compartmentId": compartment_id,
"lifecycleState": kwargs.get("lifecycle_state", missing),
"name": kwargs.get("name", missing),
"limit": kwargs.get("limit", missing),
"page": kwargs.get("page", missing),
"sortOrder": kwargs.get("sort_order", missing),
"sortBy": kwargs.get("sort_by", missing)
}
query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}
header_params = {
"accept": "application/json",
"content-type": "application/json",
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
query_params=query_params,
header_params=header_params,
response_type="ProjectCollection")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
query_params=query_params,
header_params=header_params,
response_type="ProjectCollection")
def list_work_request_errors(self, work_request_id, **kwargs):
"""
Returns a list of errors for a given work request.
:param str work_request_id: (required)
The ID of the asynchronous work request.
:param str opc_request_id: (optional)
Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
:param str page: (optional)
The page token representing the page at which to start retrieving results. This is usually retrieved from a previous list call.
:param int limit: (optional)
The maximum number of items to return.
:param str sort_order: (optional)
The sort order to use, either ascending (ASC) or descending (DESC).
Allowed values are: "ASC", "DESC"
:param str sort_by: (optional)
The field to sort by. Only one sort order can be provided. The default is descending by the timeAccepted field.
Allowed values are: "timeAccepted"
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.WorkRequestErrorCollection`
:rtype: :class:`~oci.response.Response`
:example:
Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/list_work_request_errors.py.html>`__ to see an example of how to use list_work_request_errors API.
"""
resource_path = "/workRequests/{workRequestId}/errors"
method = "GET"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"opc_request_id",
"page",
"limit",
"sort_order",
"sort_by"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"list_work_request_errors got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"workRequestId": work_request_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
if 'sort_order' in kwargs:
sort_order_allowed_values = ["ASC", "DESC"]
if kwargs['sort_order'] not in sort_order_allowed_values:
raise ValueError(
"Invalid value for `sort_order`, must be one of {0}".format(sort_order_allowed_values)
)
if 'sort_by' in kwargs:
sort_by_allowed_values = ["timeAccepted"]
if kwargs['sort_by'] not in sort_by_allowed_values:
raise ValueError(
"Invalid value for `sort_by`, must be one of {0}".format(sort_by_allowed_values)
)
query_params = {
"page": kwargs.get("page", missing),
"limit": kwargs.get("limit", missing),
"sortOrder": kwargs.get("sort_order", missing),
"sortBy": kwargs.get("sort_by", missing)
}
query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}
header_params = {
"accept": "application/json",
"content-type": "application/json",
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
query_params=query_params,
header_params=header_params,
response_type="WorkRequestErrorCollection")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
query_params=query_params,
header_params=header_params,
response_type="WorkRequestErrorCollection")
def list_work_request_logs(self, work_request_id, **kwargs):
"""
Returns a list of logs for a given work request.
:param str work_request_id: (required)
The ID of the asynchronous work request.
:param str opc_request_id: (optional)
Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
:param str page: (optional)
The page token representing the page at which to start retrieving results. This is usually retrieved from a previous list call.
:param int limit: (optional)
The maximum number of items to return.
:param str sort_order: (optional)
The sort order to use, either ascending (ASC) or descending (DESC).
Allowed values are: "ASC", "DESC"
:param str sort_by: (optional)
The field to sort by. Only one sort order can be provided. The default is descending by the timeAccepted field.
Allowed values are: "timeAccepted"
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.WorkRequestLogEntryCollection`
:rtype: :class:`~oci.response.Response`
:example:
Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/list_work_request_logs.py.html>`__ to see an example of how to use list_work_request_logs API.
"""
resource_path = "/workRequests/{workRequestId}/logs"
method = "GET"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"opc_request_id",
"page",
"limit",
"sort_order",
"sort_by"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"list_work_request_logs got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"workRequestId": work_request_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
if 'sort_order' in kwargs:
sort_order_allowed_values = ["ASC", "DESC"]
if kwargs['sort_order'] not in sort_order_allowed_values:
raise ValueError(
"Invalid value for `sort_order`, must be one of {0}".format(sort_order_allowed_values)
)
if 'sort_by' in kwargs:
sort_by_allowed_values = ["timeAccepted"]
if kwargs['sort_by'] not in sort_by_allowed_values:
raise ValueError(
"Invalid value for `sort_by`, must be one of {0}".format(sort_by_allowed_values)
)
query_params = {
"page": kwargs.get("page", missing),
"limit": kwargs.get("limit", missing),
"sortOrder": kwargs.get("sort_order", missing),
"sortBy": kwargs.get("sort_by", missing)
}
query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}
header_params = {
"accept": "application/json",
"content-type": "application/json",
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
query_params=query_params,
header_params=header_params,
response_type="WorkRequestLogEntryCollection")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
query_params=query_params,
header_params=header_params,
response_type="WorkRequestLogEntryCollection")
def list_work_requests(self, compartment_id, **kwargs):
"""
Lists the work requests in a compartment.
:param str compartment_id: (required)
The OCID of the compartment in which to list resources.
:param str work_request_id: (optional)
The ID of the asynchronous work request.
:param str status: (optional)
A filter to return only work requests whose lifecycle state matches the given operation status.
Allowed values are: "ACCEPTED", "IN_PROGRESS", "FAILED", "SUCCEEDED", "CANCELING", "CANCELED"
:param str resource_id: (optional)
The ID of the resource affected by the work request.
:param str opc_request_id: (optional)
Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
:param str page: (optional)
The page token representing the page at which to start retrieving results. This is usually retrieved from a previous list call.
:param int limit: (optional)
The maximum number of items to return.
:param str sort_order: (optional)
The sort order to use, either ascending (ASC) or descending (DESC).
Allowed values are: "ASC", "DESC"
:param str sort_by: (optional)
The field to sort by. Only one sort order can be provided. The default is descending by the timeAccepted field.
Allowed values are: "timeAccepted"
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.WorkRequestCollection`
:rtype: :class:`~oci.response.Response`
:example:
        Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/list_work_requests.py.html>`__ to see an example of how to use the list_work_requests API.
"""
resource_path = "/workRequests"
method = "GET"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"work_request_id",
"status",
"resource_id",
"opc_request_id",
"page",
"limit",
"sort_order",
"sort_by"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"list_work_requests got unknown kwargs: {!r}".format(extra_kwargs))
if 'status' in kwargs:
status_allowed_values = ["ACCEPTED", "IN_PROGRESS", "FAILED", "SUCCEEDED", "CANCELING", "CANCELED"]
if kwargs['status'] not in status_allowed_values:
raise ValueError(
"Invalid value for `status`, must be one of {0}".format(status_allowed_values)
)
if 'sort_order' in kwargs:
sort_order_allowed_values = ["ASC", "DESC"]
if kwargs['sort_order'] not in sort_order_allowed_values:
raise ValueError(
"Invalid value for `sort_order`, must be one of {0}".format(sort_order_allowed_values)
)
if 'sort_by' in kwargs:
sort_by_allowed_values = ["timeAccepted"]
if kwargs['sort_by'] not in sort_by_allowed_values:
raise ValueError(
"Invalid value for `sort_by`, must be one of {0}".format(sort_by_allowed_values)
)
query_params = {
"compartmentId": compartment_id,
"workRequestId": kwargs.get("work_request_id", missing),
"status": kwargs.get("status", missing),
"resourceId": kwargs.get("resource_id", missing),
"page": kwargs.get("page", missing),
"limit": kwargs.get("limit", missing),
"sortOrder": kwargs.get("sort_order", missing),
"sortBy": kwargs.get("sort_by", missing)
}
query_params = {k: v for (k, v) in six.iteritems(query_params) if v is not missing and v is not None}
header_params = {
"accept": "application/json",
"content-type": "application/json",
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
query_params=query_params,
header_params=header_params,
response_type="WorkRequestCollection")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
query_params=query_params,
header_params=header_params,
response_type="WorkRequestCollection")
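    # A hedged sketch of draining the paginated list with the SDK's pagination
    # helper, `oci.pagination.list_call_get_all_results`; the compartment OCID
    # is a placeholder:
    #
    #     from oci.pagination import list_call_get_all_results
    #     all_work_requests = list_call_get_all_results(
    #         devops_client.list_work_requests,
    #         compartment_id="ocid1.compartment.oc1..example",
    #         status="IN_PROGRESS").data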
def update_deploy_artifact(self, deploy_artifact_id, update_deploy_artifact_details, **kwargs):
"""
Updates the deployment artifact.
:param str deploy_artifact_id: (required)
Unique artifact identifier.
:param oci.devops.models.UpdateDeployArtifactDetails update_deploy_artifact_details: (required)
The information to be updated.
:param str if_match: (optional)
For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.
:param str opc_request_id: (optional)
Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.DeployArtifact`
:rtype: :class:`~oci.response.Response`
:example:
        Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/update_deploy_artifact.py.html>`__ to see an example of how to use the update_deploy_artifact API.
"""
resource_path = "/deployArtifacts/{deployArtifactId}"
method = "PUT"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"if_match",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"update_deploy_artifact got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"deployArtifactId": deploy_artifact_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
header_params = {
"accept": "application/json",
"content-type": "application/json",
"if-match": kwargs.get("if_match", missing),
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
body=update_deploy_artifact_details,
response_type="DeployArtifact")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
body=update_deploy_artifact_details,
response_type="DeployArtifact")
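    # A hedged sketch of the `if_match` optimistic-concurrency pattern, which
    # applies equally to the other update_* operations below. The OCID and the
    # new description are placeholders:
    #
    #     from oci.devops.models import UpdateDeployArtifactDetails
    #     artifact_id = "ocid1.devopsdeployartifact.oc1..example"
    #     current = devops_client.get_deploy_artifact(artifact_id)
    #     devops_client.update_deploy_artifact(
    #         artifact_id,
    #         UpdateDeployArtifactDetails(description="updated description"),
    #         if_match=current.headers["etag"])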
def update_deploy_environment(self, deploy_environment_id, update_deploy_environment_details, **kwargs):
"""
Updates the deployment environment.
:param str deploy_environment_id: (required)
Unique environment identifier.
:param oci.devops.models.UpdateDeployEnvironmentDetails update_deploy_environment_details: (required)
The information to be updated.
:param str if_match: (optional)
For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.
:param str opc_request_id: (optional)
Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.DeployEnvironment`
:rtype: :class:`~oci.response.Response`
:example:
        Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/update_deploy_environment.py.html>`__ to see an example of how to use the update_deploy_environment API.
"""
resource_path = "/deployEnvironments/{deployEnvironmentId}"
method = "PUT"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"if_match",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"update_deploy_environment got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"deployEnvironmentId": deploy_environment_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
header_params = {
"accept": "application/json",
"content-type": "application/json",
"if-match": kwargs.get("if_match", missing),
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
body=update_deploy_environment_details,
response_type="DeployEnvironment")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
body=update_deploy_environment_details,
response_type="DeployEnvironment")
def update_deploy_pipeline(self, deploy_pipeline_id, update_deploy_pipeline_details, **kwargs):
"""
Updates the deployment pipeline.
:param str deploy_pipeline_id: (required)
Unique pipeline identifier.
:param oci.devops.models.UpdateDeployPipelineDetails update_deploy_pipeline_details: (required)
The information to be updated.
:param str if_match: (optional)
For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.
:param str opc_request_id: (optional)
Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.DeployPipeline`
:rtype: :class:`~oci.response.Response`
:example:
        Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/update_deploy_pipeline.py.html>`__ to see an example of how to use the update_deploy_pipeline API.
"""
resource_path = "/deployPipelines/{deployPipelineId}"
method = "PUT"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"if_match",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"update_deploy_pipeline got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"deployPipelineId": deploy_pipeline_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
header_params = {
"accept": "application/json",
"content-type": "application/json",
"if-match": kwargs.get("if_match", missing),
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
body=update_deploy_pipeline_details,
response_type="DeployPipeline")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
body=update_deploy_pipeline_details,
response_type="DeployPipeline")
def update_deploy_stage(self, deploy_stage_id, update_deploy_stage_details, **kwargs):
"""
Updates the deployment stage.
:param str deploy_stage_id: (required)
Unique stage identifier.
:param oci.devops.models.UpdateDeployStageDetails update_deploy_stage_details: (required)
The information to be updated.
:param str if_match: (optional)
For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.
:param str opc_request_id: (optional)
Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.DeployStage`
:rtype: :class:`~oci.response.Response`
:example:
        Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/update_deploy_stage.py.html>`__ to see an example of how to use the update_deploy_stage API.
"""
resource_path = "/deployStages/{deployStageId}"
method = "PUT"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"if_match",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"update_deploy_stage got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"deployStageId": deploy_stage_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
header_params = {
"accept": "application/json",
"content-type": "application/json",
"if-match": kwargs.get("if_match", missing),
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
body=update_deploy_stage_details,
response_type="DeployStage")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
body=update_deploy_stage_details,
response_type="DeployStage")
def update_deployment(self, deployment_id, update_deployment_details, **kwargs):
"""
Updates the deployment.
:param str deployment_id: (required)
Unique deployment identifier.
:param oci.devops.models.UpdateDeploymentDetails update_deployment_details: (required)
The information to be updated.
:param str if_match: (optional)
For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.
:param str opc_request_id: (optional)
Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.Deployment`
:rtype: :class:`~oci.response.Response`
:example:
        Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/update_deployment.py.html>`__ to see an example of how to use the update_deployment API.
"""
resource_path = "/deployments/{deploymentId}"
method = "PUT"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"if_match",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"update_deployment got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"deploymentId": deployment_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
header_params = {
"accept": "application/json",
"content-type": "application/json",
"if-match": kwargs.get("if_match", missing),
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
body=update_deployment_details,
response_type="Deployment")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
body=update_deployment_details,
response_type="Deployment")
def update_project(self, project_id, update_project_details, **kwargs):
"""
Updates the project.
:param str project_id: (required)
Unique project identifier.
:param oci.devops.models.UpdateProjectDetails update_project_details: (required)
The information to be updated.
:param str if_match: (optional)
For optimistic concurrency control. In the PUT or DELETE call for a resource, set the `if-match` parameter to the value of the etag from a previous GET or POST response for that resource. The resource will be updated or deleted only if the etag you provide matches the resource's current etag value.
:param str opc_request_id: (optional)
Unique Oracle-assigned identifier for the request. If you need to contact Oracle about a particular request, provide the request ID.
:param obj retry_strategy: (optional)
A retry strategy to apply to this specific operation/call. This will override any retry strategy set at the client-level.
This should be one of the strategies available in the :py:mod:`~oci.retry` module. A convenience :py:data:`~oci.retry.DEFAULT_RETRY_STRATEGY`
is also available. The specifics of the default retry strategy are described `here <https://docs.oracle.com/en-us/iaas/tools/python/latest/sdk_behaviors/retries.html>`__.
To have this operation explicitly not perform any retries, pass an instance of :py:class:`~oci.retry.NoneRetryStrategy`.
:return: A :class:`~oci.response.Response` object with data of type :class:`~oci.devops.models.Project`
:rtype: :class:`~oci.response.Response`
:example:
        Click `here <https://docs.cloud.oracle.com/en-us/iaas/tools/python-sdk-examples/latest/devops/update_project.py.html>`__ to see an example of how to use the update_project API.
"""
resource_path = "/projects/{projectId}"
method = "PUT"
# Don't accept unknown kwargs
expected_kwargs = [
"retry_strategy",
"if_match",
"opc_request_id"
]
extra_kwargs = [_key for _key in six.iterkeys(kwargs) if _key not in expected_kwargs]
if extra_kwargs:
raise ValueError(
"update_project got unknown kwargs: {!r}".format(extra_kwargs))
path_params = {
"projectId": project_id
}
path_params = {k: v for (k, v) in six.iteritems(path_params) if v is not missing}
for (k, v) in six.iteritems(path_params):
if v is None or (isinstance(v, six.string_types) and len(v.strip()) == 0):
raise ValueError('Parameter {} cannot be None, whitespace or empty string'.format(k))
header_params = {
"accept": "application/json",
"content-type": "application/json",
"if-match": kwargs.get("if_match", missing),
"opc-request-id": kwargs.get("opc_request_id", missing)
}
header_params = {k: v for (k, v) in six.iteritems(header_params) if v is not missing and v is not None}
retry_strategy = self.retry_strategy
if kwargs.get('retry_strategy'):
retry_strategy = kwargs.get('retry_strategy')
if retry_strategy:
return retry_strategy.make_retrying_call(
self.base_client.call_api,
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
body=update_project_details,
response_type="Project")
else:
return self.base_client.call_api(
resource_path=resource_path,
method=method,
path_params=path_params,
header_params=header_params,
body=update_project_details,
response_type="Project")
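    # A hedged sketch of overriding retries for a single call via the
    # documented `oci.retry` module (`details` stands in for a placeholder
    # UpdateProjectDetails instance):
    #
    #     import oci.retry
    #     devops_client.update_project(
    #         project_id="ocid1.devopsproject.oc1..example",
    #         update_project_details=details,
    #         retry_strategy=oci.retry.DEFAULT_RETRY_STRATEGY)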
# ---- dataset record boundary (numeric quality-signal columns omitted) ----
# file: math3/tests/test_rectangle.py | repo: PhloxAR/math3 @ 6c8e4066ae74b607cefa209c42cb19e26c09c600 | license: BSD-3-Clause | size: 4,652 bytes | hexsha: 77475f30cfb2a8731cc5d744b664913db78c9fa0
try:
import unittest2 as unittest
except ImportError:
import unittest
import numpy as np
from math3.funcs import rect
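# Note (inferred from the assertions below, not stated in the original file):
# these tests treat a rectangle as a 2x2 array, [[x, y], [width, height]],
# with the position in the first row and the size in the second.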
class test_rectangle(unittest.TestCase):
def test_import(self):
import math3
        math3.funcs.rect  # touch the module attribute to confirm the import works
def test_create(self):
result = rect.create()
np.testing.assert_almost_equal(result, [[0,0],[1,1]], decimal=5)
def test_create_dtype(self):
        result = rect.create(dtype=np.float64)  # np.float was removed from NumPy; use an explicit dtype
np.testing.assert_almost_equal(result, [[0.,0.],[1.,1.]], decimal=5)
def test_create_zeros(self):
result = rect.create_zeros()
np.testing.assert_almost_equal(result, [[0,0],[0,0]], decimal=5)
def test_create_from_bounds(self):
result = rect.create_from_bounds(-1, 1, -2, 2)
np.testing.assert_almost_equal(result, [[-1,-2],[2,4]], decimal=5)
def test_bounds(self):
        r = rect.create_from_bounds(-1, 1, -2, 2)  # use a local name that does not shadow the module
        result = rect.bounds(r)
np.testing.assert_almost_equal(result, (-1,1,-2,2), decimal=5)
def test_scale_by_vector(self):
result = rect.scale_by_vector([[-1., -2.], [2., 4.]], [2., 3.])
np.testing.assert_almost_equal(result, [[-2.,-6.],[4.,12.]], decimal=5)
def test_scale_by_vector3(self):
result = rect.scale_by_vector([[-1., -2.], [2., 4.]], [2., 3., 4.])
np.testing.assert_almost_equal(result, [[-2.,-6.],[4.,12.]], decimal=5)
def test_right(self):
result = rect.right([[1., 2.], [3., 4.]])
np.testing.assert_almost_equal(result, 4., decimal=5)
def test_right_negative(self):
result = rect.right([[1., 2.], [-3., -4.]])
np.testing.assert_almost_equal(result, 1., decimal=5)
def test_left(self):
result = rect.left([[1., 2.], [3., 4.]])
np.testing.assert_almost_equal(result, 1., decimal=5)
def test_left_negative(self):
result = rect.left([[1., 2.], [-3., -4.]])
np.testing.assert_almost_equal(result, -2., decimal=5)
def test_top(self):
result = rect.top([[1., 2.], [3., 4.]])
np.testing.assert_almost_equal(result, 6., decimal=5)
def test_top_negative(self):
result = rect.top([[1., 2.], [-3., -4.]])
np.testing.assert_almost_equal(result, 2., decimal=5)
def test_bottom(self):
result = rect.bottom([[1., 2.], [3., 4.]])
np.testing.assert_almost_equal(result, 2., decimal=5)
def test_bottom_negative(self):
result = rect.bottom([[1., 2.], [-3., -4.]])
np.testing.assert_almost_equal(result, -2., decimal=5)
def test_x(self):
result = rect.x([[1., 2.], [3., 4.]])
np.testing.assert_almost_equal(result, 1., decimal=5)
def test_x_negative(self):
result = rect.x([[1., 2.], [-3., -4.]])
np.testing.assert_almost_equal(result, 1., decimal=5)
def test_y(self):
result = rect.y([[1., 2.], [3., 4.]])
np.testing.assert_almost_equal(result, 2., decimal=5)
def test_y_negative(self):
result = rect.y([[1., 2.], [-3., -4.]])
np.testing.assert_almost_equal(result, 2., decimal=5)
def test_width(self):
result = rect.width([[1., 2.], [3., 4.]])
np.testing.assert_almost_equal(result, 3., decimal=5)
def test_width_negative(self):
result = rect.width([[1., 2.], [-3., -4.]])
np.testing.assert_almost_equal(result, -3., decimal=5)
def test_height(self):
result = rect.height([[1., 2.], [3., 4.]])
np.testing.assert_almost_equal(result, 4., decimal=5)
def test_height_negative(self):
result = rect.height([[1., 2.], [-3., -4.]])
np.testing.assert_almost_equal(result, -4., decimal=5)
def test_abs_height(self):
result = rect.abs_height([[1., 2.], [3., 4.]])
np.testing.assert_almost_equal(result, 4., decimal=5)
def test_abs_height_negative(self):
result = rect.abs_height([[1., 2.], [-3., -4.]])
np.testing.assert_almost_equal(result, 4., decimal=5)
def test_abs_width(self):
result = rect.abs_width([[1., 2.], [3., 4.]])
np.testing.assert_almost_equal(result, 3., decimal=5)
def test_abs_width_negative(self):
result = rect.abs_width([[1., 2.], [-3., -4.]])
np.testing.assert_almost_equal(result, 3., decimal=5)
def test_position(self):
result = rect.position([[1., 2.], [-3., -4.]])
np.testing.assert_almost_equal(result, [1.,2.], decimal=5)
def test_size(self):
result = rect.size([[1., 2.], [-3., -4.]])
np.testing.assert_almost_equal(result, [-3.,-4.], decimal=5)
if __name__ == '__main__':
unittest.main()
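# Running this suite directly uses the standard unittest runner (nothing
# project-specific is assumed):
#
#     python -m unittest math3.tests.test_rectangle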
# ---- dataset record boundary (numeric quality-signal columns omitted) ----
# file: src/apd/aggregation/exceptions.py | repo: MatthewWilkes/apd.aggregation @ 427fa908f45332d623295f92e1ccfdaf545d6997 | license: BSD-3-Clause | size: 396 bytes | hexsha: 6200e90fe7a8913b663b83bfc42dc6ead61522dc
class NoDataForTrigger(ValueError):
"""An error that's raised when a trigger is passed
a data point that cannot be handled due to an incompatible
value being stored"""
pass
class IncompatibleTriggerError(NoDataForTrigger):
"""An error that's raised when a trigger is passed
a data point that cannot be handled due to an incompatible
value being stored"""
pass
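# A hypothetical usage sketch (not part of the original module): a trigger's
# handler might distinguish the two errors like this. The `datapoint` shape
# here is assumed purely for illustration.
#
#     def handle(self, datapoint):
#         if datapoint.data is None:
#             raise NoDataForTrigger("data point carries no value")
#         if not isinstance(datapoint.data, (int, float)):
#             raise IncompatibleTriggerError(
#                 "stored value is incompatible with this trigger")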
# ---- dataset record boundary (numeric quality-signal columns omitted) ----
# file: validator/tests/test_responder/tests.py | repo: mer1dian/sprawl @ e52cba9dda418e6c00122669fe465686cb8e018f | license: Apache-2.0 | size: 22,202 bytes | hexsha: 620464f544a69582d27f577ae54a544c71772fc9
# Copyright 2017 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from sawtooth_validator.protobuf import network_pb2
from sawtooth_validator.protobuf import validator_pb2
from sawtooth_validator.protobuf import block_pb2
from sawtooth_validator.protobuf import batch_pb2
from sawtooth_validator.protobuf import transaction_pb2
from sawtooth_validator.journal.responder import Responder
from sawtooth_validator.journal.responder import BlockResponderHandler
from sawtooth_validator.journal.responder import BatchByBatchIdResponderHandler
from sawtooth_validator.journal.responder import \
BatchByTransactionIdResponderHandler
from sawtooth_validator.journal.responder import ResponderBlockResponseHandler
from sawtooth_validator.journal.responder import ResponderBatchResponseHandler
from test_responder.mock import MockGossip
from test_responder.mock import MockCompleter
class TestResponder(unittest.TestCase):
def setUp(self):
self.gossip = MockGossip()
self.completer = MockCompleter()
self.responder = Responder(self.completer)
self.block_request_handler = \
BlockResponderHandler(self.responder, self.gossip)
self.block_response_handler = \
ResponderBlockResponseHandler(self.responder, self.gossip)
self.batch_request_handler = \
BatchByBatchIdResponderHandler(self.responder, self.gossip)
self.batch_response_handler = \
ResponderBatchResponseHandler(self.responder, self.gossip)
self.batch_by_txn_request_handler = \
BatchByTransactionIdResponderHandler(self.responder, self.gossip)
# Tests
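    # Protocol sketch, summarizing the behaviour the tests below exercise:
    # a Gossip*Request that the local completer cannot satisfy is re-broadcast
    # and recorded as a pending request; a later Gossip*Response whose id
    # matches a pending request is forwarded to the original requester, and
    # the pending entry is then removed.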
def test_block_responder_handler(self):
"""
Test that the BlockResponderHandler correctly broadcasts a received
request that the Responder cannot respond to, or sends a
GossipBlockResponse back to the connection_id the handler received
the request from.
"""
# The completer does not have the requested block
message = network_pb2.GossipBlockRequest(
block_id="ABC",
node_id=b"1",
nonce="1")
self.block_request_handler.handle(
"Connection_1", message.SerializeToString())
# If we cannot respond to the request, broadcast the block request
        # and add it to the pending requests
self.assert_message_was_broadcasted(
message, validator_pb2.Message.GOSSIP_BLOCK_REQUEST)
self.assert_request_pending(
requested_id="ABC", connection_id="Connection_1")
self.assert_message_not_sent(connection_id="Connection_1")
# Add the block to the completer and resend the Block Request
block = block_pb2.Block(header_signature="ABC")
self.completer.add_block(block)
message = network_pb2.GossipBlockRequest(
block_id="ABC",
node_id=b"1",
nonce="2")
self.block_request_handler.handle(
"Connection_1", message.SerializeToString())
        # Check that a Block Response was sent back to "Connection_1"
self.assert_message_sent(
connection_id="Connection_1",
message_type=validator_pb2.Message.GOSSIP_BLOCK_RESPONSE
)
def test_block_responder_handler_requested(self):
"""
Test that the BlockResponderHandler correctly broadcasts a received
request that the Responder cannot respond to, and does not rebroadcast
        the same request. If we have already received the
request, do nothing.
"""
message = network_pb2.GossipBlockRequest(
block_id="ABC",
node_id=b"1",
nonce="1"
)
self.block_request_handler.handle(
"Connection_1", message.SerializeToString())
# If we cannot respond to the request, broadcast the block request
        # and add it to the pending requests
self.assert_message_was_broadcasted(
message, validator_pb2.Message.GOSSIP_BLOCK_REQUEST)
self.assert_request_pending(
requested_id="ABC", connection_id="Connection_1")
self.assert_message_not_sent(connection_id="Connection_1")
self.gossip.clear()
# Message should be dropped since the same message has already been
# handled
self.block_request_handler.handle(
"Connection_2", message.SerializeToString())
self.assert_message_was_not_broadcasted(
message, validator_pb2.Message.GOSSIP_BLOCK_REQUEST)
self.assert_request_not_pending(
requested_id="ABC", connection_id="Connection_2")
message = network_pb2.GossipBlockRequest(
block_id="ABC",
node_id=b"1",
nonce="2"
)
self.block_request_handler.handle(
"Connection_2", message.SerializeToString())
self.assert_message_was_not_broadcasted(
message, validator_pb2.Message.GOSSIP_BLOCK_REQUEST)
self.assert_request_pending(
requested_id="ABC", connection_id="Connection_2")
self.assert_message_not_sent(connection_id="Connection_2")
def test_responder_block_response_handler(self):
"""
Test that the ResponderBlockResponseHandler, after receiving a Block
Response, checks to see if the responder has any pending request for
that response and forwards the response on to the connection_id that
had requested it.
"""
# The Responder does not have any pending requests for block "ABC"
block = block_pb2.Block(header_signature="ABC")
response_message = network_pb2.GossipBlockResponse(
content=block.SerializeToString(), node_id=b"1")
self.block_response_handler.handle(
"Connection_1", response_message.SerializeToString())
# ResponderBlockResponseHandler should not send any messages.
self.assert_message_not_sent("Connection_1")
self.assert_request_not_pending(requested_id="ABC")
# Handle a request message for block "ABC". This adds it to the pending
# request queue.
request_message = \
network_pb2.GossipBlockRequest(block_id="ABC", node_id=b"1")
self.block_request_handler.handle(
"Connection_2", request_message.SerializeToString())
self.assert_request_pending(
requested_id="ABC", connection_id="Connection_2")
        # Handle the BlockResponse message. Since Connection_2 had requested
        # the block but the request could not be fulfilled at the time, the
        # received BlockResponse is forwarded to Connection_2.
self.block_response_handler.handle(
"Connection_1", response_message.SerializeToString())
self.assert_message_sent(
connection_id="Connection_2",
message_type=validator_pb2.Message.GOSSIP_BLOCK_RESPONSE
)
# The request for block "ABC" from "Connection_2" is no longer pending
# it should be removed from the pending request cache.
self.assert_request_not_pending(requested_id="ABC")
def test_batch_by_id_responder_handler(self):
"""
Test that the BatchByBatchIdResponderHandler correctly broadcasts a
received request that the Responder cannot respond to, or sends a
GossipBatchResponse back to the connection_id the handler received
the request from.
"""
# The completer does not have the requested batch
message = network_pb2.GossipBatchByBatchIdRequest(
id="abc",
node_id=b"1",
nonce="1")
self.batch_request_handler.handle(
"Connection_1", message.SerializeToString())
        # If we cannot respond to the request, broadcast the batch request
        # and add it to the pending requests
self.assert_message_was_broadcasted(
message, validator_pb2.Message.GOSSIP_BATCH_BY_BATCH_ID_REQUEST)
self.assert_request_pending(
requested_id="abc", connection_id="Connection_1")
self.assert_message_not_sent(connection_id="Connection_1")
# Add the batch to the completer and resend the BatchByBatchIdRequest
message = network_pb2.GossipBatchByBatchIdRequest(
id="abc",
node_id=b"1",
nonce="2")
batch = batch_pb2.Batch(header_signature="abc")
self.completer.add_batch(batch)
self.batch_request_handler.handle(
"Connection_1", message.SerializeToString())
        # Check that a Batch Response was sent back to "Connection_1"
self.assert_message_sent(
connection_id="Connection_1",
message_type=validator_pb2.Message.GOSSIP_BATCH_RESPONSE
)
def test_batch_by_id_responder_handler_requested(self):
"""
Test that the BatchByBatchIdResponderHandler correctly broadcasts
a received request that the Responder cannot respond to, and does not
        rebroadcast the same request. If we have already received the
request, do nothing.
"""
# The completer does not have the requested batch
message = network_pb2.GossipBatchByBatchIdRequest(
id="abc",
node_id=b"1",
nonce="1")
self.batch_request_handler.handle(
"Connection_1", message.SerializeToString())
        # If we cannot respond to the request, broadcast the batch request
        # and add it to the pending requests
self.assert_message_was_broadcasted(
message, validator_pb2.Message.GOSSIP_BATCH_BY_BATCH_ID_REQUEST)
self.assert_request_pending(
requested_id="abc", connection_id="Connection_1")
self.assert_message_not_sent(connection_id="Connection_1")
self.gossip.clear()
# Message should be dropped since the same message has already been
# handled
self.batch_request_handler.handle(
"Connection_2", message.SerializeToString())
self.assert_message_was_not_broadcasted(
message, validator_pb2.Message.GOSSIP_BATCH_BY_BATCH_ID_REQUEST)
self.assert_request_not_pending(
requested_id="abc", connection_id="Connection_2")
message = network_pb2.GossipBatchByBatchIdRequest(
id="abc",
node_id=b"1",
nonce="2")
self.batch_request_handler.handle(
"Connection_2", message.SerializeToString())
self.assert_message_was_not_broadcasted(
message, validator_pb2.Message.GOSSIP_BATCH_BY_BATCH_ID_REQUEST)
self.assert_request_pending(
requested_id="abc", connection_id="Connection_2")
self.assert_message_not_sent(connection_id="Connection_2")
def test_batch_by_transaction_id_response_handler(self):
"""
Test that the BatchByTransactionIdResponderHandler correctly broadcasts
a received request that the Responder cannot respond to, or sends a
GossipBatchResponse back to the connection_id the handler received
the request from.
"""
# The completer does not have the requested batch with the transaction
message = network_pb2.GossipBatchByTransactionIdRequest(
ids=["123"],
node_id=b"1",
nonce="1")
self.batch_by_txn_request_handler.handle(
"Connection_1", message.SerializeToString())
        # If we cannot respond to the request, broadcast the batch request
        # and add it to the pending requests
self.assert_message_was_broadcasted(
message,
validator_pb2.Message.GOSSIP_BATCH_BY_TRANSACTION_ID_REQUEST
)
self.assert_request_pending(
requested_id="123", connection_id="Connection_1")
self.assert_message_not_sent(connection_id="Connection_1")
# Add the batch to the completer and resend the
# BatchByTransactionIdRequest
message = network_pb2.GossipBatchByTransactionIdRequest(
ids=["123"],
node_id=b"1",
nonce="2")
transaction = transaction_pb2.Transaction(header_signature="123")
batch = batch_pb2.Batch(
header_signature="abc", transactions=[transaction])
self.completer.add_batch(batch)
self.batch_request_handler.handle(
"Connection_1", message.SerializeToString())
        # Check that a Batch Response was sent back to "Connection_1"
self.assert_message_sent(
connection_id="Connection_1",
message_type=validator_pb2.Message.GOSSIP_BATCH_RESPONSE
)
def test_batch_by_transaction_id_response_handler_requested(self):
"""
Test that the BatchByTransactionIdResponderHandler correctly broadcasts
a received request that the Responder cannot respond to, and does not
        rebroadcast the same request. If we have already received the
request, do nothing.
"""
# The completer does not have the requested batch with the transaction
message = network_pb2.GossipBatchByTransactionIdRequest(
ids=["123"], node_id=b"1")
self.batch_by_txn_request_handler.handle(
"Connection_1", message.SerializeToString())
        # If we cannot respond to the request, broadcast the batch request
        # and add it to the pending requests
self.assert_message_was_broadcasted(
message,
validator_pb2.Message.GOSSIP_BATCH_BY_TRANSACTION_ID_REQUEST
)
self.assert_request_pending(
requested_id="123", connection_id="Connection_1")
self.assert_message_not_sent(connection_id="Connection_1")
self.gossip.clear()
# Message should be dropped since the same message has already been
# handled
self.batch_by_txn_request_handler.handle(
"Connection_2", message.SerializeToString())
self.assert_message_was_not_broadcasted(
message,
validator_pb2.Message.GOSSIP_BATCH_BY_TRANSACTION_ID_REQUEST
)
self.assert_request_not_pending(
requested_id="123", connection_id="Connection_2")
message = network_pb2.GossipBatchByTransactionIdRequest(
ids=["123"],
node_id=b"1",
nonce="2")
self.batch_by_txn_request_handler.handle(
"Connection_2", message.SerializeToString())
self.assert_message_was_not_broadcasted(
message,
validator_pb2.Message.GOSSIP_BATCH_BY_TRANSACTION_ID_REQUEST
)
self.assert_request_pending(
requested_id="123", connection_id="Connection_2")
self.assert_message_not_sent(connection_id="Connection_2")
def test_batch_by_transaction_id_multiple_txn_ids(self):
"""
Test that the BatchByTransactionIdResponderHandler correctly broadcasts
a new request with only the transaction_ids that the Responder cannot
respond to, and sends a GossipBatchResponse for the transactions_id
requests that can be satisfied.
"""
# Add batch that has txn 123
transaction = transaction_pb2.Transaction(header_signature="123")
batch = batch_pb2.Batch(
header_signature="abc", transactions=[transaction])
self.completer.add_batch(batch)
# Request transactions 123 and 456
message = network_pb2.GossipBatchByTransactionIdRequest(
ids=["123", "456"], node_id=b"1")
self.batch_by_txn_request_handler.handle(
"Connection_1", message.SerializeToString())
self.batch_request_handler.handle(
"Connection_1", message.SerializeToString())
# Respond with a BatchResponse for transaction 123
self.assert_message_sent(
connection_id="Connection_1",
message_type=validator_pb2.Message.GOSSIP_BATCH_RESPONSE
)
# Broadcast a BatchByTransactionIdRequest for just 456
request_message = \
network_pb2.GossipBatchByTransactionIdRequest(
ids=["456"], node_id=b"1")
self.assert_message_was_broadcasted(
request_message,
validator_pb2.Message.GOSSIP_BATCH_BY_TRANSACTION_ID_REQUEST)
# And set a pending request for 456
self.assert_request_pending(
requested_id="456", connection_id="Connection_1")
def test_responder_batch_response_handler(self):
"""
Test that the ResponderBatchResponseHandler, after receiving a Batch
Response, checks to see if the responder has any pending request for
that batch and forwards the response on to the connection_id that
had requested it.
"""
# The Responder does not have any pending requests for block "ABC"
batch = batch_pb2.Batch(header_signature="abc")
response_message = network_pb2.GossipBatchResponse(
content=batch.SerializeToString(), node_id=b"1")
self.batch_response_handler.handle(
"Connection_1", response_message.SerializeToString())
# ResponderBlockResponseHandler should not send any messages.
self.assert_message_not_sent("Connection_1")
self.assert_request_not_pending(requested_id="abc")
# Handle a request message for batch "abc". This adds it to the pending
# request queue.
request_message = \
network_pb2.GossipBatchByBatchIdRequest(id="abc", node_id=b"1")
self.batch_request_handler.handle(
"Connection_2", request_message.SerializeToString())
self.assert_request_pending(
requested_id="abc", connection_id="Connection_2")
        # Handle the BatchResponse message. Since Connection_2 had requested
        # the batch but the request could not be fulfilled at the time, the
        # received BatchResponse is forwarded to Connection_2.
self.batch_response_handler.handle(
"Connection_1", response_message.SerializeToString())
self.assert_message_sent(
connection_id="Connection_2",
message_type=validator_pb2.Message.GOSSIP_BATCH_RESPONSE
)
# The request for batch "abc" from "Connection_2" is no longer pending
# it should be removed from the pending request cache.
self.assert_request_not_pending(requested_id="abc")
def test_responder_batch_response_txn_handler(self):
"""
Test that the ResponderBatchResponseHandler, after receiving a Batch
Response, checks to see if the responder has any pending request for
        the transactions in the batch and forwards the response on to the
        connection_id that had requested them.
"""
transaction = transaction_pb2.Transaction(header_signature="123")
batch = batch_pb2.Batch(
header_signature="abc", transactions=[transaction])
response_message = network_pb2.GossipBatchResponse(
content=batch.SerializeToString(), node_id=b"1")
request_message = \
network_pb2.GossipBatchByTransactionIdRequest(
ids=["123"], node_id=b"1")
        # Send a BatchByTransactionIdRequest for txn "123" and add it to the
# pending request cache
self.batch_request_handler.handle(
"Connection_2", request_message.SerializeToString())
self.assert_request_pending(
requested_id="123", connection_id="Connection_2")
# Send Batch Response that contains the batch that has txn "123"
self.batch_response_handler.handle(
"Connection_1", response_message.SerializeToString())
        # Handle the BatchResponse message. Since Connection_2 had requested
        # the txn_id in the batch but the request could not be fulfilled at
        # the time, the received BatchResponse is forwarded to Connection_2.
self.assert_message_sent(
connection_id="Connection_2",
message_type=validator_pb2.Message.GOSSIP_BATCH_RESPONSE
)
# The request for transaction_id "123" from "Connection_2" is no
# longer pending it should be removed from the pending request cache.
self.assert_request_not_pending(requested_id="123")
# assertions
def assert_message_was_broadcasted(self, message, message_type):
self.assertIn(message, self.gossip.broadcasted[message_type])
def assert_message_was_not_broadcasted(self, message, message_type):
if message_type in self.gossip.broadcasted:
self.assertNotIn(message, self.gossip.broadcasted[message_type])
else:
self.assertIsNone(self.gossip.broadcasted.get(message_type))
def assert_message_not_sent(self, connection_id):
self.assertIsNone(self.gossip.sent.get(connection_id))
def assert_message_sent(self, connection_id, message_type):
self.assertIsNotNone(self.gossip.sent.get(connection_id))
        self.assertEqual(self.gossip.sent.get(connection_id)[0][0],
                         message_type)
def assert_request_pending(self, requested_id, connection_id):
self.assertIn(connection_id, self.responder.get_request(requested_id))
def assert_request_not_pending(self, requested_id, connection_id=None):
if self.responder.get_request(requested_id) is not None:
self.assertFalse(
connection_id in self.responder.get_request(requested_id))
else:
self.assertIsNone(self.responder.get_request(requested_id))
# ---- dataset record boundary (numeric quality-signal columns omitted) ----
# file: lightning_transformers/task/nlp/token_classification/__init__.py | repo: maksym-taranukhin/lightning-transformers @ aa7202657973b5b65c3c36eb745621043859ebc4 | license: Apache-2.0 | size: 230 bytes | hexsha: 6239a00bad7194fee5c556c9040a17cb8d4668d8
from lightning_transformers.task.nlp.token_classification.data import TokenClassificationDataModule # noqa: F401
from lightning_transformers.task.nlp.token_classification.model import TokenClassificationTransformer # noqa: F401
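# A minimal usage sketch: the re-exports above let callers import both names
# from the task package directly:
#
#     from lightning_transformers.task.nlp.token_classification import (
#         TokenClassificationDataModule,
#         TokenClassificationTransformer,
#     )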
# ---- dataset record boundary (numeric quality-signal columns omitted) ----
# file: sdk/python/pulumi_gcp/cloudbuild/trigger.py | repo: la3mmchen/pulumi-gcp @ 0e3c6fecd062dff78a4fd95b7ebd5ce4492ad1ea | licenses: ECL-2.0, Apache-2.0 | size: 69,617 bytes | hexsha: 623f72d93bdc0b847ca08c6f0fba214d41168205
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['TriggerArgs', 'Trigger']
@pulumi.input_type
class TriggerArgs:
def __init__(__self__, *,
build: Optional[pulumi.Input['TriggerBuildArgs']] = None,
description: Optional[pulumi.Input[str]] = None,
disabled: Optional[pulumi.Input[bool]] = None,
filename: Optional[pulumi.Input[str]] = None,
github: Optional[pulumi.Input['TriggerGithubArgs']] = None,
ignored_files: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
included_files: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
pubsub_config: Optional[pulumi.Input['TriggerPubsubConfigArgs']] = None,
service_account: Optional[pulumi.Input[str]] = None,
substitutions: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
trigger_template: Optional[pulumi.Input['TriggerTriggerTemplateArgs']] = None,
webhook_config: Optional[pulumi.Input['TriggerWebhookConfigArgs']] = None):
"""
The set of arguments for constructing a Trigger resource.
:param pulumi.Input['TriggerBuildArgs'] build: Contents of the build template. Either a filename or build template must be provided.
Structure is documented below.
:param pulumi.Input[str] description: Human-readable description of the trigger.
:param pulumi.Input[bool] disabled: Whether the trigger is disabled or not. If true, the trigger will never result in a build.
        :param pulumi.Input[str] filename: Path, from the source root, to a file whose contents are used for the template. Either a filename or build template must be provided.
:param pulumi.Input['TriggerGithubArgs'] github: Describes the configuration of a trigger that creates a build whenever a GitHub event is received.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
:param pulumi.Input[Sequence[pulumi.Input[str]]] ignored_files: ignoredFiles and includedFiles are file glob matches using https://golang.org/pkg/path/filepath/#Match
extended with support for `**`.
If ignoredFiles and changed files are both empty, then they are not
used to determine whether or not to trigger a build.
If ignoredFiles is not empty, then we ignore any files that match any
of the ignored_file globs. If the change has no files that are outside
of the ignoredFiles globs, then we do not trigger a build.
:param pulumi.Input[Sequence[pulumi.Input[str]]] included_files: ignoredFiles and includedFiles are file glob matches using https://golang.org/pkg/path/filepath/#Match
extended with support for `**`.
If any of the files altered in the commit pass the ignoredFiles filter
and includedFiles is empty, then as far as this filter is concerned, we
should trigger the build.
If any of the files altered in the commit pass the ignoredFiles filter
and includedFiles is not empty, then we make sure that at least one of
those files matches a includedFiles glob. If not, then we do not trigger
a build.
:param pulumi.Input[str] name: Name of the volume to mount.
Volume names must be unique per build step and must be valid names for Docker volumes.
Each named volume must be used by at least two build steps.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input['TriggerPubsubConfigArgs'] pubsub_config: PubsubConfig describes the configuration of a trigger that creates
a build whenever a Pub/Sub message is published.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
:param pulumi.Input[str] service_account: The service account used for all user-controlled operations including
triggers.patch, triggers.run, builds.create, and builds.cancel.
If no service account is set, then the standard Cloud Build service account
([PROJECT_NUM]@system.gserviceaccount.com) will be used instead.
Format: projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT_ID_OR_EMAIL}
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] substitutions: Substitutions to use in a triggered build. Should only be used with triggers.run
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: Tags for annotation of a Build. These are not docker tags.
:param pulumi.Input['TriggerTriggerTemplateArgs'] trigger_template: Template describing the types of source changes to trigger a build.
Branch and tag names in trigger templates are interpreted as regular
expressions. Any branch or tag change that matches that regular
expression will trigger a build.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
:param pulumi.Input['TriggerWebhookConfigArgs'] webhook_config: WebhookConfig describes the configuration of a trigger that creates
a build whenever a webhook is sent to a trigger's webhook URL.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
"""
if build is not None:
pulumi.set(__self__, "build", build)
if description is not None:
pulumi.set(__self__, "description", description)
if disabled is not None:
pulumi.set(__self__, "disabled", disabled)
if filename is not None:
pulumi.set(__self__, "filename", filename)
if github is not None:
pulumi.set(__self__, "github", github)
if ignored_files is not None:
pulumi.set(__self__, "ignored_files", ignored_files)
if included_files is not None:
pulumi.set(__self__, "included_files", included_files)
if name is not None:
pulumi.set(__self__, "name", name)
if project is not None:
pulumi.set(__self__, "project", project)
if pubsub_config is not None:
pulumi.set(__self__, "pubsub_config", pubsub_config)
if service_account is not None:
pulumi.set(__self__, "service_account", service_account)
if substitutions is not None:
pulumi.set(__self__, "substitutions", substitutions)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if trigger_template is not None:
pulumi.set(__self__, "trigger_template", trigger_template)
if webhook_config is not None:
pulumi.set(__self__, "webhook_config", webhook_config)
@property
@pulumi.getter
def build(self) -> Optional[pulumi.Input['TriggerBuildArgs']]:
"""
Contents of the build template. Either a filename or build template must be provided.
Structure is documented below.
"""
return pulumi.get(self, "build")
@build.setter
def build(self, value: Optional[pulumi.Input['TriggerBuildArgs']]):
pulumi.set(self, "build", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
Human-readable description of the trigger.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def disabled(self) -> Optional[pulumi.Input[bool]]:
"""
Whether the trigger is disabled or not. If true, the trigger will never result in a build.
"""
return pulumi.get(self, "disabled")
@disabled.setter
def disabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "disabled", value)
@property
@pulumi.getter
def filename(self) -> Optional[pulumi.Input[str]]:
"""
Path, from the source root, to a file whose contents are used for the template. Either a filename or build template must be provided.
"""
return pulumi.get(self, "filename")
@filename.setter
def filename(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "filename", value)
@property
@pulumi.getter
def github(self) -> Optional[pulumi.Input['TriggerGithubArgs']]:
"""
Describes the configuration of a trigger that creates a build whenever a GitHub event is received.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
"""
return pulumi.get(self, "github")
@github.setter
def github(self, value: Optional[pulumi.Input['TriggerGithubArgs']]):
pulumi.set(self, "github", value)
@property
@pulumi.getter(name="ignoredFiles")
def ignored_files(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
ignoredFiles and includedFiles are file glob matches using https://golang.org/pkg/path/filepath/#Match
extended with support for `**`.
If ignoredFiles and changed files are both empty, then they are not
used to determine whether or not to trigger a build.
If ignoredFiles is not empty, then we ignore any files that match any
of the ignored_files globs. If the change has no files that are outside
of the ignoredFiles globs, then we do not trigger a build.
"""
return pulumi.get(self, "ignored_files")
@ignored_files.setter
def ignored_files(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "ignored_files", value)
@property
@pulumi.getter(name="includedFiles")
def included_files(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
ignoredFiles and includedFiles are file glob matches using https://golang.org/pkg/path/filepath/#Match
extended with support for `**`.
If any of the files altered in the commit pass the ignoredFiles filter
and includedFiles is empty, then as far as this filter is concerned, we
should trigger the build.
If any of the files altered in the commit pass the ignoredFiles filter
and includedFiles is not empty, then we make sure that at least one of
those files matches an includedFiles glob. If not, then we do not trigger
a build.
"""
return pulumi.get(self, "included_files")
@included_files.setter
def included_files(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "included_files", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the trigger. Must be unique within the project.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
return pulumi.get(self, "project")
@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project", value)
@property
@pulumi.getter(name="pubsubConfig")
def pubsub_config(self) -> Optional[pulumi.Input['TriggerPubsubConfigArgs']]:
"""
PubsubConfig describes the configuration of a trigger that creates
a build whenever a Pub/Sub message is published.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
"""
return pulumi.get(self, "pubsub_config")
@pubsub_config.setter
def pubsub_config(self, value: Optional[pulumi.Input['TriggerPubsubConfigArgs']]):
pulumi.set(self, "pubsub_config", value)
@property
@pulumi.getter(name="serviceAccount")
def service_account(self) -> Optional[pulumi.Input[str]]:
"""
The service account used for all user-controlled operations including
triggers.patch, triggers.run, builds.create, and builds.cancel.
If no service account is set, then the standard Cloud Build service account
([PROJECT_NUM]@system.gserviceaccount.com) will be used instead.
Format: projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT_ID_OR_EMAIL}
"""
return pulumi.get(self, "service_account")
@service_account.setter
def service_account(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "service_account", value)
@property
@pulumi.getter
def substitutions(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Substitutions to use in a triggered build. Should only be used with triggers.run.
"""
return pulumi.get(self, "substitutions")
@substitutions.setter
def substitutions(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "substitutions", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Tags for annotation of a Build. These are not Docker tags.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="triggerTemplate")
def trigger_template(self) -> Optional[pulumi.Input['TriggerTriggerTemplateArgs']]:
"""
Template describing the types of source changes to trigger a build.
Branch and tag names in trigger templates are interpreted as regular
expressions. Any branch or tag change that matches that regular
expression will trigger a build.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
"""
return pulumi.get(self, "trigger_template")
@trigger_template.setter
def trigger_template(self, value: Optional[pulumi.Input['TriggerTriggerTemplateArgs']]):
pulumi.set(self, "trigger_template", value)
@property
@pulumi.getter(name="webhookConfig")
def webhook_config(self) -> Optional[pulumi.Input['TriggerWebhookConfigArgs']]:
"""
WebhookConfig describes the configuration of a trigger that creates
a build whenever a webhook is sent to a trigger's webhook URL.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
"""
return pulumi.get(self, "webhook_config")
@webhook_config.setter
def webhook_config(self, value: Optional[pulumi.Input['TriggerWebhookConfigArgs']]):
pulumi.set(self, "webhook_config", value)
@pulumi.input_type
class _TriggerState:
def __init__(__self__, *,
build: Optional[pulumi.Input['TriggerBuildArgs']] = None,
create_time: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
disabled: Optional[pulumi.Input[bool]] = None,
filename: Optional[pulumi.Input[str]] = None,
github: Optional[pulumi.Input['TriggerGithubArgs']] = None,
ignored_files: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
included_files: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
pubsub_config: Optional[pulumi.Input['TriggerPubsubConfigArgs']] = None,
service_account: Optional[pulumi.Input[str]] = None,
substitutions: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
trigger_id: Optional[pulumi.Input[str]] = None,
trigger_template: Optional[pulumi.Input['TriggerTriggerTemplateArgs']] = None,
webhook_config: Optional[pulumi.Input['TriggerWebhookConfigArgs']] = None):
"""
Input properties used for looking up and filtering Trigger resources.
:param pulumi.Input['TriggerBuildArgs'] build: Contents of the build template. Either a filename or build template must be provided.
Structure is documented below.
:param pulumi.Input[str] create_time: Time when the trigger was created.
:param pulumi.Input[str] description: Human-readable description of the trigger.
:param pulumi.Input[bool] disabled: Whether the trigger is disabled or not. If true, the trigger will never result in a build.
:param pulumi.Input[str] filename: Path, from the source root, to a file whose contents are used for the template. Either a filename or build template must be provided.
:param pulumi.Input['TriggerGithubArgs'] github: Describes the configuration of a trigger that creates a build whenever a GitHub event is received.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
:param pulumi.Input[Sequence[pulumi.Input[str]]] ignored_files: ignoredFiles and includedFiles are file glob matches using https://golang.org/pkg/path/filepath/#Match
extended with support for `**`.
If ignoredFiles and changed files are both empty, then they are not
used to determine whether or not to trigger a build.
If ignoredFiles is not empty, then we ignore any files that match any
of the ignored_files globs. If the change has no files that are outside
of the ignoredFiles globs, then we do not trigger a build.
:param pulumi.Input[Sequence[pulumi.Input[str]]] included_files: ignoredFiles and includedFiles are file glob matches using https://golang.org/pkg/path/filepath/#Match
extended with support for `**`.
If any of the files altered in the commit pass the ignoredFiles filter
and includedFiles is empty, then as far as this filter is concerned, we
should trigger the build.
If any of the files altered in the commit pass the ignoredFiles filter
and includedFiles is not empty, then we make sure that at least one of
those files matches an includedFiles glob. If not, then we do not trigger
a build.
:param pulumi.Input[str] name: Name of the trigger. Must be unique within the project.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input['TriggerPubsubConfigArgs'] pubsub_config: PubsubConfig describes the configuration of a trigger that creates
a build whenever a Pub/Sub message is published.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
:param pulumi.Input[str] service_account: The service account used for all user-controlled operations including
triggers.patch, triggers.run, builds.create, and builds.cancel.
If no service account is set, then the standard Cloud Build service account
([PROJECT_NUM]@system.gserviceaccount.com) will be used instead.
Format: projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT_ID_OR_EMAIL}
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] substitutions: Substitutions to use in a triggered build. Should only be used with triggers.run.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: Tags for annotation of a Build. These are not Docker tags.
:param pulumi.Input[str] trigger_id: The unique identifier for the trigger.
:param pulumi.Input['TriggerTriggerTemplateArgs'] trigger_template: Template describing the types of source changes to trigger a build.
Branch and tag names in trigger templates are interpreted as regular
expressions. Any branch or tag change that matches that regular
expression will trigger a build.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
:param pulumi.Input['TriggerWebhookConfigArgs'] webhook_config: WebhookConfig describes the configuration of a trigger that creates
a build whenever a webhook is sent to a trigger's webhook URL.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
"""
if build is not None:
pulumi.set(__self__, "build", build)
if create_time is not None:
pulumi.set(__self__, "create_time", create_time)
if description is not None:
pulumi.set(__self__, "description", description)
if disabled is not None:
pulumi.set(__self__, "disabled", disabled)
if filename is not None:
pulumi.set(__self__, "filename", filename)
if github is not None:
pulumi.set(__self__, "github", github)
if ignored_files is not None:
pulumi.set(__self__, "ignored_files", ignored_files)
if included_files is not None:
pulumi.set(__self__, "included_files", included_files)
if name is not None:
pulumi.set(__self__, "name", name)
if project is not None:
pulumi.set(__self__, "project", project)
if pubsub_config is not None:
pulumi.set(__self__, "pubsub_config", pubsub_config)
if service_account is not None:
pulumi.set(__self__, "service_account", service_account)
if substitutions is not None:
pulumi.set(__self__, "substitutions", substitutions)
if tags is not None:
pulumi.set(__self__, "tags", tags)
if trigger_id is not None:
pulumi.set(__self__, "trigger_id", trigger_id)
if trigger_template is not None:
pulumi.set(__self__, "trigger_template", trigger_template)
if webhook_config is not None:
pulumi.set(__self__, "webhook_config", webhook_config)
@property
@pulumi.getter
def build(self) -> Optional[pulumi.Input['TriggerBuildArgs']]:
"""
Contents of the build template. Either a filename or build template must be provided.
Structure is documented below.
"""
return pulumi.get(self, "build")
@build.setter
def build(self, value: Optional[pulumi.Input['TriggerBuildArgs']]):
pulumi.set(self, "build", value)
@property
@pulumi.getter(name="createTime")
def create_time(self) -> Optional[pulumi.Input[str]]:
"""
Time when the trigger was created.
"""
return pulumi.get(self, "create_time")
@create_time.setter
def create_time(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "create_time", value)
@property
@pulumi.getter
def description(self) -> Optional[pulumi.Input[str]]:
"""
Human-readable description of the trigger.
"""
return pulumi.get(self, "description")
@description.setter
def description(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "description", value)
@property
@pulumi.getter
def disabled(self) -> Optional[pulumi.Input[bool]]:
"""
Whether the trigger is disabled or not. If true, the trigger will never result in a build.
"""
return pulumi.get(self, "disabled")
@disabled.setter
def disabled(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "disabled", value)
@property
@pulumi.getter
def filename(self) -> Optional[pulumi.Input[str]]:
"""
Path, from the source root, to a file whose contents are used for the template. Either a filename or build template must be provided.
"""
return pulumi.get(self, "filename")
@filename.setter
def filename(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "filename", value)
@property
@pulumi.getter
def github(self) -> Optional[pulumi.Input['TriggerGithubArgs']]:
"""
Describes the configuration of a trigger that creates a build whenever a GitHub event is received.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
"""
return pulumi.get(self, "github")
@github.setter
def github(self, value: Optional[pulumi.Input['TriggerGithubArgs']]):
pulumi.set(self, "github", value)
@property
@pulumi.getter(name="ignoredFiles")
def ignored_files(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
ignoredFiles and includedFiles are file glob matches using https://golang.org/pkg/path/filepath/#Match
extended with support for `**`.
If ignoredFiles and changed files are both empty, then they are not
used to determine whether or not to trigger a build.
If ignoredFiles is not empty, then we ignore any files that match any
of the ignored_files globs. If the change has no files that are outside
of the ignoredFiles globs, then we do not trigger a build.
"""
return pulumi.get(self, "ignored_files")
@ignored_files.setter
def ignored_files(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "ignored_files", value)
@property
@pulumi.getter(name="includedFiles")
def included_files(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
ignoredFiles and includedFiles are file glob matches using https://golang.org/pkg/path/filepath/#Match
extended with support for `**`.
If any of the files altered in the commit pass the ignoredFiles filter
and includedFiles is empty, then as far as this filter is concerned, we
should trigger the build.
If any of the files altered in the commit pass the ignoredFiles filter
and includedFiles is not empty, then we make sure that at least one of
those files matches an includedFiles glob. If not, then we do not trigger
a build.
"""
return pulumi.get(self, "included_files")
@included_files.setter
def included_files(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "included_files", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Name of the trigger. Must be unique within the project.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
"""
The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
return pulumi.get(self, "project")
@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project", value)
@property
@pulumi.getter(name="pubsubConfig")
def pubsub_config(self) -> Optional[pulumi.Input['TriggerPubsubConfigArgs']]:
"""
PubsubConfig describes the configuration of a trigger that creates
a build whenever a Pub/Sub message is published.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
"""
return pulumi.get(self, "pubsub_config")
@pubsub_config.setter
def pubsub_config(self, value: Optional[pulumi.Input['TriggerPubsubConfigArgs']]):
pulumi.set(self, "pubsub_config", value)
@property
@pulumi.getter(name="serviceAccount")
def service_account(self) -> Optional[pulumi.Input[str]]:
"""
The service account used for all user-controlled operations including
triggers.patch, triggers.run, builds.create, and builds.cancel.
If no service account is set, then the standard Cloud Build service account
([PROJECT_NUM]@system.gserviceaccount.com) will be used instead.
Format: projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT_ID_OR_EMAIL}
"""
return pulumi.get(self, "service_account")
@service_account.setter
def service_account(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "service_account", value)
@property
@pulumi.getter
def substitutions(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
"""
Substitutions to use in a triggered build. Should only be used with triggers.run.
"""
return pulumi.get(self, "substitutions")
@substitutions.setter
def substitutions(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
pulumi.set(self, "substitutions", value)
@property
@pulumi.getter
def tags(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
"""
Tags for annotation of a Build. These are not Docker tags.
"""
return pulumi.get(self, "tags")
@tags.setter
def tags(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
pulumi.set(self, "tags", value)
@property
@pulumi.getter(name="triggerId")
def trigger_id(self) -> Optional[pulumi.Input[str]]:
"""
The unique identifier for the trigger.
"""
return pulumi.get(self, "trigger_id")
@trigger_id.setter
def trigger_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "trigger_id", value)
@property
@pulumi.getter(name="triggerTemplate")
def trigger_template(self) -> Optional[pulumi.Input['TriggerTriggerTemplateArgs']]:
"""
Template describing the types of source changes to trigger a build.
Branch and tag names in trigger templates are interpreted as regular
expressions. Any branch or tag change that matches that regular
expression will trigger a build.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
"""
return pulumi.get(self, "trigger_template")
@trigger_template.setter
def trigger_template(self, value: Optional[pulumi.Input['TriggerTriggerTemplateArgs']]):
pulumi.set(self, "trigger_template", value)
@property
@pulumi.getter(name="webhookConfig")
def webhook_config(self) -> Optional[pulumi.Input['TriggerWebhookConfigArgs']]:
"""
WebhookConfig describes the configuration of a trigger that creates
a build whenever a webhook is sent to a trigger's webhook URL.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
"""
return pulumi.get(self, "webhook_config")
@webhook_config.setter
def webhook_config(self, value: Optional[pulumi.Input['TriggerWebhookConfigArgs']]):
pulumi.set(self, "webhook_config", value)
class Trigger(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
build: Optional[pulumi.Input[pulumi.InputType['TriggerBuildArgs']]] = None,
description: Optional[pulumi.Input[str]] = None,
disabled: Optional[pulumi.Input[bool]] = None,
filename: Optional[pulumi.Input[str]] = None,
github: Optional[pulumi.Input[pulumi.InputType['TriggerGithubArgs']]] = None,
ignored_files: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
included_files: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
pubsub_config: Optional[pulumi.Input[pulumi.InputType['TriggerPubsubConfigArgs']]] = None,
service_account: Optional[pulumi.Input[str]] = None,
substitutions: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
trigger_template: Optional[pulumi.Input[pulumi.InputType['TriggerTriggerTemplateArgs']]] = None,
webhook_config: Optional[pulumi.Input[pulumi.InputType['TriggerWebhookConfigArgs']]] = None,
__props__=None):
"""
Configuration for an automated build in response to source repository changes.
To get more information about Trigger, see:
* [API documentation](https://cloud.google.com/cloud-build/docs/api/reference/rest/v1/projects.triggers)
* How-to Guides
* [Automating builds using build triggers](https://cloud.google.com/cloud-build/docs/running-builds/automate-builds)
> **Note:** You can retrieve the email of the Cloud Build Service Account used in jobs by using the `projects.ServiceIdentity` resource.
## Example Usage
### Cloudbuild Trigger Filename
```python
import pulumi
import pulumi_gcp as gcp
filename_trigger = gcp.cloudbuild.Trigger("filename-trigger",
filename="cloudbuild.yaml",
substitutions={
"_BAZ": "qux",
"_FOO": "bar",
},
trigger_template=gcp.cloudbuild.TriggerTriggerTemplateArgs(
branch_name="master",
repo_name="my-repo",
))
```
### Cloudbuild Trigger Build
```python
import pulumi
import pulumi_gcp as gcp
build_trigger = gcp.cloudbuild.Trigger("build-trigger",
build=gcp.cloudbuild.TriggerBuildArgs(
artifacts=gcp.cloudbuild.TriggerBuildArtifactsArgs(
images=["gcr.io/$PROJECT_ID/$REPO_NAME:$COMMIT_SHA"],
objects=gcp.cloudbuild.TriggerBuildArtifactsObjectsArgs(
location="gs://bucket/path/to/somewhere/",
paths=["path"],
),
),
logs_bucket="gs://mybucket/logs",
options=gcp.cloudbuild.TriggerBuildOptionsArgs(
disk_size_gb=100,
dynamic_substitutions=True,
env=["ekey = evalue"],
log_streaming_option="STREAM_OFF",
logging="LEGACY",
machine_type="N1_HIGHCPU_8",
requested_verify_option="VERIFIED",
secret_env=["secretenv = svalue"],
source_provenance_hash=["MD5"],
substitution_option="ALLOW_LOOSE",
volumes=[gcp.cloudbuild.TriggerBuildOptionsVolumeArgs(
name="v1",
path="v1",
)],
worker_pool="pool",
),
queue_ttl="20s",
secrets=[gcp.cloudbuild.TriggerBuildSecretArgs(
kms_key_name="projects/myProject/locations/global/keyRings/keyring-name/cryptoKeys/key-name",
secret_env={
"PASSWORD": "ZW5jcnlwdGVkLXBhc3N3b3JkCg==",
},
)],
source=gcp.cloudbuild.TriggerBuildSourceArgs(
storage_source=gcp.cloudbuild.TriggerBuildSourceStorageSourceArgs(
bucket="mybucket",
object="source_code.tar.gz",
),
),
steps=[gcp.cloudbuild.TriggerBuildStepArgs(
args=[
"cp",
"gs://mybucket/remotefile.zip",
"localfile.zip",
],
name="gcr.io/cloud-builders/gsutil",
timeout="120s",
)],
substitutions={
"_BAZ": "qux",
"_FOO": "bar",
},
tags=[
"build",
"newFeature",
],
),
trigger_template=gcp.cloudbuild.TriggerTriggerTemplateArgs(
branch_name="master",
repo_name="my-repo",
))
```
### Cloudbuild Trigger Service Account
```python
import pulumi
import pulumi_gcp as gcp
project = gcp.organizations.get_project()
cloudbuild_service_account = gcp.service_account.Account("cloudbuildServiceAccount", account_id="my-service-account")
act_as = gcp.projects.IAMMember("actAs",
project=project.project_id,
role="roles/iam.serviceAccountUser",
member=cloudbuild_service_account.email.apply(lambda email: f"serviceAccount:{email}"))
logs_writer = gcp.projects.IAMMember("logsWriter",
project=project.project_id,
role="roles/logging.logWriter",
member=cloudbuild_service_account.email.apply(lambda email: f"serviceAccount:{email}"))
service_account_trigger = gcp.cloudbuild.Trigger("service-account-trigger",
trigger_template=gcp.cloudbuild.TriggerTriggerTemplateArgs(
branch_name="master",
repo_name="my-repo",
),
service_account=cloudbuild_service_account.id,
filename="cloudbuild.yaml",
opts=pulumi.ResourceOptions(depends_on=[
act_as,
logs_writer,
]))
```
## Import
Trigger can be imported using any of these accepted formats:
```sh
$ pulumi import gcp:cloudbuild/trigger:Trigger default projects/{{project}}/triggers/{{trigger_id}}
```
```sh
$ pulumi import gcp:cloudbuild/trigger:Trigger default {{project}}/{{trigger_id}}
```
```sh
$ pulumi import gcp:cloudbuild/trigger:Trigger default {{trigger_id}}
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[pulumi.InputType['TriggerBuildArgs']] build: Contents of the build template. Either a filename or build template must be provided.
Structure is documented below.
:param pulumi.Input[str] description: Human-readable description of the trigger.
:param pulumi.Input[bool] disabled: Whether the trigger is disabled or not. If true, the trigger will never result in a build.
:param pulumi.Input[str] filename: Path, from the source root, to a file whose contents are used for the template. Either a filename or build template must be provided.
:param pulumi.Input[pulumi.InputType['TriggerGithubArgs']] github: Describes the configuration of a trigger that creates a build whenever a GitHub event is received.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
:param pulumi.Input[Sequence[pulumi.Input[str]]] ignored_files: ignoredFiles and includedFiles are file glob matches using https://golang.org/pkg/path/filepath/#Match
extended with support for `**`.
If ignoredFiles and changed files are both empty, then they are not
used to determine whether or not to trigger a build.
If ignoredFiles is not empty, then we ignore any files that match any
of the ignored_files globs. If the change has no files that are outside
of the ignoredFiles globs, then we do not trigger a build.
:param pulumi.Input[Sequence[pulumi.Input[str]]] included_files: ignoredFiles and includedFiles are file glob matches using https://golang.org/pkg/path/filepath/#Match
extended with support for `**`.
If any of the files altered in the commit pass the ignoredFiles filter
and includedFiles is empty, then as far as this filter is concerned, we
should trigger the build.
If any of the files altered in the commit pass the ignoredFiles filter
and includedFiles is not empty, then we make sure that at least one of
those files matches an includedFiles glob. If not, then we do not trigger
a build.
:param pulumi.Input[str] name: Name of the trigger. Must be unique within the project.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[pulumi.InputType['TriggerPubsubConfigArgs']] pubsub_config: PubsubConfig describes the configuration of a trigger that creates
a build whenever a Pub/Sub message is published.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
:param pulumi.Input[str] service_account: The service account used for all user-controlled operations including
triggers.patch, triggers.run, builds.create, and builds.cancel.
If no service account is set, then the standard Cloud Build service account
([PROJECT_NUM]@system.gserviceaccount.com) will be used instead.
Format: projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT_ID_OR_EMAIL}
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] substitutions: Substitutions to use in a triggered build. Should only be used with triggers.run.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: Tags for annotation of a Build. These are not Docker tags.
:param pulumi.Input[pulumi.InputType['TriggerTriggerTemplateArgs']] trigger_template: Template describing the types of source changes to trigger a build.
Branch and tag names in trigger templates are interpreted as regular
expressions. Any branch or tag change that matches that regular
expression will trigger a build.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
:param pulumi.Input[pulumi.InputType['TriggerWebhookConfigArgs']] webhook_config: WebhookConfig describes the configuration of a trigger that creates
a build whenever a webhook is sent to a trigger's webhook URL.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: Optional[TriggerArgs] = None,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Configuration for an automated build in response to source repository changes.
To get more information about Trigger, see:
* [API documentation](https://cloud.google.com/cloud-build/docs/api/reference/rest/v1/projects.triggers)
* How-to Guides
* [Automating builds using build triggers](https://cloud.google.com/cloud-build/docs/running-builds/automate-builds)
> **Note:** You can retrieve the email of the Cloud Build Service Account used in jobs by using the `projects.ServiceIdentity` resource.
## Example Usage
### Cloudbuild Trigger Filename
```python
import pulumi
import pulumi_gcp as gcp
filename_trigger = gcp.cloudbuild.Trigger("filename-trigger",
filename="cloudbuild.yaml",
substitutions={
"_BAZ": "qux",
"_FOO": "bar",
},
trigger_template=gcp.cloudbuild.TriggerTriggerTemplateArgs(
branch_name="master",
repo_name="my-repo",
))
```
### Cloudbuild Trigger Build
```python
import pulumi
import pulumi_gcp as gcp
build_trigger = gcp.cloudbuild.Trigger("build-trigger",
build=gcp.cloudbuild.TriggerBuildArgs(
artifacts=gcp.cloudbuild.TriggerBuildArtifactsArgs(
images=["gcr.io/$PROJECT_ID/$REPO_NAME:$COMMIT_SHA"],
objects=gcp.cloudbuild.TriggerBuildArtifactsObjectsArgs(
location="gs://bucket/path/to/somewhere/",
paths=["path"],
),
),
logs_bucket="gs://mybucket/logs",
options=gcp.cloudbuild.TriggerBuildOptionsArgs(
disk_size_gb=100,
dynamic_substitutions=True,
env=["ekey = evalue"],
log_streaming_option="STREAM_OFF",
logging="LEGACY",
machine_type="N1_HIGHCPU_8",
requested_verify_option="VERIFIED",
secret_env=["secretenv = svalue"],
source_provenance_hash=["MD5"],
substitution_option="ALLOW_LOOSE",
volumes=[gcp.cloudbuild.TriggerBuildOptionsVolumeArgs(
name="v1",
path="v1",
)],
worker_pool="pool",
),
queue_ttl="20s",
secrets=[gcp.cloudbuild.TriggerBuildSecretArgs(
kms_key_name="projects/myProject/locations/global/keyRings/keyring-name/cryptoKeys/key-name",
secret_env={
"PASSWORD": "ZW5jcnlwdGVkLXBhc3N3b3JkCg==",
},
)],
source=gcp.cloudbuild.TriggerBuildSourceArgs(
storage_source=gcp.cloudbuild.TriggerBuildSourceStorageSourceArgs(
bucket="mybucket",
object="source_code.tar.gz",
),
),
steps=[gcp.cloudbuild.TriggerBuildStepArgs(
args=[
"cp",
"gs://mybucket/remotefile.zip",
"localfile.zip",
],
name="gcr.io/cloud-builders/gsutil",
timeout="120s",
)],
substitutions={
"_BAZ": "qux",
"_FOO": "bar",
},
tags=[
"build",
"newFeature",
],
),
trigger_template=gcp.cloudbuild.TriggerTriggerTemplateArgs(
branch_name="master",
repo_name="my-repo",
))
```
### Cloudbuild Trigger Service Account
```python
import pulumi
import pulumi_gcp as gcp
project = gcp.organizations.get_project()
cloudbuild_service_account = gcp.service_account.Account("cloudbuildServiceAccount", account_id="my-service-account")
act_as = gcp.projects.IAMMember("actAs",
project=project.project_id,
role="roles/iam.serviceAccountUser",
member=cloudbuild_service_account.email.apply(lambda email: f"serviceAccount:{email}"))
logs_writer = gcp.projects.IAMMember("logsWriter",
project=project.project_id,
role="roles/logging.logWriter",
member=cloudbuild_service_account.email.apply(lambda email: f"serviceAccount:{email}"))
service_account_trigger = gcp.cloudbuild.Trigger("service-account-trigger",
trigger_template=gcp.cloudbuild.TriggerTriggerTemplateArgs(
branch_name="master",
repo_name="my-repo",
),
service_account=cloudbuild_service_account.id,
filename="cloudbuild.yaml",
opts=pulumi.ResourceOptions(depends_on=[
act_as,
logs_writer,
]))
```
## Import
Trigger can be imported using any of these accepted formats:
```sh
$ pulumi import gcp:cloudbuild/trigger:Trigger default projects/{{project}}/triggers/{{trigger_id}}
```
```sh
$ pulumi import gcp:cloudbuild/trigger:Trigger default {{project}}/{{trigger_id}}
```
```sh
$ pulumi import gcp:cloudbuild/trigger:Trigger default {{trigger_id}}
```
:param str resource_name: The name of the resource.
:param TriggerArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(TriggerArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
build: Optional[pulumi.Input[pulumi.InputType['TriggerBuildArgs']]] = None,
description: Optional[pulumi.Input[str]] = None,
disabled: Optional[pulumi.Input[bool]] = None,
filename: Optional[pulumi.Input[str]] = None,
github: Optional[pulumi.Input[pulumi.InputType['TriggerGithubArgs']]] = None,
ignored_files: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
included_files: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
pubsub_config: Optional[pulumi.Input[pulumi.InputType['TriggerPubsubConfigArgs']]] = None,
service_account: Optional[pulumi.Input[str]] = None,
substitutions: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
trigger_template: Optional[pulumi.Input[pulumi.InputType['TriggerTriggerTemplateArgs']]] = None,
webhook_config: Optional[pulumi.Input[pulumi.InputType['TriggerWebhookConfigArgs']]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = TriggerArgs.__new__(TriggerArgs)
__props__.__dict__["build"] = build
__props__.__dict__["description"] = description
__props__.__dict__["disabled"] = disabled
__props__.__dict__["filename"] = filename
__props__.__dict__["github"] = github
__props__.__dict__["ignored_files"] = ignored_files
__props__.__dict__["included_files"] = included_files
__props__.__dict__["name"] = name
__props__.__dict__["project"] = project
__props__.__dict__["pubsub_config"] = pubsub_config
__props__.__dict__["service_account"] = service_account
__props__.__dict__["substitutions"] = substitutions
__props__.__dict__["tags"] = tags
__props__.__dict__["trigger_template"] = trigger_template
__props__.__dict__["webhook_config"] = webhook_config
__props__.__dict__["create_time"] = None
__props__.__dict__["trigger_id"] = None
super(Trigger, __self__).__init__(
'gcp:cloudbuild/trigger:Trigger',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
build: Optional[pulumi.Input[pulumi.InputType['TriggerBuildArgs']]] = None,
create_time: Optional[pulumi.Input[str]] = None,
description: Optional[pulumi.Input[str]] = None,
disabled: Optional[pulumi.Input[bool]] = None,
filename: Optional[pulumi.Input[str]] = None,
github: Optional[pulumi.Input[pulumi.InputType['TriggerGithubArgs']]] = None,
ignored_files: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
included_files: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
pubsub_config: Optional[pulumi.Input[pulumi.InputType['TriggerPubsubConfigArgs']]] = None,
service_account: Optional[pulumi.Input[str]] = None,
substitutions: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
tags: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
trigger_id: Optional[pulumi.Input[str]] = None,
trigger_template: Optional[pulumi.Input[pulumi.InputType['TriggerTriggerTemplateArgs']]] = None,
webhook_config: Optional[pulumi.Input[pulumi.InputType['TriggerWebhookConfigArgs']]] = None) -> 'Trigger':
"""
Get an existing Trigger resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[pulumi.InputType['TriggerBuildArgs']] build: Contents of the build template. Either a filename or build template must be provided.
Structure is documented below.
:param pulumi.Input[str] create_time: Time when the trigger was created.
:param pulumi.Input[str] description: Human-readable description of the trigger.
:param pulumi.Input[bool] disabled: Whether the trigger is disabled or not. If true, the trigger will never result in a build.
:param pulumi.Input[str] filename: Path, from the source root, to a file whose contents are used for the template. Either a filename or build template must be provided.
:param pulumi.Input[pulumi.InputType['TriggerGithubArgs']] github: Describes the configuration of a trigger that creates a build whenever a GitHub event is received.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
:param pulumi.Input[Sequence[pulumi.Input[str]]] ignored_files: ignoredFiles and includedFiles are file glob matches using https://golang.org/pkg/path/filepath/#Match
extended with support for `**`.
If ignoredFiles and changed files are both empty, then they are not
used to determine whether or not to trigger a build.
If ignoredFiles is not empty, then we ignore any files that match any
of the ignored_files globs. If the change has no files that are outside
of the ignoredFiles globs, then we do not trigger a build.
:param pulumi.Input[Sequence[pulumi.Input[str]]] included_files: ignoredFiles and includedFiles are file glob matches using https://golang.org/pkg/path/filepath/#Match
extended with support for `**`.
If any of the files altered in the commit pass the ignoredFiles filter
and includedFiles is empty, then as far as this filter is concerned, we
should trigger the build.
If any of the files altered in the commit pass the ignoredFiles filter
and includedFiles is not empty, then we make sure that at least one of
those files matches an includedFiles glob. If not, then we do not trigger
a build.
:param pulumi.Input[str] name: Name of the trigger. Must be unique within the project.
:param pulumi.Input[str] project: The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
:param pulumi.Input[pulumi.InputType['TriggerPubsubConfigArgs']] pubsub_config: PubsubConfig describes the configuration of a trigger that creates
a build whenever a Pub/Sub message is published.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
:param pulumi.Input[str] service_account: The service account used for all user-controlled operations including
triggers.patch, triggers.run, builds.create, and builds.cancel.
If no service account is set, then the standard Cloud Build service account
([PROJECT_NUM]@system.gserviceaccount.com) will be used instead.
Format: projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT_ID_OR_EMAIL}
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] substitutions: Substitutions to use in a triggered build. Should only be used with triggers.run.
:param pulumi.Input[Sequence[pulumi.Input[str]]] tags: Tags for annotation of a Build. These are not Docker tags.
:param pulumi.Input[str] trigger_id: The unique identifier for the trigger.
:param pulumi.Input[pulumi.InputType['TriggerTriggerTemplateArgs']] trigger_template: Template describing the types of source changes to trigger a build.
Branch and tag names in trigger templates are interpreted as regular
expressions. Any branch or tag change that matches that regular
expression will trigger a build.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
:param pulumi.Input[pulumi.InputType['TriggerWebhookConfigArgs']] webhook_config: WebhookConfig describes the configuration of a trigger that creates
a build whenever a webhook is sent to a trigger's webhook URL.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _TriggerState.__new__(_TriggerState)
__props__.__dict__["build"] = build
__props__.__dict__["create_time"] = create_time
__props__.__dict__["description"] = description
__props__.__dict__["disabled"] = disabled
__props__.__dict__["filename"] = filename
__props__.__dict__["github"] = github
__props__.__dict__["ignored_files"] = ignored_files
__props__.__dict__["included_files"] = included_files
__props__.__dict__["name"] = name
__props__.__dict__["project"] = project
__props__.__dict__["pubsub_config"] = pubsub_config
__props__.__dict__["service_account"] = service_account
__props__.__dict__["substitutions"] = substitutions
__props__.__dict__["tags"] = tags
__props__.__dict__["trigger_id"] = trigger_id
__props__.__dict__["trigger_template"] = trigger_template
__props__.__dict__["webhook_config"] = webhook_config
return Trigger(resource_name, opts=opts, __props__=__props__)
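# Usage sketch (hypothetical values, not part of the generated SDK): rehydrate
# an existing trigger via one of the documented import ID formats, then read
# its server-generated outputs, e.g.
#     existing = Trigger.get("existing", id="my-project/my-trigger-id")
#     pulumi.export("created", existing.create_time)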
@property
@pulumi.getter
def build(self) -> pulumi.Output[Optional['outputs.TriggerBuild']]:
"""
Contents of the build template. Either a filename or build template must be provided.
Structure is documented below.
"""
return pulumi.get(self, "build")
@property
@pulumi.getter(name="createTime")
def create_time(self) -> pulumi.Output[str]:
"""
Time when the trigger was created.
"""
return pulumi.get(self, "create_time")
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
"""
Human-readable description of the trigger.
"""
return pulumi.get(self, "description")
@property
@pulumi.getter
def disabled(self) -> pulumi.Output[Optional[bool]]:
"""
Whether the trigger is disabled or not. If true, the trigger will never result in a build.
"""
return pulumi.get(self, "disabled")
@property
@pulumi.getter
def filename(self) -> pulumi.Output[Optional[str]]:
"""
Path, from the source root, to a file whose contents are used for the template. Either a filename or build template must be provided.
"""
return pulumi.get(self, "filename")
@property
@pulumi.getter
def github(self) -> pulumi.Output[Optional['outputs.TriggerGithub']]:
"""
Describes the configuration of a trigger that creates a build whenever a GitHub event is received.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
"""
return pulumi.get(self, "github")
@property
@pulumi.getter(name="ignoredFiles")
def ignored_files(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
ignoredFiles and includedFiles are file glob matches using https://golang.org/pkg/path/filepath/#Match
extended with support for `**`.
If ignoredFiles and changed files are both empty, then they are not
used to determine whether or not to trigger a build.
If ignoredFiles is not empty, then we ignore any files that match any
of the ignored_files globs. If the change has no files that are outside
of the ignoredFiles globs, then we do not trigger a build.
"""
return pulumi.get(self, "ignored_files")
@property
@pulumi.getter(name="includedFiles")
def included_files(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
ignoredFiles and includedFiles are file glob matches using https://golang.org/pkg/path/filepath/#Match
extended with support for `**`.
If any of the files altered in the commit pass the ignoredFiles filter
and includedFiles is empty, then as far as this filter is concerned, we
should trigger the build.
If any of the files altered in the commit pass the ignoredFiles filter
and includedFiles is not empty, then we make sure that at least one of
those files matches an includedFiles glob. If not, then we do not trigger
a build.
"""
return pulumi.get(self, "included_files")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Name of the trigger. Must be unique within the project.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def project(self) -> pulumi.Output[str]:
"""
The ID of the project in which the resource belongs.
If it is not provided, the provider project is used.
"""
return pulumi.get(self, "project")
@property
@pulumi.getter(name="pubsubConfig")
def pubsub_config(self) -> pulumi.Output[Optional['outputs.TriggerPubsubConfig']]:
"""
PubsubConfig describes the configuration of a trigger that creates
a build whenever a Pub/Sub message is published.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
"""
return pulumi.get(self, "pubsub_config")
@property
@pulumi.getter(name="serviceAccount")
def service_account(self) -> pulumi.Output[Optional[str]]:
"""
The service account used for all user-controlled operations including
triggers.patch, triggers.run, builds.create, and builds.cancel.
If no service account is set, then the standard Cloud Build service account
([PROJECT_NUM]@system.gserviceaccount.com) will be used instead.
Format: projects/{PROJECT_ID}/serviceAccounts/{ACCOUNT_ID_OR_EMAIL}
"""
return pulumi.get(self, "service_account")
@property
@pulumi.getter
def substitutions(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Substitutions to use in a triggered build. Should only be used with triggers.run.
"""
return pulumi.get(self, "substitutions")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Sequence[str]]]:
"""
Tags for annotation of a Build. These are not Docker tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter(name="triggerId")
def trigger_id(self) -> pulumi.Output[str]:
"""
The unique identifier for the trigger.
"""
return pulumi.get(self, "trigger_id")
@property
@pulumi.getter(name="triggerTemplate")
def trigger_template(self) -> pulumi.Output[Optional['outputs.TriggerTriggerTemplate']]:
"""
Template describing the types of source changes to trigger a build.
Branch and tag names in trigger templates are interpreted as regular
expressions. Any branch or tag change that matches that regular
expression will trigger a build.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
"""
return pulumi.get(self, "trigger_template")
@property
@pulumi.getter(name="webhookConfig")
def webhook_config(self) -> pulumi.Output[Optional['outputs.TriggerWebhookConfig']]:
"""
WebhookConfig describes the configuration of a trigger that creates
a build whenever a webhook is sent to a trigger's webhook URL.
One of `trigger_template`, `github`, `pubsub_config` or `webhook_config` must be provided.
Structure is documented below.
"""
return pulumi.get(self, "webhook_config")
# principal.py (89 bytes) · CAiAuM/Travis1vez @ bc2ae8839c1750b306b15a0e6257a68f74ced2be · Apache-2.0
def soma(x, y):
    # "soma" is Portuguese for "sum": returns x plus y.
    return x + y

def subtrai(x, y):
    # "subtrai" is Portuguese for "subtracts": returns x minus y.
    return x - y

def mult():
    # Unimplemented stub ("mult" suggests multiplication); left as a no-op.
    pass
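
# Minimal driver (hypothetical, not part of the original file) exercising the
# helpers above:
if __name__ == "__main__":
    print(soma(2, 3))     # -> 5
    print(subtrai(7, 4))  # -> 3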
# forum/migrations/0077_auto_20150613_1650.py (1,133 bytes) · shmilyoo/ggxxBBS @ cef6408e533bd0b0f57c3e2f5da4e93ea07c4331 · MIT
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
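# Adds vote counters to both Post and Topic: 'support' (\u652f\u6301 = "支持",
# "support") and 'against' (\u53cd\u5bf9 = "反对", "oppose").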
class Migration(migrations.Migration):
dependencies = [
('forum', '0076_auto_20150613_1628'),
]
operations = [
migrations.AddField(
model_name='post',
name='against',
field=models.PositiveIntegerField(default=0, verbose_name='\u53cd\u5bf9'),
preserve_default=True,
),
migrations.AddField(
model_name='post',
name='support',
field=models.PositiveIntegerField(default=0, verbose_name='\u652f\u6301'),
preserve_default=True,
),
migrations.AddField(
model_name='topic',
name='against',
field=models.PositiveIntegerField(default=0, verbose_name='\u53cd\u5bf9'),
preserve_default=True,
),
migrations.AddField(
model_name='topic',
name='support',
field=models.PositiveIntegerField(default=0, verbose_name='\u652f\u6301'),
preserve_default=True,
),
]
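
# Resulting model shape (hypothetical sketch, not from the original repo):
#
#     class Post(models.Model):
#         ...
#         support = models.PositiveIntegerField(default=0)  # 支持
#         against = models.PositiveIntegerField(default=0)  # 反对
#
# Topic gains the same two counter fields.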
# core/tests/test_polypod/test_pods/test_volumes.py (40,879 bytes) · admariner/polyaxon @ ba355c38166047eb11e60de4cee4d7c3b48db323 · Apache-2.0 · 3,200 stars
#!/usr/bin/python
#
# Copyright 2018-2021 Polyaxon, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from polyaxon.connections.kinds import V1ConnectionKind
from polyaxon.connections.schemas import (
V1BucketConnection,
V1ClaimConnection,
V1HostPathConnection,
V1K8sResourceSchema,
)
from polyaxon.polyflow import V1Init, V1Plugins
from polyaxon.polypod.common import constants
from polyaxon.polypod.common.volumes import (
get_artifacts_context_volume,
get_configs_context_volume,
get_connections_context_volume,
get_docker_context_volume,
get_shm_context_volume,
get_volume,
get_volume_from_config_map,
get_volume_from_connection,
get_volume_from_secret,
)
from polyaxon.polypod.pod.volumes import get_pod_volumes
from polyaxon.polypod.specs.contexts import PluginsContextsSpec
from polyaxon.schemas.types import V1ConnectionType, V1K8sResourceType
from tests.utils import BaseTestCase
@pytest.mark.polypod_mark
class TestPodVolumes(BaseTestCase):
def setUp(self):
super().setUp()
# Secrets and config maps
self.non_mount_resource1 = V1K8sResourceType(
name="non_mount_test1",
schema=V1K8sResourceSchema(
name="non_mount_test1", items=["item1", "item2"]
),
is_requested=False,
)
self.non_mount_resource2 = V1K8sResourceType(
name="non_mount_test2",
schema=V1K8sResourceSchema(name="non_mount_test2"),
is_requested=False,
)
self.mount_resource1 = V1K8sResourceType(
name="mount_test1",
schema=V1K8sResourceSchema(
name="mount_test1", items=["item1", "item2"], mount_path="/tmp1"
),
is_requested=False,
)
self.mount_resource2 = V1K8sResourceType(
name="mount_test1",
schema=V1K8sResourceSchema(
name="mount_test1", items=["item1", "item2"], mount_path="/tmp2"
),
is_requested=False,
)
# Volumes
self.vol1 = get_volume(volume="vol1", claim_name="claim1")
self.vol2 = get_volume(volume="vol2", host_path="/path2")
self.vol3 = get_volume(volume="vol3")
# Connections
self.s3_store = V1ConnectionType(
name="test_s3",
kind=V1ConnectionKind.S3,
schema=V1BucketConnection(bucket="s3//:foo"),
secret=self.mount_resource1.schema,
)
self.gcs_store = V1ConnectionType(
name="test_gcs",
kind=V1ConnectionKind.GCS,
schema=V1BucketConnection(bucket="gs//:foo"),
secret=self.mount_resource1.schema,
)
self.az_store = V1ConnectionType(
name="test_az",
kind=V1ConnectionKind.WASB,
secret=self.mount_resource1.schema,
)
self.claim_store = V1ConnectionType(
name="test_claim",
kind=V1ConnectionKind.VOLUME_CLAIM,
schema=V1ClaimConnection(
mount_path="/tmp", volume_claim="test", read_only=True
),
)
self.host_path_store = V1ConnectionType(
name="test_path",
kind=V1ConnectionKind.HOST_PATH,
schema=V1HostPathConnection(mount_path="/tmp", host_path="/tmp"),
)
def test_default_volumes(self):
assert (
get_pod_volumes(
contexts=None,
artifacts_store=None,
init_connections=None,
connections=None,
connection_by_names=None,
secrets=None,
config_maps=None,
volumes=None,
)
== []
)
assert (
get_pod_volumes(
contexts=None,
artifacts_store=None,
init_connections=[],
connections=[],
connection_by_names={},
secrets=[],
config_maps=[],
volumes=[],
)
== []
)
assert (
get_pod_volumes(
contexts=PluginsContextsSpec.from_config(
V1Plugins(
docker=False,
shm=False,
auth=False,
collect_artifacts=False,
collect_logs=False,
)
),
artifacts_store=None,
init_connections=[],
connections=[],
connection_by_names={},
secrets=[],
config_maps=[],
volumes=[],
)
== []
)
assert get_pod_volumes(
contexts=PluginsContextsSpec.from_config(
V1Plugins(
docker=True,
shm=True,
auth=True,
collect_artifacts=False,
collect_logs=False,
)
),
artifacts_store=None,
init_connections=[],
connections=[],
connection_by_names={},
secrets=[],
config_maps=[],
volumes=[],
) == [
get_shm_context_volume(),
get_configs_context_volume(),
get_docker_context_volume(),
]
def test_auth_context(self):
assert (
get_pod_volumes(
contexts=PluginsContextsSpec.from_config(
V1Plugins(
docker=False,
shm=False,
auth=True,
collect_artifacts=False,
collect_logs=False,
)
),
artifacts_store=None,
init_connections=[],
connections=[],
connection_by_names={},
secrets=[],
config_maps=[],
volumes=[],
)
== [get_configs_context_volume()]
)
def test_docker_context(self):
assert (
get_pod_volumes(
contexts=PluginsContextsSpec.from_config(
V1Plugins(
docker=True,
shm=False,
auth=False,
collect_artifacts=False,
collect_logs=False,
)
),
artifacts_store=None,
init_connections=[],
connections=[],
connection_by_names={},
secrets=[],
config_maps=[],
volumes=[],
)
== [get_docker_context_volume()]
)
def test_shm_context(self):
assert (
get_pod_volumes(
contexts=PluginsContextsSpec.from_config(
V1Plugins(
docker=False,
shm=True,
auth=False,
collect_artifacts=False,
collect_logs=False,
)
),
artifacts_store=None,
init_connections=[],
connections=[],
connection_by_names={},
secrets=[],
config_maps=[],
volumes=[],
)
== [get_shm_context_volume()]
)
def test_passing_volumes(self):
assert (
get_pod_volumes(
contexts=PluginsContextsSpec.from_config(
V1Plugins(
docker=False,
shm=False,
auth=False,
collect_artifacts=False,
collect_logs=False,
)
),
artifacts_store=None,
init_connections=[],
connections=[],
connection_by_names={},
secrets=[],
config_maps=[],
volumes=[self.vol1, self.vol2, self.vol3],
)
== [self.vol1, self.vol2, self.vol3]
)
@staticmethod
def assert_artifacts_store(store, results):
assert (
get_pod_volumes(
contexts=PluginsContextsSpec.from_config(
V1Plugins(
docker=False,
shm=False,
auth=False,
collect_artifacts=True,
collect_logs=False,
)
),
artifacts_store=store,
init_connections=[],
connections=[],
connection_by_names={store.name: store},
secrets=[],
config_maps=[],
volumes=[],
)
== results
)
def test_artifacts_store(self):
self.assert_artifacts_store(
store=self.s3_store,
results=[
get_volume_from_secret(secret=self.mount_resource1),
get_artifacts_context_volume(),
],
)
self.assert_artifacts_store(
store=self.gcs_store,
results=[
get_volume_from_secret(secret=self.mount_resource1),
get_artifacts_context_volume(),
],
)
self.assert_artifacts_store(
store=self.az_store,
results=[
get_volume_from_secret(secret=self.mount_resource1),
get_artifacts_context_volume(),
],
)
self.assert_artifacts_store(
store=self.claim_store,
results=[
get_volume_from_connection(connection=self.claim_store),
get_artifacts_context_volume(),
],
)
self.assert_artifacts_store(
store=self.host_path_store,
results=[
get_volume_from_connection(connection=self.host_path_store),
get_artifacts_context_volume(),
],
)
@staticmethod
def assert_single_artifacts_store(store, results):
assert (
get_pod_volumes(
contexts=None,
artifacts_store=None,
init_connections=[],
connection_by_names={store.name: store},
connections=[],
secrets=[],
config_maps=[],
volumes=[],
)
== results
)
@staticmethod
def assert_single_init_artifacts_store(store, results):
assert (
get_pod_volumes(
contexts=None,
artifacts_store=None,
init_connections=[V1Init(connection=store.name)],
connection_by_names={store.name: store},
connections=[],
secrets=[],
config_maps=[],
volumes=[],
)
== results
)
def test_single_connections(self):
self.assert_single_artifacts_store(store=self.s3_store, results=[])
self.assert_single_artifacts_store(store=self.gcs_store, results=[])
self.assert_single_artifacts_store(store=self.az_store, results=[])
self.assert_single_artifacts_store(
store=self.claim_store,
results=[get_volume_from_connection(connection=self.claim_store)],
)
self.assert_single_artifacts_store(
store=self.host_path_store,
results=[get_volume_from_connection(connection=self.host_path_store)],
)
# Managed versions
ctx_volume_name = constants.CONTEXT_VOLUME_ARTIFACTS
self.assert_single_init_artifacts_store(
store=self.s3_store,
results=[
get_connections_context_volume(name=ctx_volume_name),
get_volume_from_secret(secret=self.mount_resource1),
],
)
self.assert_single_init_artifacts_store(
store=self.gcs_store,
results=[
get_connections_context_volume(name=ctx_volume_name),
get_volume_from_secret(secret=self.mount_resource1),
],
)
self.assert_single_init_artifacts_store(
store=self.az_store,
results=[
get_connections_context_volume(name=ctx_volume_name),
get_volume_from_secret(secret=self.mount_resource1),
],
)
self.assert_single_init_artifacts_store(
store=self.claim_store,
results=[
get_connections_context_volume(name=ctx_volume_name),
get_volume_from_connection(connection=self.claim_store),
],
)
self.assert_single_init_artifacts_store(
store=self.host_path_store,
results=[
get_connections_context_volume(name=ctx_volume_name),
get_volume_from_connection(connection=self.host_path_store),
],
)
def test_multi_connections(self):
connection_by_names = {
self.s3_store.name: self.s3_store,
self.gcs_store.name: self.gcs_store,
self.az_store.name: self.az_store,
self.claim_store.name: self.claim_store,
self.host_path_store.name: self.host_path_store,
}
init_connections = [
V1Init(connection=self.s3_store.name, path="/test-1"),
V1Init(connection=self.gcs_store.name, path="/test-2"),
V1Init(connection=self.az_store.name, path="/test-3"),
V1Init(connection=self.claim_store.name, path="/test-4"),
V1Init(connection=self.host_path_store.name, path="/test-5"),
]
assert (
len(
get_pod_volumes(
contexts=None,
artifacts_store=None,
init_connections=[],
connection_by_names=connection_by_names,
connections=[],
secrets=[],
config_maps=[],
volumes=[],
)
)
== 2
)
        # Test that all inits are mounted to the same context and a single secret is requested for all
assert (
len(
get_pod_volumes(
contexts=None,
artifacts_store=None,
init_connections=[
V1Init(connection=self.s3_store.name),
V1Init(connection=self.gcs_store.name),
V1Init(connection=self.az_store.name),
V1Init(connection=self.claim_store.name),
V1Init(connection=self.host_path_store.name),
],
connection_by_names=connection_by_names,
connections=[],
secrets=[],
config_maps=[],
volumes=[],
)
)
== 4
)
assert (
len(
get_pod_volumes(
contexts=None,
artifacts_store=None,
init_connections=init_connections,
connection_by_names=connection_by_names,
connections=[],
secrets=[],
config_maps=[],
volumes=[],
)
)
== 8
)
assert (
len(
get_pod_volumes(
contexts=None,
artifacts_store=None,
init_connections=init_connections,
connection_by_names=connection_by_names,
connections=[],
secrets=[],
config_maps=[],
volumes=[],
)
)
== 8
)
assert (
len(
get_pod_volumes(
contexts=PluginsContextsSpec.from_config(
V1Plugins(
docker=True,
shm=True,
auth=True,
collect_artifacts=True,
collect_logs=True,
)
),
artifacts_store=self.claim_store,
init_connections=init_connections,
connection_by_names=connection_by_names,
connections=[],
secrets=[],
config_maps=[],
volumes=[],
)
)
== 12
)
assert (
len(
get_pod_volumes(
contexts=PluginsContextsSpec.from_config(
V1Plugins(
docker=True,
shm=True,
auth=True,
collect_artifacts=True,
collect_logs=True,
)
),
artifacts_store=self.claim_store,
init_connections=init_connections,
connection_by_names=connection_by_names,
connections=list(connection_by_names.keys()),
secrets=[],
config_maps=[],
volumes=[],
)
)
== 12
)
@staticmethod
def assert_secret(secret, connection, results):
dummy_connection = (
V1ConnectionType(
name="connection",
kind=V1ConnectionKind.S3,
schema=None,
secret=secret.schema,
)
if connection
else None
)
connection_by_names = {"connection": dummy_connection} if connection else {}
connections = ["connection"] if connection else []
assert (
get_pod_volumes(
contexts=None,
artifacts_store=None,
init_connections=[],
connections=connections,
connection_by_names=connection_by_names,
secrets=[secret],
config_maps=[],
volumes=[],
)
== results
)
@staticmethod
def assert_config_map(config_map, connection, results):
dummy_connection = (
V1ConnectionType(
name="connection",
kind=V1ConnectionKind.S3,
schema=None,
config_map=config_map.schema,
)
if connection
else None
)
connection_by_names = {"connection": dummy_connection} if connection else {}
connections = ["connection"] if connection else []
assert (
get_pod_volumes(
contexts=None,
artifacts_store=None,
init_connections=[],
connections=connections,
connection_by_names=connection_by_names,
secrets=[],
config_maps=[config_map],
volumes=[],
)
== results
)
def test_secret_volumes(self):
self.assert_secret(
secret=self.non_mount_resource1, connection=False, results=[]
)
self.assert_secret(
secret=self.non_mount_resource2, connection=False, results=[]
)
self.assert_secret(secret=self.mount_resource1, connection=False, results=[])
self.assert_secret(secret=self.mount_resource2, connection=False, results=[])
self.assert_secret(
secret=self.mount_resource1,
connection=True,
results=[get_volume_from_secret(secret=self.mount_resource1)],
)
self.assert_secret(
secret=self.mount_resource2,
connection=True,
results=[get_volume_from_secret(secret=self.mount_resource2)],
)
def test_config_map_volumes(self):
self.assert_config_map(
config_map=self.non_mount_resource1, connection=False, results=[]
)
self.assert_config_map(
config_map=self.non_mount_resource2, connection=False, results=[]
)
self.assert_config_map(
config_map=self.mount_resource1, connection=False, results=[]
)
self.assert_config_map(
config_map=self.mount_resource2, connection=False, results=[]
)
self.assert_config_map(
config_map=self.mount_resource1,
connection=True,
results=[get_volume_from_config_map(config_map=self.mount_resource1)],
)
self.assert_config_map(
config_map=self.mount_resource2,
connection=True,
results=[get_volume_from_config_map(config_map=self.mount_resource2)],
)
def test_multiple_resources(self):
assert (
get_pod_volumes(
contexts=None,
artifacts_store=None,
init_connections=[],
connection_by_names={},
connections=[],
secrets=[
self.non_mount_resource1,
self.non_mount_resource1,
self.mount_resource1,
self.mount_resource2,
],
config_maps=[
self.non_mount_resource1,
self.non_mount_resource1,
self.mount_resource1,
self.mount_resource2,
],
volumes=[],
)
== []
)
# Make the resources requested
self.non_mount_resource1.is_requested = True
self.non_mount_resource2.is_requested = True
self.mount_resource1.is_requested = True
self.mount_resource2.is_requested = True
assert get_pod_volumes(
contexts=None,
artifacts_store=None,
init_connections=[],
connection_by_names={},
connections=[],
secrets=[
self.non_mount_resource1,
self.non_mount_resource2,
self.mount_resource1,
self.mount_resource2,
],
config_maps=[
self.non_mount_resource1,
self.non_mount_resource2,
self.mount_resource1,
self.mount_resource2,
],
volumes=[],
) == [
get_volume_from_secret(secret=self.mount_resource1),
get_volume_from_secret(secret=self.mount_resource2),
get_volume_from_config_map(config_map=self.mount_resource1),
get_volume_from_config_map(config_map=self.mount_resource2),
]
def test_all_volumes_and_init_in_the_same_context(self):
connection_by_names = {
self.s3_store.name: self.s3_store,
self.gcs_store.name: self.gcs_store,
self.az_store.name: self.az_store,
self.claim_store.name: self.claim_store,
self.host_path_store.name: self.host_path_store,
}
        # Test that all inits are in the same context
pod_volumes = get_pod_volumes(
contexts=PluginsContextsSpec.from_config(
V1Plugins(
docker=True,
shm=True,
auth=True,
collect_artifacts=True,
collect_logs=True,
)
),
artifacts_store=self.claim_store,
init_connections=[
V1Init(connection=self.s3_store.name),
V1Init(connection=self.gcs_store.name),
V1Init(connection=self.az_store.name),
V1Init(connection=self.claim_store.name),
V1Init(connection=self.host_path_store.name),
],
connections=[],
connection_by_names=connection_by_names,
secrets=[
self.non_mount_resource1,
self.non_mount_resource1,
self.mount_resource1,
self.mount_resource2,
],
config_maps=[
self.non_mount_resource1,
self.non_mount_resource1,
self.mount_resource1,
self.mount_resource2,
],
volumes=[self.vol1, self.vol2, self.vol3],
)
# 1: logs/output contexts (same volume) / 1 managed contexts
# 3: 3 context requested constant contexts
# 3: 3 volumes
        # 2: 2 mount volumes
# 1: 1 mount secret
assert len(pod_volumes) == 1 + 3 + 3 + 2 + 1
        # Test that all inits are in the same context
pod_volumes = get_pod_volumes(
contexts=PluginsContextsSpec.from_config(
V1Plugins(
docker=True,
shm=True,
auth=True,
collect_artifacts=True,
collect_logs=True,
)
),
artifacts_store=self.claim_store,
init_connections=[
V1Init(connection=self.s3_store.name),
V1Init(connection=self.gcs_store.name),
V1Init(connection=self.az_store.name),
V1Init(connection=self.claim_store.name),
V1Init(connection=self.host_path_store.name),
],
connections=list(connection_by_names.keys()),
connection_by_names=connection_by_names,
secrets=[
self.non_mount_resource1,
self.non_mount_resource1,
self.mount_resource1,
self.mount_resource2,
],
config_maps=[
self.non_mount_resource1,
self.non_mount_resource1,
self.mount_resource1,
self.mount_resource2,
],
volumes=[self.vol1, self.vol2, self.vol3],
)
# 1: logs/output contexts (same volume) / 1 managed contexts
# 3: 3 context requested constant contexts
# 3: 3 volumes
        # 2: 2 mount volumes
        # 1: 1 mount secret
assert len(pod_volumes) == 1 + 3 + 3 + 2 + 1
# Enable requesting resources
self.mount_resource1.is_requested = True
self.mount_resource2.is_requested = True
        # Test that all inits are in the same context and resources are requested
pod_volumes = get_pod_volumes(
contexts=PluginsContextsSpec.from_config(
V1Plugins(
docker=True,
shm=True,
auth=True,
collect_artifacts=True,
collect_logs=True,
)
),
artifacts_store=self.claim_store,
init_connections=[
V1Init(connection=self.s3_store.name),
V1Init(connection=self.gcs_store.name),
V1Init(connection=self.az_store.name),
V1Init(connection=self.claim_store.name),
V1Init(connection=self.host_path_store.name),
],
connections=list(connection_by_names.keys()),
connection_by_names=connection_by_names,
secrets=[
self.non_mount_resource1,
self.non_mount_resource1,
self.mount_resource1,
self.mount_resource2,
],
config_maps=[
self.non_mount_resource1,
self.non_mount_resource1,
self.mount_resource1,
self.mount_resource2,
],
volumes=[self.vol1, self.vol2, self.vol3],
)
# 1: logs/output contexts (same volume) / 1 managed contexts
# 3: 3 context requested constant contexts
# 3: 3 volumes
        # 2: 2 mount volumes
# 4: 4 mount resources (secrets + configs)
assert len(pod_volumes) == 1 + 3 + 3 + 2 + 4
def test_all_volumes(self):
connection_by_names = {
self.s3_store.name: self.s3_store,
self.gcs_store.name: self.gcs_store,
self.az_store.name: self.az_store,
self.claim_store.name: self.claim_store,
self.host_path_store.name: self.host_path_store,
}
init_connections = [
V1Init(connection=self.s3_store.name, path="/test-1"),
V1Init(connection=self.gcs_store.name, path="/test-2"),
V1Init(connection=self.az_store.name, path="/test-3"),
V1Init(connection=self.claim_store.name, path="/test-4"),
V1Init(connection=self.host_path_store.name, path="/test-5"),
]
pod_volumes = get_pod_volumes(
contexts=PluginsContextsSpec.from_config(
V1Plugins(
docker=True,
shm=True,
auth=True,
collect_artifacts=True,
collect_logs=True,
)
),
artifacts_store=self.claim_store,
init_connections=init_connections,
connection_by_names=connection_by_names,
connections=[],
secrets=[
self.non_mount_resource1,
self.non_mount_resource1,
self.mount_resource1,
self.mount_resource2,
],
config_maps=[
self.non_mount_resource1,
self.non_mount_resource1,
self.mount_resource1,
self.mount_resource2,
],
volumes=[self.vol1, self.vol2, self.vol3],
)
# 1: logs/output contexts (same volume)
# 3: 3 context requested constant contexts
# 3: 3 volumes
        # 7: 5 managed contexts + 2 mount volumes
# 1: 1 secret
assert len(pod_volumes) == 1 + 3 + 3 + 7 + 1
pod_volumes = get_pod_volumes(
contexts=PluginsContextsSpec.from_config(
V1Plugins(
docker=True,
shm=True,
auth=True,
collect_artifacts=True,
collect_logs=True,
)
),
artifacts_store=self.claim_store,
init_connections=init_connections,
connection_by_names=connection_by_names,
connections=list(connection_by_names.keys()),
secrets=[
self.non_mount_resource1,
self.non_mount_resource1,
self.mount_resource1,
self.mount_resource2,
],
config_maps=[
self.non_mount_resource1,
self.non_mount_resource1,
self.mount_resource1,
self.mount_resource2,
],
volumes=[self.vol1, self.vol2, self.vol3],
)
# 1: logs/output contexts (same volume)
# 3: 3 context requested constant contexts
# 3: 3 volumes
        # 7: 5 managed contexts + 2 mount volumes
        # 1: 1 mount secret
assert len(pod_volumes) == 1 + 3 + 3 + 7 + 1
# Enable requesting resources
self.mount_resource1.is_requested = True
self.mount_resource2.is_requested = True
pod_volumes = get_pod_volumes(
contexts=PluginsContextsSpec.from_config(
V1Plugins(
docker=True,
shm=True,
auth=True,
collect_artifacts=True,
collect_logs=True,
)
),
artifacts_store=self.claim_store,
init_connections=init_connections,
connection_by_names=connection_by_names,
connections=[],
secrets=[
self.non_mount_resource1,
self.non_mount_resource1,
self.mount_resource1,
self.mount_resource2,
],
config_maps=[
self.non_mount_resource1,
self.non_mount_resource1,
self.mount_resource1,
self.mount_resource2,
],
volumes=[self.vol1, self.vol2, self.vol3],
)
# 1: logs/output contexts (same volume)
# 3: 3 context requested constant contexts
# 3: 3 volumes
# 7: 5 managed contexts + 2 mount volumes
# 4: 4 mount resources (secrets + configs)
assert len(pod_volumes) == 1 + 3 + 3 + 7 + 4
pod_volumes = get_pod_volumes(
contexts=PluginsContextsSpec.from_config(
V1Plugins(
docker=True,
shm=True,
auth=True,
collect_artifacts=True,
collect_logs=True,
)
),
artifacts_store=self.s3_store,
init_connections=init_connections,
connections=list(connection_by_names.keys()),
connection_by_names=connection_by_names,
secrets=[
self.non_mount_resource1,
self.non_mount_resource1,
self.mount_resource1,
self.mount_resource2,
],
config_maps=[
self.non_mount_resource1,
self.non_mount_resource1,
self.mount_resource1,
self.mount_resource2,
],
volumes=[self.vol1, self.vol2, self.vol3],
)
# 1: logs/output contexts (same volume)
# 3: 3 context requested constant contexts
# 3: 3 volumes
# 7: 5 managed contexts + 2 mount volumes
# 4: 4 mount resources (secrets + configs)
assert len(pod_volumes) == 1 + 3 + 3 + 7 + 4
def test_all_volumes_and_artifacts_store(self):
connection_by_names = {
self.s3_store.name: self.s3_store,
self.gcs_store.name: self.gcs_store,
self.az_store.name: self.az_store,
self.claim_store.name: self.claim_store,
self.host_path_store.name: self.host_path_store,
}
init_connections = [
V1Init(connection=self.s3_store.name, path="/test-1"),
V1Init(connection=self.gcs_store.name, path="/test-2"),
V1Init(connection=self.az_store.name, path="/test-3"),
V1Init(connection=self.claim_store.name, path="/test-4"),
V1Init(connection=self.host_path_store.name, path="/test-5"),
]
pod_volumes = get_pod_volumes(
contexts=PluginsContextsSpec.from_config(
V1Plugins(
docker=True,
shm=True,
auth=True,
collect_artifacts=False,
collect_logs=False,
)
),
artifacts_store=None,
init_connections=init_connections,
connection_by_names=connection_by_names,
connections=[],
secrets=[
self.non_mount_resource1,
self.non_mount_resource1,
self.mount_resource1,
self.mount_resource2,
],
config_maps=[
self.non_mount_resource1,
self.non_mount_resource1,
self.mount_resource1,
self.mount_resource2,
],
volumes=[self.vol1, self.vol2, self.vol3],
)
# 3: 3 context requested constant contexts
# 3: 3 volumes
        # 7: 5 managed contexts + 2 mount volumes
# 1: 1 secret
assert len(pod_volumes) == 3 + 3 + 7 + 1
pod_volumes = get_pod_volumes(
contexts=PluginsContextsSpec.from_config(
V1Plugins(
docker=True,
shm=True,
auth=True,
collect_artifacts=False,
collect_logs=False,
)
),
artifacts_store=None,
init_connections=init_connections,
connections=list(connection_by_names.keys()),
connection_by_names=connection_by_names,
secrets=[
self.non_mount_resource1,
self.non_mount_resource1,
self.mount_resource1,
self.mount_resource2,
],
config_maps=[
self.non_mount_resource1,
self.non_mount_resource1,
self.mount_resource1,
self.mount_resource2,
],
volumes=[self.vol1, self.vol2, self.vol3],
)
# 3: 3 context requested constant contexts
# 3: 3 volumes
        # 7: 5 managed contexts + 2 mount volumes
        # 1: 1 mount secret
assert len(pod_volumes) == 3 + 3 + 7 + 1
pod_volumes = get_pod_volumes(
contexts=PluginsContextsSpec.from_config(
V1Plugins(
docker=True,
shm=True,
auth=True,
collect_artifacts=True,
collect_logs=True,
)
),
artifacts_store=self.host_path_store,
init_connections=[
V1Init(connection=self.s3_store.name),
V1Init(connection=self.gcs_store.name),
V1Init(connection=self.az_store.name),
],
connections=list(connection_by_names.keys()),
connection_by_names=connection_by_names,
secrets=[
self.non_mount_resource1,
self.non_mount_resource1,
self.mount_resource1,
self.mount_resource2,
],
config_maps=[
self.non_mount_resource1,
self.non_mount_resource1,
self.mount_resource1,
self.mount_resource2,
],
volumes=[self.vol1, self.vol2, self.vol3],
)
# 4: 4 context requested constant contexts / init volumes contexts
# 3: 3 volumes
        # 2: 2 managed volumes
        # 1: 1 mount secret
assert len(pod_volumes) == 4 + 2 + 3 + 1
# Enable requesting resources
self.mount_resource1.is_requested = True
self.mount_resource2.is_requested = True
pod_volumes = get_pod_volumes(
contexts=PluginsContextsSpec.from_config(
V1Plugins(
docker=True,
shm=True,
auth=True,
collect_artifacts=True,
collect_logs=True,
)
),
artifacts_store=self.host_path_store,
init_connections=[
V1Init(connection=self.s3_store.name),
V1Init(connection=self.gcs_store.name),
V1Init(connection=self.az_store.name),
],
connections=list(connection_by_names.keys()),
connection_by_names=connection_by_names,
secrets=[
self.non_mount_resource1,
self.non_mount_resource1,
self.mount_resource1,
self.mount_resource2,
],
config_maps=[
self.non_mount_resource1,
self.non_mount_resource1,
self.mount_resource1,
self.mount_resource2,
],
volumes=[self.vol1, self.vol2, self.vol3],
)
# 4: 4 context requested constant contexts / init volumes contexts
# 3: 3 volumes
# 2: 2 managed volumes
# 4: 4 mount resources (secrets + configs)
assert len(pod_volumes) == 4 + 2 + 3 + 4
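The tests above rebuild the same fully-enabled `V1Plugins` spec many times. A small refactoring sketch using only names already imported in this file; the helper name is hypothetical, not part of the test suite.
# Hypothetical refactoring sketch: build the all-enabled plugins contexts
# once instead of repeating the V1Plugins block in every test.
def all_plugins_contexts(collect=True):
    return PluginsContextsSpec.from_config(
        V1Plugins(
            docker=True,
            shm=True,
            auth=True,
            collect_artifacts=collect,
            collect_logs=collect,
        )
    )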
| 34.909479
| 94
| 0.51102
| 3,627
| 40,879
| 5.46595
| 0.053212
| 0.07203
| 0.073544
| 0.057201
| 0.87425
| 0.853518
| 0.840706
| 0.819016
| 0.809634
| 0.792434
| 0
| 0.021862
| 0.409183
| 40,879
| 1,170
| 95
| 34.939316
| 0.798981
| 0.061768
| 0
| 0.760267
| 0
| 0
| 0.010606
| 0
| 0
| 0
| 0
| 0
| 0.061127
| 1
| 0.019102
| false
| 0.000955
| 0.009551
| 0
| 0.029608
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b24746671860d93d0fd061f15b25044e9365679d
| 1,803
|
py
|
Python
|
pvfactors/tests/test_geometry/test_data/__init__.py
|
tcapelle/pvfactors
|
1aaf6cdd3066a3a68d93db4ad7abcf10e97b5620
|
[
"BSD-3-Clause"
] | null | null | null |
pvfactors/tests/test_geometry/test_data/__init__.py
|
tcapelle/pvfactors
|
1aaf6cdd3066a3a68d93db4ad7abcf10e97b5620
|
[
"BSD-3-Clause"
] | null | null | null |
pvfactors/tests/test_geometry/test_data/__init__.py
|
tcapelle/pvfactors
|
1aaf6cdd3066a3a68d93db4ad7abcf10e97b5620
|
[
"BSD-3-Clause"
] | null | null | null |
import numpy as np
vm_flat_orderedpvarray = np.array(
[[0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 3, 0, 3, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 3, 0, 3, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 3, 0, 3, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 3, 0, 3, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 3, 0, 3, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 3, 0, 3, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 3, 0, 3, 0, 3, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4],
[2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4],
[2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4],
[2, 2, 2, 2, 2, 2, 2, 0, 0, 0, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]],
dtype=int
)
vm_right_orderedpvarray = np.array(
[[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9, 0, 9, 0, 9, 0, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 9, 0, 9, 0, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 7, 9, 0, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 7, 9, 0, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 7, 0, 7, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 7, 0, 7, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 7, 0, 7, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 7, 9, 0, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 7, 0, 7, 1],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7, 0, 7, 0, 7, 1],
[8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4],
[0, 6, 6, 6, 6, 6, 6, 6, 6, 6, 0, 0, 10, 0, 0, 0, 5],
[8, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10, 0, 0, 0, 0, 4],
[0, 0, 6, 6, 6, 6, 6, 6, 6, 6, 0, 0, 0, 0, 10, 0, 5],
[8, 8, 8, 8, 0, 0, 0, 8, 0, 0, 0, 0, 0, 10, 0, 0, 4],
[0, 0, 0, 0, 6, 6, 6, 0, 6, 6, 0, 0, 0, 0, 0, 0, 5],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0]],
dtype=int
)
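These fixtures encode expected view matrices for flat and right-tilted ordered PV arrays. A minimal sketch, independent of pvfactors internals, of how a test might compare a computed matrix against them; `vm_calculated` is a hypothetical input.
# Hypothetical consumer sketch: only numpy (imported above) is assumed.
def check_flat_view_matrix(vm_calculated):
    assert vm_calculated.shape == vm_flat_orderedpvarray.shape
    np.testing.assert_array_equal(vm_calculated, vm_flat_orderedpvarray)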
| 42.928571
| 58
| 0.321131
| 503
| 1,803
| 1.143141
| 0.043738
| 0.977391
| 1.288696
| 1.502609
| 0.94087
| 0.913043
| 0.873043
| 0.873043
| 0.850435
| 0.845217
| 0
| 0.428947
| 0.36772
| 1,803
| 41
| 59
| 43.97561
| 0.075439
| 0
| 0
| 0.578947
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.026316
| 0
| 0.026316
| 0
| 0
| 0
| 1
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 12
|
b25e1c2de7989ef15ec8bb06990be3f6ed046a2b
| 147
|
py
|
Python
|
handlers/__init__.py
|
intikamailesi/Guess-the-number-telegram-bot
|
70504597c27c5f7fac5cff88e4fb2a696111c82f
|
[
"MIT"
] | null | null | null |
handlers/__init__.py
|
intikamailesi/Guess-the-number-telegram-bot
|
70504597c27c5f7fac5cff88e4fb2a696111c82f
|
[
"MIT"
] | null | null | null |
handlers/__init__.py
|
intikamailesi/Guess-the-number-telegram-bot
|
70504597c27c5f7fac5cff88e4fb2a696111c82f
|
[
"MIT"
] | 1
|
2022-03-21T06:28:41.000Z
|
2022-03-21T06:28:41.000Z
|
from .start_handler import dp
from .user_play_handler import dp
from .bot_play_handler import dp
from .finish_handler import dp
__all__ = ["dp"]
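Each import above runs a handler module for its side effects: every module registers its handlers on the shared dispatcher, and the package re-exports that `dp`. A hedged sketch of the consuming entry point, assuming aiogram 2.x (which this import pattern suggests); the file itself is hypothetical and not part of this package.
# Hypothetical bot entry point: importing `handlers` triggers the imports
# above, which register all handlers on the shared dispatcher before
# polling starts.
from aiogram import executor
from handlers import dp

if __name__ == "__main__":
    executor.start_polling(dp, skip_updates=True)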
| 18.375
| 33
| 0.795918
| 24
| 147
| 4.458333
| 0.416667
| 0.485981
| 0.560748
| 0.53271
| 0.429907
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.142857
| 147
| 7
| 34
| 21
| 0.849206
| 0
| 0
| 0
| 0
| 0
| 0.013605
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.8
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
b2888a72b426440f0f8fb79b98732d2b20b1c4a4
| 139
|
py
|
Python
|
ext/pybind11/tests/test_installed_target/test.py
|
chohy/cho_gem5
|
1207718477576053ee6222faff03dd888a90dbcf
|
[
"BSD-3-Clause"
] | null | null | null |
ext/pybind11/tests/test_installed_target/test.py
|
chohy/cho_gem5
|
1207718477576053ee6222faff03dd888a90dbcf
|
[
"BSD-3-Clause"
] | null | null | null |
ext/pybind11/tests/test_installed_target/test.py
|
chohy/cho_gem5
|
1207718477576053ee6222faff03dd888a90dbcf
|
[
"BSD-3-Clause"
] | null | null | null |
import test_installed_target
assert test_installed_target.add(1, 2) == 3
print('test_installed_target imports, runs, and adds: 1 + 2 = 3')
| 34.75
| 65
| 0.769784
| 23
| 139
| 4.391304
| 0.608696
| 0.386139
| 0.564356
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.04918
| 0.122302
| 139
| 3
| 66
| 46.333333
| 0.778689
| 0
| 0
| 0
| 0
| 0
| 0.402878
| 0.151079
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a2c6a2a4632abf540b1ec11d9239a96b5dca9d7f
| 17,622
|
py
|
Python
|
sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_messages_operations.py
|
vchske/azure-sdk-for-python
|
6383ed3676b7355af7be394562b126209961ec13
|
[
"MIT"
] | null | null | null |
sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_messages_operations.py
|
vchske/azure-sdk-for-python
|
6383ed3676b7355af7be394562b126209961ec13
|
[
"MIT"
] | 1
|
2019-06-04T18:12:16.000Z
|
2019-06-04T18:12:16.000Z
|
sdk/storage/azure-storage-queue/azure/storage/queue/_generated/operations/_messages_operations.py
|
vchske/azure-sdk-for-python
|
6383ed3676b7355af7be394562b126209961ec13
|
[
"MIT"
] | 1
|
2019-06-17T22:18:23.000Z
|
2019-06-17T22:18:23.000Z
|
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
# pylint: skip-file
from azure.core.exceptions import map_error
from .. import models
class MessagesOperations(object):
"""MessagesOperations operations.
    You should not instantiate this class directly; instead, create a Client instance that will create it for you and attach it as an attribute.
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
    :ivar peekonly: Constant value: "true".
"""
models = models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
self.peekonly = "true"
def dequeue(self, number_of_messages=None, visibilitytimeout=None, timeout=None, request_id=None, cls=None, **kwargs):
"""The Dequeue operation retrieves one or more messages from the front of
the queue.
:param number_of_messages: Optional. A nonzero integer value that
specifies the number of messages to retrieve from the queue, up to a
maximum of 32. If fewer are visible, the visible messages are
returned. By default, a single message is retrieved from the queue
with this operation.
:type number_of_messages: int
:param visibilitytimeout: Optional. Specifies the new visibility
timeout value, in seconds, relative to server time. The default value
is 30 seconds. A specified value must be larger than or equal to 1
second, and cannot be larger than 7 days, or larger than 2 hours on
REST protocol versions prior to version 2011-08-18. The visibility
timeout of a message can be set to a value later than the expiry time.
:type visibilitytimeout: int
        :param timeout: The timeout parameter is expressed in seconds. For
         more information, see <a
         href="https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-queue-service-operations">Setting
         Timeouts for Queue Service Operations.</a>
:type timeout: int
:param request_id: Provides a client-generated, opaque value with a 1
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: list or the result of cls(response)
:rtype: list[~queue.models.DequeuedMessageItem]
:raises:
:class:`StorageErrorException<queue.models.StorageErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.dequeue.metadata['url']
path_format_arguments = {
'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if number_of_messages is not None:
query_parameters['numofmessages'] = self._serialize.query("number_of_messages", number_of_messages, 'int', minimum=1)
if visibilitytimeout is not None:
query_parameters['visibilitytimeout'] = self._serialize.query("visibilitytimeout", visibilitytimeout, 'int', maximum=604800, minimum=0)
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/xml'
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.StorageErrorException(response, self._deserialize)
header_dict = {}
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('[DequeuedMessageItem]', response)
header_dict = {
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
}
if cls:
return cls(response, deserialized, header_dict)
return deserialized
dequeue.metadata = {'url': '/{queueName}/messages'}
def clear(self, timeout=None, request_id=None, cls=None, **kwargs):
"""The Clear operation deletes all messages from the specified queue.
        :param timeout: The timeout parameter is expressed in seconds. For
         more information, see <a
         href="https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-queue-service-operations">Setting
         Timeouts for Queue Service Operations.</a>
:type timeout: int
:param request_id: Provides a client-generated, opaque value with a 1
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: None or the result of cls(response)
:rtype: None
:raises:
:class:`StorageErrorException<queue.models.StorageErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.clear.metadata['url']
path_format_arguments = {
'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
# Construct headers
header_parameters = {}
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
# Construct and send request
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.StorageErrorException(response, self._deserialize)
if cls:
response_headers = {
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
}
return cls(response, None, response_headers)
clear.metadata = {'url': '/{queueName}/messages'}
def enqueue(self, queue_message, visibilitytimeout=None, message_time_to_live=None, timeout=None, request_id=None, cls=None, **kwargs):
"""The Enqueue operation adds a new message to the back of the message
queue. A visibility timeout can also be specified to make the message
invisible until the visibility timeout expires. A message must be in a
format that can be included in an XML request with UTF-8 encoding. The
encoded message can be up to 64 KB in size for versions 2011-08-18 and
newer, or 8 KB in size for previous versions.
:param queue_message: A Message object which can be stored in a Queue
:type queue_message: ~queue.models.QueueMessage
:param visibilitytimeout: Optional. Specifies the new visibility
timeout value, in seconds, relative to server time. The default value
is 30 seconds. A specified value must be larger than or equal to 1
second, and cannot be larger than 7 days, or larger than 2 hours on
REST protocol versions prior to version 2011-08-18. The visibility
timeout of a message can be set to a value later than the expiry time.
:type visibilitytimeout: int
:param message_time_to_live: Optional. Specifies the time-to-live
interval for the message, in seconds. Prior to version 2017-07-29, the
maximum time-to-live allowed is 7 days. For version 2017-07-29 or
later, the maximum time-to-live can be any positive number, as well as
-1 indicating that the message does not expire. If this parameter is
omitted, the default time-to-live is 7 days.
:type message_time_to_live: int
        :param timeout: The timeout parameter is expressed in seconds. For
         more information, see <a
         href="https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-queue-service-operations">Setting
         Timeouts for Queue Service Operations.</a>
:type timeout: int
:param request_id: Provides a client-generated, opaque value with a 1
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: list or the result of cls(response)
:rtype: list[~queue.models.EnqueuedMessage]
:raises:
:class:`StorageErrorException<queue.models.StorageErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.enqueue.metadata['url']
path_format_arguments = {
'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if visibilitytimeout is not None:
query_parameters['visibilitytimeout'] = self._serialize.query("visibilitytimeout", visibilitytimeout, 'int', maximum=604800, minimum=0)
if message_time_to_live is not None:
query_parameters['messagettl'] = self._serialize.query("message_time_to_live", message_time_to_live, 'int', minimum=-1)
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/xml'
header_parameters['Content-Type'] = 'application/xml; charset=utf-8'
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
# Construct body
body_content = self._serialize.body(queue_message, 'QueueMessage')
# Construct and send request
request = self._client.post(url, query_parameters, header_parameters, body_content)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.StorageErrorException(response, self._deserialize)
header_dict = {}
deserialized = None
if response.status_code == 201:
deserialized = self._deserialize('[EnqueuedMessage]', response)
header_dict = {
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
}
if cls:
return cls(response, deserialized, header_dict)
return deserialized
enqueue.metadata = {'url': '/{queueName}/messages'}
def peek(self, number_of_messages=None, timeout=None, request_id=None, cls=None, **kwargs):
"""The Peek operation retrieves one or more messages from the front of the
queue, but does not alter the visibility of the message.
:param number_of_messages: Optional. A nonzero integer value that
specifies the number of messages to retrieve from the queue, up to a
maximum of 32. If fewer are visible, the visible messages are
returned. By default, a single message is retrieved from the queue
with this operation.
:type number_of_messages: int
        :param timeout: The timeout parameter is expressed in seconds. For
         more information, see <a
         href="https://docs.microsoft.com/en-us/rest/api/storageservices/setting-timeouts-for-queue-service-operations">Setting
         Timeouts for Queue Service Operations.</a>
:type timeout: int
:param request_id: Provides a client-generated, opaque value with a 1
KB character limit that is recorded in the analytics logs when storage
analytics logging is enabled.
:type request_id: str
:param callable cls: A custom type or function that will be passed the
direct response
:return: list or the result of cls(response)
:rtype: list[~queue.models.PeekedMessageItem]
:raises:
:class:`StorageErrorException<queue.models.StorageErrorException>`
"""
error_map = kwargs.pop('error_map', None)
# Construct URL
url = self.peek.metadata['url']
path_format_arguments = {
'url': self._serialize.url("self._config.url", self._config.url, 'str', skip_quote=True)
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {}
if number_of_messages is not None:
query_parameters['numofmessages'] = self._serialize.query("number_of_messages", number_of_messages, 'int', minimum=1)
if timeout is not None:
query_parameters['timeout'] = self._serialize.query("timeout", timeout, 'int', minimum=0)
query_parameters['peekonly'] = self._serialize.query("self.peekonly", self.peekonly, 'str')
# Construct headers
header_parameters = {}
header_parameters['Accept'] = 'application/xml'
header_parameters['x-ms-version'] = self._serialize.header("self._config.version", self._config.version, 'str')
if request_id is not None:
header_parameters['x-ms-client-request-id'] = self._serialize.header("request_id", request_id, 'str')
# Construct and send request
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise models.StorageErrorException(response, self._deserialize)
header_dict = {}
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('[PeekedMessageItem]', response)
header_dict = {
'x-ms-request-id': self._deserialize('str', response.headers.get('x-ms-request-id')),
'x-ms-version': self._deserialize('str', response.headers.get('x-ms-version')),
'Date': self._deserialize('rfc-1123', response.headers.get('Date')),
'x-ms-error-code': self._deserialize('str', response.headers.get('x-ms-error-code')),
}
if cls:
return cls(response, deserialized, header_dict)
return deserialized
peek.metadata = {'url': '/{queueName}/messages'}
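A rough usage sketch of the operations above, assuming a configured generated client; the `client` variable, its `messages` attribute, and the `QueueMessage` field name are hypothetical wiring, while the method signatures come from this class.
# Hypothetical usage sketch for MessagesOperations; client setup is assumed.
msg = models.QueueMessage(message_text="hello")  # field name is an assumption
client.messages.enqueue(
    queue_message=msg, visibilitytimeout=0, message_time_to_live=3600
)
# Peek does not change message visibility; dequeue hides the returned
# messages from other consumers for the given visibility timeout.
peeked = client.messages.peek(number_of_messages=5)
dequeued = client.messages.dequeue(number_of_messages=5, visibilitytimeout=60)
client.messages.clear()  # deletes every message in the queue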
| 51.078261
| 147
| 0.660424
| 2,156
| 17,622
| 5.268553
| 0.124304
| 0.028524
| 0.025354
| 0.027467
| 0.816445
| 0.792763
| 0.792763
| 0.786689
| 0.786689
| 0.783168
| 0
| 0.009267
| 0.234536
| 17,622
| 344
| 148
| 51.226744
| 0.832827
| 0.379299
| 0
| 0.713376
| 1
| 0
| 0.139189
| 0.019285
| 0
| 0
| 0
| 0
| 0
| 1
| 0.031847
| false
| 0
| 0.012739
| 0
| 0.101911
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
a2d8b956152b2fba07ca200e44e7fca15313869b
| 6,068
|
py
|
Python
|
main.py
|
SadhukhanR/BMI-Labs
|
d95afce8a2fd87f66b2b551db88753a57d8687f8
|
[
"MIT"
] | 2
|
2021-11-11T10:16:53.000Z
|
2021-11-12T16:48:11.000Z
|
main.py
|
SadhukhanR/BMI-Labs
|
d95afce8a2fd87f66b2b551db88753a57d8687f8
|
[
"MIT"
] | null | null | null |
main.py
|
SadhukhanR/BMI-Labs
|
d95afce8a2fd87f66b2b551db88753a57d8687f8
|
[
"MIT"
] | 1
|
2021-11-10T10:51:35.000Z
|
2021-11-10T10:51:35.000Z
|
# This is a demo project
# R Sadhukhan
# bmi labs
import numpy as np
import matplotlib.pyplot as plt
#info gathering
x = input('Enter Your Name :/>')
y = float(input('Enter Your Age:/>'))
a = float(input('Enter Your Weight in kg:/>'))
b = float(input('Enter Your Height in ft:/>'))
#calculation
c = b*0.3048
d = a/c**2
#result
print("calculating bmi index ...............")
print("=====================================")
print("RESULT !!!")
print("***********")
print("Your Name:",x)
print("Your Age:",y)
print("Your Weight:",a,"kg")
print("Your Height:",b,"ft")
print("Your Bmi Index =",d)
l = np.linspace(1,d,100)
m = np.linspace(1,a,100)
n = np.exp(l)
print("******************")
if d < 18.5:
print("You Are Underweight !!!")
print("BMI Labs Remark !!!")
print("possible nutritional deficiency and osteoporosis")
print("=================================")
print("Genarating Graphs !!!")
plt.subplot(211)
plt.title('You Are Underweight!!!',color='r',size=10)
plt.suptitle('BMI Index Graph',color='g',size=20)
plt.xlabel('Weight->',size=10)
plt.ylabel('BMI index->',size=10)
plt.plot(m,l,color='r',lw=2)
plt.grid()
plt.show()
plt.subplot(212)
plt.title('You Are Underweight!!!',color='r',size=10)
plt.suptitle('BMI Index Graph',color='g',size=20)
plt.xlabel('BMI->',size=10)
plt.ylabel('exp BMI->',size=10)
plt.plot(l,n,color='r',lw=2)
plt.grid()
plt.show()
your = (d,d)
und = (18.5,18.5)
nor = (22.9,22.9)
mov = (27.4,27.4)
ove = (40,40)
z = np.arange(len(und))
bar_width = 0.1
q = z+0.1
plt.xlim(0,3)
plt.bar(z+bar_width,your,bar_width,label='Your BMI Index',color='r')
plt.bar(q+2*bar_width,und,bar_width,label='Underweight',color='r')
plt.bar(q+3*bar_width,nor,bar_width,label='Normal BMI',color='g')
plt.bar(q+4*bar_width,mov,bar_width,label='Mild Overweight',color='y')
plt.bar(q+5*bar_width,ove,bar_width,label='Overweight',color='r')
plt.legend()
plt.show()
if 18.5 <= d < 23.0:
print("Normal BMI")
print("BMI Labs Remark !!!")
print("LOW RISK !")
print("=================================")
print("Genarating Graphs !!!")
plt.subplot(211)
plt.title('Normal BMI',color='g',size=10)
plt.suptitle('BMI Index Graph',color='g',size=20)
plt.xlabel('Weight->',size=10)
plt.ylabel('BMI index->',size=10)
plt.plot(m,l,color='g',lw=2)
plt.grid()
plt.show()
plt.subplot(212)
plt.title('Normal BMI',color='g',size=10)
plt.suptitle('BMI Index Graph',color='g',size=20)
plt.xlabel('BMI->',size=10)
plt.ylabel('exp BMI->',size=10)
plt.plot(l,n,color='g',lw=2)
plt.grid()
plt.show()
your = (d,d)
und = (18.5,18.5)
nor = (22.9,22.9)
mov = (27.4,27.4)
ove = (40,40)
z = np.arange(len(und))
bar_width = 0.1
q = z+0.1
plt.xlim(0,3)
plt.bar(z+bar_width,your,bar_width,label='Your BMI Index',color='g')
plt.bar(q+2*bar_width,und,bar_width,label='Underweight',color='r')
plt.bar(q+3*bar_width,nor,bar_width,label='Normal BMI',color='g')
plt.bar(q+4*bar_width,mov,bar_width,label='Mild Overweight',color='y')
plt.bar(q+5*bar_width,ove,bar_width,label='Overweight',color='r')
plt.legend()
plt.show()
if 23.0 <= d < 27.5:
print("You Are Mild to moderate overwright!!")
print("BMI Labs Remark !!!")
print("Heart disease ,High blood pressuere, stroke, Diabetes Mellitus")
print("=================================")
print("Genarating Graphs !!!")
plt.subplot(211)
plt.title('You Are Mild Overweight!!',color='y',size=10)
plt.suptitle('BMI Index Graph',color='g',size=20)
plt.xlabel('Weight->',size=10)
plt.ylabel('BMI index->',size=10)
plt.plot(m,l,color='y',lw=2)
plt.grid()
plt.show()
plt.subplot(212)
plt.title('You Are Mild Overweight!!',color='y',size=10)
plt.suptitle('BMI Index Graph',color='g',size=20)
plt.xlabel('BMI->',size=10)
plt.ylabel('exp BMI->',size=10)
plt.plot(l,n,color='y',lw=2)
plt.grid()
plt.show()
your = (d,d)
und = (18.5,18.5)
nor = (22.9,22.9)
mov = (27.4,27.4)
ove = (40,40)
z = np.arange(len(und))
bar_width = 0.1
q = z+0.1
plt.xlim(0,3)
plt.bar(z+bar_width,your,bar_width,label='Your BMI Index',color='y')
plt.bar(q+2*bar_width,und,bar_width,label='Underweight',color='r')
plt.bar(q+3*bar_width,nor,bar_width,label='Normal BMI',color='g')
plt.bar(q+4*bar_width,mov,bar_width,label='Mild Overweight',color='y')
plt.bar(q+5*bar_width,ove,bar_width,label='Overweight',color='r')
plt.legend()
plt.show()
if d >= 27.5:
print("You Are Very Overweight or Obese !!!")
print("BMI Labs Remark !!!")
print("High risk of devoloping heart disease, High blood presure, Stroke,Diabetes Mellitus, Metabolic syndrome")
print("=================================")
print("Genarating Graphs !!!")
plt.subplot(211)
plt.title('You Are very Overweight!!!',color='r',size=10)
plt.suptitle('BMI Index Graph',color='g',size=20)
plt.xlabel('Weight->',size=10)
plt.ylabel('BMI index->',size=10)
plt.plot(m,l,color='r',lw=2)
plt.grid()
plt.show()
plt.subplot(212)
plt.title('You Are very Overweight!!!',color='r',size=10)
plt.suptitle('BMI Index Graph',color='g',size=20)
plt.xlabel('BMI->',size=10)
plt.ylabel('exp BMI->',size=10)
plt.plot(l,n,color='r',lw=2)
plt.grid()
plt.show()
your = (d,d)
und = (18.5,18.5)
nor = (22.9,22.9)
mov = (27.4,27.4)
ove = (40,40)
z = np.arange(len(und))
bar_width = 0.1
q = z+0.1
plt.xlim(0,3)
plt.bar(z+bar_width,your,bar_width,label='Your BMI Index',color='r')
plt.bar(q+2*bar_width,und,bar_width,label='Underweight',color='r')
plt.bar(q+3*bar_width,nor,bar_width,label='Normal BMI',color='g')
plt.bar(q+4*bar_width,mov,bar_width,label='Mild Overweight',color='y')
plt.bar(q+5*bar_width,ove,bar_width,label='Overweight',color='r')
plt.legend()
plt.show()
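The calculation block near the top converts feet to metres (1 ft = 0.3048 m) and applies BMI = weight / height². The same arithmetic as a small self-contained function with a worked check; the function name is introduced here for illustration.
# Self-contained restatement of the formula used above.
def bmi(weight_kg, height_ft):
    height_m = height_ft * 0.3048  # 1 ft = 0.3048 m
    return weight_kg / height_m ** 2

# Example: 70 kg at 5.8 ft -> 1.76784 m -> BMI of about 22.4 (normal range)
print(round(bmi(70, 5.8), 1))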
| 32.978261
| 116
| 0.590639
| 1,006
| 6,068
| 3.518887
| 0.124254
| 0.099435
| 0.061017
| 0.038418
| 0.800565
| 0.774011
| 0.774011
| 0.774011
| 0.772881
| 0.761864
| 0
| 0.050854
| 0.170402
| 6,068
| 183
| 117
| 33.15847
| 0.652364
| 0.012525
| 0
| 0.774566
| 0
| 0
| 0.26534
| 0.028256
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.011561
| 0
| 0.011561
| 0.17341
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a2f714ca969367d4a448568ab24ca4ef4560e2aa
| 5,740
|
py
|
Python
|
tests/test_trigger.py
|
trakken/gtm_manager
|
4825cc87daf36bf2feeae8c463243b128008e36f
|
[
"MIT"
] | 7
|
2018-12-14T11:05:44.000Z
|
2021-12-03T18:33:17.000Z
|
tests/test_trigger.py
|
trakken/gtm_manager
|
4825cc87daf36bf2feeae8c463243b128008e36f
|
[
"MIT"
] | 2
|
2019-07-16T09:40:47.000Z
|
2019-08-22T20:57:04.000Z
|
tests/test_trigger.py
|
trakken/gtm_manager
|
4825cc87daf36bf2feeae8c463243b128008e36f
|
[
"MIT"
] | 3
|
2021-07-21T07:55:50.000Z
|
2022-01-14T12:54:02.000Z
|
# pylint: disable=missing-docstring
from gtm_manager.trigger import GTMTrigger
def test_init(mock_service):
service, responses = mock_service("trigger_get.json")
trigger_get = responses[0]
trigger = GTMTrigger(
path="accounts/1234/containers/1234/workspaces/1/triggers/1", service=service
)
assert trigger.maxTimerLengthSeconds == trigger_get.get("maxTimerLengthSeconds")
assert trigger.totalTimeMinMilliseconds == trigger_get.get(
"totalTimeMinMilliseconds"
)
assert trigger.uniqueTriggerId == trigger_get.get("uniqueTriggerId")
assert trigger.verticalScrollPercentageList == trigger_get.get(
"verticalScrollPercentageList"
)
assert trigger.horizontalScrollPercentageList == trigger_get.get(
"horizontalScrollPercentageList"
)
assert trigger.containerId == trigger_get.get("containerId")
assert trigger.waitForTagsTimeout == trigger_get.get("waitForTagsTimeout")
assert trigger.accountId == trigger_get.get("accountId")
assert trigger.waitForTags == trigger_get.get("waitForTags")
assert trigger.intervalSeconds == trigger_get.get("intervalSeconds")
assert trigger.eventName == trigger_get.get("eventName")
assert trigger.visibilitySelector == trigger_get.get("visibilitySelector")
assert trigger.workspaceId == trigger_get.get("workspaceId")
assert trigger.customEventFilter == trigger_get.get("customEventFilter")
assert trigger.parentFolderId == trigger_get.get("parentFolderId")
assert trigger.continuousTimeMinMilliseconds == trigger_get.get(
"continuousTimeMinMilliseconds"
)
assert trigger.selector == trigger_get.get("selector")
assert trigger.triggerId == trigger_get.get("triggerId")
assert trigger.tagManagerUrl == trigger_get.get("tagManagerUrl")
assert trigger.fingerprint == trigger_get.get("fingerprint")
assert trigger.visiblePercentageMax == trigger_get.get("visiblePercentageMax")
assert trigger.name == trigger_get.get("name")
assert trigger.visiblePercentageMin == trigger_get.get("visiblePercentageMin")
assert trigger.type == trigger_get.get("type")
assert trigger.notes == trigger_get.get("notes")
assert trigger.interval == trigger_get.get("interval")
assert trigger.filter == trigger_get.get("filter")
assert trigger.autoEventFilter == trigger_get.get("autoEventFilter")
assert trigger.limit == trigger_get.get("limit")
assert trigger.checkValidation == trigger_get.get("checkValidation")
assert trigger.path == trigger_get.get("path")
trigger = GTMTrigger(
trigger=trigger_get,
parent="accounts/1234/containers/1234/workspaces/1",
service=service,
)
assert trigger.maxTimerLengthSeconds == trigger_get.get("maxTimerLengthSeconds")
assert trigger.totalTimeMinMilliseconds == trigger_get.get(
"totalTimeMinMilliseconds"
)
assert trigger.uniqueTriggerId == trigger_get.get("uniqueTriggerId")
assert trigger.verticalScrollPercentageList == trigger_get.get(
"verticalScrollPercentageList"
)
assert trigger.horizontalScrollPercentageList == trigger_get.get(
"horizontalScrollPercentageList"
)
assert trigger.containerId == trigger_get.get("containerId")
assert trigger.waitForTagsTimeout == trigger_get.get("waitForTagsTimeout")
assert trigger.accountId == trigger_get.get("accountId")
assert trigger.waitForTags == trigger_get.get("waitForTags")
assert trigger.intervalSeconds == trigger_get.get("intervalSeconds")
assert trigger.eventName == trigger_get.get("eventName")
assert trigger.visibilitySelector == trigger_get.get("visibilitySelector")
assert trigger.workspaceId == trigger_get.get("workspaceId")
assert trigger.customEventFilter == trigger_get.get("customEventFilter")
assert trigger.parentFolderId == trigger_get.get("parentFolderId")
assert trigger.continuousTimeMinMilliseconds == trigger_get.get(
"continuousTimeMinMilliseconds"
)
assert trigger.selector == trigger_get.get("selector")
assert trigger.triggerId == trigger_get.get("triggerId")
assert trigger.tagManagerUrl == trigger_get.get("tagManagerUrl")
assert trigger.fingerprint == trigger_get.get("fingerprint")
assert trigger.visiblePercentageMax == trigger_get.get("visiblePercentageMax")
assert trigger.name == trigger_get.get("name")
assert trigger.visiblePercentageMin == trigger_get.get("visiblePercentageMin")
assert trigger.type == trigger_get.get("type")
assert trigger.notes == trigger_get.get("notes")
assert trigger.interval == trigger_get.get("interval")
assert trigger.filter == trigger_get.get("filter")
assert trigger.autoEventFilter == trigger_get.get("autoEventFilter")
assert trigger.limit == trigger_get.get("limit")
assert trigger.checkValidation == trigger_get.get("checkValidation")
assert trigger.path == trigger_get.get("path")
def test_update(mock_service):
service, responses = mock_service("trigger_get.json", "echo_request_body")
trigger_get = responses[0]
trigger = GTMTrigger(
path="accounts/1234/containers/1234/workspaces/1/triggers/3", service=service
)
update = {"name": "New Trigger Name 1", "notes": "New Trigger Notes"}
trigger.update(**update)
trigger_get_updated = {**trigger_get, **update}
assert trigger.name == trigger_get_updated.get("name")
assert trigger.notes == trigger_get_updated.get("notes")
def test_delete(mock_service):
service, _ = mock_service("trigger_get.json", "echo_request_body")
trigger = GTMTrigger(
path="accounts/1234/containers/1234/workspaces/1/triggers/1", service=service
)
trigger.delete()
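# --- Editor's sketch: not part of the original test module ---
# The tests above rely on a `mock_service` pytest fixture defined elsewhere
# in this repository. A minimal stand-in with the same call shape could look
# like the following; the fixture-file loading and the MagicMock service are
# assumptions, and it is named `mock_service_sketch` to avoid shadowing the
# real fixture.
import json
from unittest import mock
import pytest
@pytest.fixture
def mock_service_sketch():
    def _factory(*names):
        # Parse JSON fixture files; pass other names (e.g. "echo_request_body")
        # through untouched, mirroring how the tests above consume them.
        responses = []
        for name in names:
            if name.endswith(".json"):
                with open(name) as handle:
                    responses.append(json.load(handle))
            else:
                responses.append(name)
        return mock.MagicMock(), responses
    return _factory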
| 45.555556
| 85
| 0.741812
| 583
| 5,740
| 7.149228
| 0.113208
| 0.172745
| 0.193378
| 0.024952
| 0.924424
| 0.911228
| 0.902591
| 0.902591
| 0.902591
| 0.864443
| 0
| 0.008557
| 0.144948
| 5,740
| 125
| 86
| 45.92
| 0.840668
| 0.005749
| 0
| 0.728972
| 0
| 0
| 0.208589
| 0.081507
| 0
| 0
| 0
| 0
| 0.598131
| 1
| 0.028037
| false
| 0
| 0.009346
| 0
| 0.037383
| 0.018692
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a2fbfceded82b4107701dccacfb023724aa1656b
| 22,554
|
py
|
Python
|
sdk/python/pulumi_aws/s3/bucket_lifecycle_configuration_v2.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/s3/bucket_lifecycle_configuration_v2.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_aws/s3/bucket_lifecycle_configuration_v2.py
|
chivandikwa/pulumi-aws
|
19c08bf9dcb90544450ffa4eec7bf6751058fde2
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['BucketLifecycleConfigurationV2Args', 'BucketLifecycleConfigurationV2']
@pulumi.input_type
class BucketLifecycleConfigurationV2Args:
def __init__(__self__, *,
bucket: pulumi.Input[str],
rules: pulumi.Input[Sequence[pulumi.Input['BucketLifecycleConfigurationV2RuleArgs']]],
expected_bucket_owner: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a BucketLifecycleConfigurationV2 resource.
:param pulumi.Input[str] bucket: The name of the source S3 bucket you want Amazon S3 to monitor.
:param pulumi.Input[Sequence[pulumi.Input['BucketLifecycleConfigurationV2RuleArgs']]] rules: List of configuration blocks describing the lifecycle rules, documented below.
:param pulumi.Input[str] expected_bucket_owner: The account ID of the expected bucket owner. If the bucket is owned by a different account, the request will fail with an HTTP 403 (Access Denied) error.
"""
pulumi.set(__self__, "bucket", bucket)
pulumi.set(__self__, "rules", rules)
if expected_bucket_owner is not None:
pulumi.set(__self__, "expected_bucket_owner", expected_bucket_owner)
@property
@pulumi.getter
def bucket(self) -> pulumi.Input[str]:
"""
The name of the source S3 bucket you want Amazon S3 to monitor.
"""
return pulumi.get(self, "bucket")
@bucket.setter
def bucket(self, value: pulumi.Input[str]):
pulumi.set(self, "bucket", value)
@property
@pulumi.getter
def rules(self) -> pulumi.Input[Sequence[pulumi.Input['BucketLifecycleConfigurationV2RuleArgs']]]:
"""
List of configuration blocks describing the lifecycle rules, documented below.
"""
return pulumi.get(self, "rules")
@rules.setter
def rules(self, value: pulumi.Input[Sequence[pulumi.Input['BucketLifecycleConfigurationV2RuleArgs']]]):
pulumi.set(self, "rules", value)
@property
@pulumi.getter(name="expectedBucketOwner")
def expected_bucket_owner(self) -> Optional[pulumi.Input[str]]:
"""
The account ID of the expected bucket owner. If the bucket is owned by a different account, the request will fail with an HTTP 403 (Access Denied) error.
"""
return pulumi.get(self, "expected_bucket_owner")
@expected_bucket_owner.setter
def expected_bucket_owner(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "expected_bucket_owner", value)
@pulumi.input_type
class _BucketLifecycleConfigurationV2State:
def __init__(__self__, *,
bucket: Optional[pulumi.Input[str]] = None,
expected_bucket_owner: Optional[pulumi.Input[str]] = None,
rules: Optional[pulumi.Input[Sequence[pulumi.Input['BucketLifecycleConfigurationV2RuleArgs']]]] = None):
"""
Input properties used for looking up and filtering BucketLifecycleConfigurationV2 resources.
:param pulumi.Input[str] bucket: The name of the source S3 bucket you want Amazon S3 to monitor.
:param pulumi.Input[str] expected_bucket_owner: The account ID of the expected bucket owner. If the bucket is owned by a different account, the request will fail with an HTTP 403 (Access Denied) error.
:param pulumi.Input[Sequence[pulumi.Input['BucketLifecycleConfigurationV2RuleArgs']]] rules: List of configuration blocks describing the lifecycle rules, documented below.
"""
if bucket is not None:
pulumi.set(__self__, "bucket", bucket)
if expected_bucket_owner is not None:
pulumi.set(__self__, "expected_bucket_owner", expected_bucket_owner)
if rules is not None:
pulumi.set(__self__, "rules", rules)
@property
@pulumi.getter
def bucket(self) -> Optional[pulumi.Input[str]]:
"""
The name of the source S3 bucket you want Amazon S3 to monitor.
"""
return pulumi.get(self, "bucket")
@bucket.setter
def bucket(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "bucket", value)
@property
@pulumi.getter(name="expectedBucketOwner")
def expected_bucket_owner(self) -> Optional[pulumi.Input[str]]:
"""
The account ID of the expected bucket owner. If the bucket is owned by a different account, the request will fail with an HTTP 403 (Access Denied) error.
"""
return pulumi.get(self, "expected_bucket_owner")
@expected_bucket_owner.setter
def expected_bucket_owner(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "expected_bucket_owner", value)
@property
@pulumi.getter
def rules(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['BucketLifecycleConfigurationV2RuleArgs']]]]:
"""
List of configuration blocks describing the lifecycle rules, documented below.
"""
return pulumi.get(self, "rules")
@rules.setter
def rules(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['BucketLifecycleConfigurationV2RuleArgs']]]]):
pulumi.set(self, "rules", value)
class BucketLifecycleConfigurationV2(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
bucket: Optional[pulumi.Input[str]] = None,
expected_bucket_owner: Optional[pulumi.Input[str]] = None,
rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BucketLifecycleConfigurationV2RuleArgs']]]]] = None,
__props__=None):
"""
Provides an independent configuration resource for S3 bucket [lifecycle configuration](https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-lifecycle-mgmt.html).
## Example Usage
```python
import pulumi
import pulumi_aws as aws
bucket = aws.s3.BucketV2("bucket")
bucket_acl = aws.s3.BucketAclV2("bucketAcl",
bucket=bucket.id,
acl="private")
bucket_config = aws.s3.BucketLifecycleConfigurationV2("bucket-config",
bucket=bucket.bucket,
rules=[
aws.s3.BucketLifecycleConfigurationV2RuleArgs(
id="log",
expiration=aws.s3.BucketLifecycleConfigurationV2RuleExpirationArgs(
days=90,
),
filter=aws.s3.BucketLifecycleConfigurationV2RuleFilterArgs(
and_=aws.s3.BucketLifecycleConfigurationV2RuleFilterAndArgs(
prefix="log/",
tags={
"rule": "log",
"autoclean": "true",
},
),
),
status="Enabled",
transitions=[
aws.s3.BucketLifecycleConfigurationV2RuleTransitionArgs(
days=30,
storage_class="STANDARD_IA",
),
aws.s3.BucketLifecycleConfigurationV2RuleTransitionArgs(
days=60,
storage_class="GLACIER",
),
],
),
aws.s3.BucketLifecycleConfigurationV2RuleArgs(
id="tmp",
filter=aws.s3.BucketLifecycleConfigurationV2RuleFilterArgs(
prefix="tmp/",
),
expiration=aws.s3.BucketLifecycleConfigurationV2RuleExpirationArgs(
date="2023-01-13T00:00:00Z",
),
status="Enabled",
),
])
versioning_bucket = aws.s3.BucketV2("versioningBucket")
versioning_bucket_acl = aws.s3.BucketAclV2("versioningBucketAcl",
bucket=versioning_bucket.id,
acl="private")
versioning = aws.s3.BucketVersioningV2("versioning",
bucket=versioning_bucket.id,
versioning_configuration=aws.s3.BucketVersioningV2VersioningConfigurationArgs(
status="Enabled",
))
versioning_bucket_config = aws.s3.BucketLifecycleConfigurationV2("versioning-bucket-config",
bucket=versioning_bucket.bucket,
rules=[aws.s3.BucketLifecycleConfigurationV2RuleArgs(
id="config",
filter=aws.s3.BucketLifecycleConfigurationV2RuleFilterArgs(
prefix="config/",
),
noncurrent_version_expiration=aws.s3.BucketLifecycleConfigurationV2RuleNoncurrentVersionExpirationArgs(
noncurrent_days=90,
),
noncurrent_version_transitions=[
aws.s3.BucketLifecycleConfigurationV2RuleNoncurrentVersionTransitionArgs(
noncurrent_days=30,
storage_class="STANDARD_IA",
),
aws.s3.BucketLifecycleConfigurationV2RuleNoncurrentVersionTransitionArgs(
noncurrent_days=60,
storage_class="GLACIER",
),
],
status="Enabled",
)],
opts=pulumi.ResourceOptions(depends_on=[versioning]))
```
## Usage Notes
> **NOTE:** To avoid conflicts, always add the following lifecycle object to the `s3.BucketV2` resource of the source bucket.
This resource implements the same features as the `lifecycle_rule` object of the `s3.BucketV2` resource. To avoid conflicts or unexpected apply results, the `s3.BucketV2` needs a `lifecycle` configuration that ignores changes to its internal `lifecycle_rule` object; omitting it results in conflicting state.
```python
import pulumi
```
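A hedged completion of the empty example block above (the generated docs truncate it): this assumes the goal is to ignore drift on the bucket's inline lifecycle rules via the standard `ignore_changes` resource option, and that the property is exposed as `lifecycleRules`.
```python
import pulumi
import pulumi_aws as aws
bucket = aws.s3.BucketV2("bucket",
    opts=pulumi.ResourceOptions(ignore_changes=["lifecycleRules"]))
```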
The `s3.BucketLifecycleConfigurationV2` resource provides the following features that are not available in the `s3.BucketV2` resource:
* `filter` - Added to the `rule` configuration block documented below.
## Import
S3 bucket lifecycle configuration can be imported using the `bucket`, e.g.
```sh
$ pulumi import aws:s3/bucketLifecycleConfigurationV2:BucketLifecycleConfigurationV2 example bucket-name
```
In addition, S3 bucket lifecycle configuration can be imported using the `bucket` and `expected_bucket_owner` separated by a comma (`,`) e.g.,
```sh
$ pulumi import aws:s3/bucketLifecycleConfigurationV2:BucketLifecycleConfigurationV2 example bucket-name,123456789012
```
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] bucket: The name of the source S3 bucket you want Amazon S3 to monitor.
:param pulumi.Input[str] expected_bucket_owner: The account ID of the expected bucket owner. If the bucket is owned by a different account, the request will fail with an HTTP 403 (Access Denied) error.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BucketLifecycleConfigurationV2RuleArgs']]]] rules: List of configuration blocks describing the lifecycle rules, documented below.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: BucketLifecycleConfigurationV2Args,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Provides an independent configuration resource for S3 bucket [lifecycle configuration](https://docs.aws.amazon.com/AmazonS3/latest/userguide/object-lifecycle-mgmt.html).
## Example Usage
```python
import pulumi
import pulumi_aws as aws
bucket = aws.s3.BucketV2("bucket")
bucket_acl = aws.s3.BucketAclV2("bucketAcl",
bucket=bucket.id,
acl="private")
bucket_config = aws.s3.BucketLifecycleConfigurationV2("bucket-config",
bucket=bucket.bucket,
rules=[
aws.s3.BucketLifecycleConfigurationV2RuleArgs(
id="log",
expiration=aws.s3.BucketLifecycleConfigurationV2RuleExpirationArgs(
days=90,
),
filter=aws.s3.BucketLifecycleConfigurationV2RuleFilterArgs(
and_=aws.s3.BucketLifecycleConfigurationV2RuleFilterAndArgs(
prefix="log/",
tags={
"rule": "log",
"autoclean": "true",
},
),
),
status="Enabled",
transitions=[
aws.s3.BucketLifecycleConfigurationV2RuleTransitionArgs(
days=30,
storage_class="STANDARD_IA",
),
aws.s3.BucketLifecycleConfigurationV2RuleTransitionArgs(
days=60,
storage_class="GLACIER",
),
],
),
aws.s3.BucketLifecycleConfigurationV2RuleArgs(
id="tmp",
filter=aws.s3.BucketLifecycleConfigurationV2RuleFilterArgs(
prefix="tmp/",
),
expiration=aws.s3.BucketLifecycleConfigurationV2RuleExpirationArgs(
date="2023-01-13T00:00:00Z",
),
status="Enabled",
),
])
versioning_bucket = aws.s3.BucketV2("versioningBucket")
versioning_bucket_acl = aws.s3.BucketAclV2("versioningBucketAcl",
bucket=versioning_bucket.id,
acl="private")
versioning = aws.s3.BucketVersioningV2("versioning",
bucket=versioning_bucket.id,
versioning_configuration=aws.s3.BucketVersioningV2VersioningConfigurationArgs(
status="Enabled",
))
versioning_bucket_config = aws.s3.BucketLifecycleConfigurationV2("versioning-bucket-config",
bucket=versioning_bucket.bucket,
rules=[aws.s3.BucketLifecycleConfigurationV2RuleArgs(
id="config",
filter=aws.s3.BucketLifecycleConfigurationV2RuleFilterArgs(
prefix="config/",
),
noncurrent_version_expiration=aws.s3.BucketLifecycleConfigurationV2RuleNoncurrentVersionExpirationArgs(
noncurrent_days=90,
),
noncurrent_version_transitions=[
aws.s3.BucketLifecycleConfigurationV2RuleNoncurrentVersionTransitionArgs(
noncurrent_days=30,
storage_class="STANDARD_IA",
),
aws.s3.BucketLifecycleConfigurationV2RuleNoncurrentVersionTransitionArgs(
noncurrent_days=60,
storage_class="GLACIER",
),
],
status="Enabled",
)],
opts=pulumi.ResourceOptions(depends_on=[versioning]))
```
## Usage Notes
> **NOTE:** To avoid conflicts, always add the following lifecycle object to the `s3.BucketV2` resource of the source bucket.
This resource implements the same features as the `lifecycle_rule` object of the `s3.BucketV2` resource. To avoid conflicts or unexpected apply results, the `s3.BucketV2` needs a `lifecycle` configuration that ignores changes to its internal `lifecycle_rule` object; omitting it results in conflicting state.
```python
import pulumi
```
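A hedged completion of the empty example block above (the generated docs truncate it): this assumes the goal is to ignore drift on the bucket's inline lifecycle rules via the standard `ignore_changes` resource option, and that the property is exposed as `lifecycleRules`.
```python
import pulumi
import pulumi_aws as aws
bucket = aws.s3.BucketV2("bucket",
    opts=pulumi.ResourceOptions(ignore_changes=["lifecycleRules"]))
```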
The `s3.BucketLifecycleConfigurationV2` resource provides the following features that are not available in the `s3.BucketV2` resource:
* `filter` - Added to the `rule` configuration block documented below.
## Import
S3 bucket lifecycle configuration can be imported using the `bucket`, e.g.
```sh
$ pulumi import aws:s3/bucketLifecycleConfigurationV2:BucketLifecycleConfigurationV2 example bucket-name
```
In addition, S3 bucket lifecycle configuration can be imported using the `bucket` and `expected_bucket_owner` separated by a comma (`,`) e.g.,
```sh
$ pulumi import aws:s3/bucketLifecycleConfigurationV2:BucketLifecycleConfigurationV2 example bucket-name,123456789012
```
:param str resource_name: The name of the resource.
:param BucketLifecycleConfigurationV2Args args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(BucketLifecycleConfigurationV2Args, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
bucket: Optional[pulumi.Input[str]] = None,
expected_bucket_owner: Optional[pulumi.Input[str]] = None,
rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BucketLifecycleConfigurationV2RuleArgs']]]]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = BucketLifecycleConfigurationV2Args.__new__(BucketLifecycleConfigurationV2Args)
if bucket is None and not opts.urn:
raise TypeError("Missing required property 'bucket'")
__props__.__dict__["bucket"] = bucket
__props__.__dict__["expected_bucket_owner"] = expected_bucket_owner
if rules is None and not opts.urn:
raise TypeError("Missing required property 'rules'")
__props__.__dict__["rules"] = rules
super(BucketLifecycleConfigurationV2, __self__).__init__(
'aws:s3/bucketLifecycleConfigurationV2:BucketLifecycleConfigurationV2',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
bucket: Optional[pulumi.Input[str]] = None,
expected_bucket_owner: Optional[pulumi.Input[str]] = None,
rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BucketLifecycleConfigurationV2RuleArgs']]]]] = None) -> 'BucketLifecycleConfigurationV2':
"""
Get an existing BucketLifecycleConfigurationV2 resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] bucket: The name of the source S3 bucket you want Amazon S3 to monitor.
:param pulumi.Input[str] expected_bucket_owner: The account ID of the expected bucket owner. If the bucket is owned by a different account, the request will fail with an HTTP 403 (Access Denied) error.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BucketLifecycleConfigurationV2RuleArgs']]]] rules: List of configuration blocks describing the lifecycle rules, documented below.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _BucketLifecycleConfigurationV2State.__new__(_BucketLifecycleConfigurationV2State)
__props__.__dict__["bucket"] = bucket
__props__.__dict__["expected_bucket_owner"] = expected_bucket_owner
__props__.__dict__["rules"] = rules
return BucketLifecycleConfigurationV2(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def bucket(self) -> pulumi.Output[str]:
"""
The name of the source S3 bucket you want Amazon S3 to monitor.
"""
return pulumi.get(self, "bucket")
@property
@pulumi.getter(name="expectedBucketOwner")
def expected_bucket_owner(self) -> pulumi.Output[Optional[str]]:
"""
The account ID of the expected bucket owner. If the bucket is owned by a different account, the request will fail with an HTTP 403 (Access Denied) error.
"""
return pulumi.get(self, "expected_bucket_owner")
@property
@pulumi.getter
def rules(self) -> pulumi.Output[Sequence['outputs.BucketLifecycleConfigurationV2Rule']]:
"""
List of configuration blocks describing the lifecycle rules, documented below.
"""
return pulumi.get(self, "rules")
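# --- Editor's sketch: not part of the generated SDK file ---
# Adopting an existing lifecycle configuration with the static get() defined
# above; the resource name and the "bucket,expected_bucket_owner" id are
# placeholders, mirroring the import examples in the class docstring.
# existing = BucketLifecycleConfigurationV2.get(
#     "existing-config", id="bucket-name,123456789012")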
| 48.192308
| 403
| 0.629112
| 2,143
| 22,554
| 6.461503
| 0.118059
| 0.044486
| 0.054886
| 0.023832
| 0.845237
| 0.830144
| 0.819239
| 0.798512
| 0.791579
| 0.789846
| 0
| 0.017375
| 0.28802
| 22,554
| 467
| 404
| 48.295503
| 0.844937
| 0.568325
| 0
| 0.574324
| 1
| 0
| 0.145296
| 0.095778
| 0
| 0
| 0
| 0
| 0
| 1
| 0.148649
| false
| 0.006757
| 0.047297
| 0
| 0.283784
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0c003ed373ed61a9bc7f0aa49dd8461187422c3c
| 98
|
py
|
Python
|
tests/basics/builtin_abs.py
|
geowor01/micropython
|
7fb13eeef4a85f21cae36f1d502bcc53880e1815
|
[
"MIT"
] | 7
|
2019-10-18T13:41:39.000Z
|
2022-03-15T17:27:57.000Z
|
tests/basics/builtin_abs.py
|
geowor01/micropython
|
7fb13eeef4a85f21cae36f1d502bcc53880e1815
|
[
"MIT"
] | null | null | null |
tests/basics/builtin_abs.py
|
geowor01/micropython
|
7fb13eeef4a85f21cae36f1d502bcc53880e1815
|
[
"MIT"
] | 2
|
2020-06-23T09:10:15.000Z
|
2020-12-22T06:42:14.000Z
|
# test builtin abs
print(abs(False))
print(abs(True))
print(abs(1))
print(abs(-1))
print("PASS")
| 12.25
| 18
| 0.673469
| 17
| 98
| 3.882353
| 0.470588
| 0.484848
| 0.272727
| 0.424242
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.022727
| 0.102041
| 98
| 8
| 19
| 12.25
| 0.727273
| 0.163265
| 0
| 0
| 0
| 0
| 0.049383
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
|
0
| 8
|
0c069b4d5d354f195fcb7e63fdbdbb816445bb6a
| 21,283
|
py
|
Python
|
blender/SceneExporter.py
|
DaSutt/VolumetricParticles
|
6ec9bac4bec4a8757343bb770b23110ef2364dfd
|
[
"Apache-2.0"
] | 6
|
2017-06-26T11:42:26.000Z
|
2018-09-10T17:53:53.000Z
|
blender/SceneExporter.py
|
DaSutt/VolumetricParticles
|
6ec9bac4bec4a8757343bb770b23110ef2364dfd
|
[
"Apache-2.0"
] | 8
|
2017-06-24T20:25:42.000Z
|
2017-08-09T10:50:40.000Z
|
blender/SceneExporter.py
|
DaSutt/VolumetricParticles
|
6ec9bac4bec4a8757343bb770b23110ef2364dfd
|
[
"Apache-2.0"
] | null | null | null |
#MIT License
#
# Copyright (c) 2017 Daniel Suttor
#
#Permission is hereby granted, free of charge, to any person obtaining a copy
#of this software and associated documentation files (the "Software"), to deal
#in the Software without restriction, including without limitation the rights
#to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
#copies of the Software, and to permit persons to whom the Software is
#furnished to do so, subject to the following conditions:
#
#The above copyright notice and this permission notice shall be included in all
#copies or substantial portions of the Software.
#
#THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
#IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
#FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
#AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
#LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
#OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
#SOFTWARE.
bl_info = {
"name": "Scene exporter",
"author": "Daniel Suttor",
"location": "File->Import-Export",
"category": "Import-Export",
}
import bpy
import bmesh
import struct
import mathutils
import sys
import subprocess
import os
from collections import namedtuple
from array import array
from collections import defaultdict
from enum import IntEnum
class ComponentType(IntEnum):
transform = 0
mesh = 1
boundingBox = 2
camera = 3
class SceneExporter(bpy.types.Operator):
"""Export Scene""" # tooltip for menu items and buttons
bl_idname = "export_scene.scene" # unique identifier for buttons, menu items to reference
bl_label = "Export the current scene" # display name in interface
bl_options = {'REGISTER', 'PRESET'}
#changes the coordinate frame to left-handed for rotation
def ChangeRotation(originalRot):
eulerAngles = originalRot.to_euler('XYZ')
rot = mathutils.Euler((-eulerAngles.x, -eulerAngles.y, eulerAngles.z))
return rot.to_quaternion()
def CreateDataStrings(loc, rot, scale):
location = '\t\tpos %f %f %f \n' %(loc.x, -loc.z, loc.y)
rotation = '\t\trotation %f %f %f %f \n' %(rot.x, rot.z, rot.y, -rot.w)
scaling = '\t\tscaling %f %f %f' %(scale.x, scale.z, scale.y)
return location, rotation, scaling
def MinVector(min, input):
for i in range(0,3):
if input[i] < min[i]:
min[i] = input[i]
def MaxVector(max, input):
for i in range(0,3):
if input[i] > max[i]:
max[i] = input[i]
def ExportTransformComponent(matrix, file):
loc, originalRot, scale = matrix.decompose()
rot = SceneExporter.ChangeRotation(originalRot)
locS, rotS, scaleS = SceneExporter.CreateDataStrings(loc, rot, scale)
file.write( '\ttransform\n'
+ locS + rotS + scaleS + '\n')
def ExportMeshComponent(path, object, file):
file.write( '\tpath\t' + path + object.data.name + '.ply\n')
file.write('\tmaterialName ' + object.material_slots[0].name + '\n')
#triangulate mesh first
tempMesh = bmesh.new()
tempMesh.from_mesh(object.data)
bmesh.ops.triangulate(tempMesh, faces=tempMesh.faces[:], quad_method=0, ngon_method=0)
tempMesh.to_mesh(object.data)
tempMesh.free()
#save without transform
transform = object.matrix_world.copy()
object.matrix_world = mathutils.Matrix.Identity(4)
bpy.ops.export_mesh.ply(filepath=path+object.data.name+'.ply',
axis_forward="Z", axis_up="-Y")
#reset transform
object.matrix_world = transform
def ExportBoundingBoxComponent(object, file):
maxFloat = sys.float_info.max
minV = mathutils.Vector((maxFloat, maxFloat, maxFloat))
maxV = mathutils.Vector((-maxFloat, -maxFloat, -maxFloat))
for corner in object.bound_box:
cornerValue = mathutils.Vector((corner[0], -corner[2], corner[1]))
SceneExporter.MinVector(minV, cornerValue)
SceneExporter.MaxVector(maxV, cornerValue)
file.write( '\tboundingBox\n\t\tmin %f %f %f\n\t\tmax %f %f %f\n' %(
minV.x, minV.y, minV.z,
maxV.x, maxV.y, maxV.z))
def ExportCameraComponent(file):
file.write('%d \n' %(ComponentType.camera))
def GetTextureImage(object):
if len(object.material_slots) > 0:
material = object.material_slots[0].material
if len(material.texture_slots) > 0:
texture = material.texture_slots[0].texture
if hasattr(texture, 'image'):
return material.texture_slots[0].texture.image
def ExportParticleComponent(object, file):
settings = object.particle_systems[0].settings
file.write('\tcount %d\n\tlifetime %f\n' %(settings.count, settings.lifetime))
image = SceneExporter.GetTextureImage(object)
if hasattr(image, 'filepath'):
file.write('\ttextureFile ' + bpy.path.abspath(image.filepath) + '\n')
def ExportMaterials(file):
for mat in bpy.data.materials:
file.write('material\n\tname ' + mat.name + '\n')
texture = mat.active_texture
if hasattr(texture, 'image'):
file.write('\tbaseColorTexture ' + texture.image.filepath)
else:
file.write('\tbaseColorTexture NONE')
file.write('\n\troughness %f\n\tmetalMask %d\n' %(0.5, 0))
#exports the instance data of the objects
def exportScene(scene, path, name):
scenePath = str(path + name)
file = open(scenePath, 'w')
relativePath = os.path.relpath(path)
for obj in scene.objects:
if not obj.hide:
if obj.type == 'MESH':
obj.select = True
scene.objects.active = obj
if len(obj.particle_systems) > 0:
file.write('particles\n')
SceneExporter.ExportParticleComponent(obj, file)
SceneExporter.ExportTransformComponent(obj.matrix_world, file)
SceneExporter.ExportBoundingBoxComponent(obj, file)
else:
file.write('mesh\n')
SceneExporter.ExportMeshComponent(path, obj, file)
SceneExporter.ExportTransformComponent(obj.matrix_world, file)
SceneExporter.ExportBoundingBoxComponent(obj, file)
SceneExporter.ExportMaterials(file)
file.close()
def GetOutputName():
blendName = bpy.path.basename(bpy.data.filepath)
return blendName.split('.',1)[0] + ".scene"
def execute(self, context):
#location of the blend file
path = bpy.path.abspath("//")
if not bpy.data.is_saved:
return {'FINISHED'}
scene = context.scene
sceneName = SceneExporter.GetOutputName()
#export all instances of the mesh data
SceneExporter.exportScene(scene, path, sceneName)
return {'FINISHED'} # operator finished successfully
def menu_func(self, context):
self.layout.operator(SceneExporter.bl_idname, text="Scene exporter(.scene)")
def register():
bpy.utils.register_module(__name__)
bpy.types.INFO_MT_file_export.append(menu_func)
def unregister():
bpy.utils.unregister_module(__name__)
bpy.types.INFO_MT_file_export.remove(menu_func)
# for testing running directly out of editor
if __name__ == "__main__":
register()
bl_info = {
"name": "Scene exporter",
"author": "Daniel Suttor",
"location": "File->Import-Export",
"category": "Import-Export",
}
import bpy
import bmesh
import struct
import mathutils
import sys
import subprocess
import os
from collections import namedtuple
from array import array
from collections import defaultdict
from enum import IntEnum
class ComponentType(IntEnum):
transform = 0
mesh = 1
boundingBox = 2
camera = 3
class SceneExporter(bpy.types.Operator):
"""Export Scene""" # tooltip for menu items and buttons
bl_idname = "export_scene.scene" # unique identifier for buttons, menu items to reference
bl_label = "Export the current scene" # display name in interface
bl_options = {'REGISTER', 'PRESET'}
#changes the coordinate frame to left-handed for rotation
def ChangeRotation(originalRot):
eulerAngles = originalRot.to_euler('XYZ')
rot = mathutils.Euler((-eulerAngles.x, -eulerAngles.y, eulerAngles.z))
return rot.to_quaternion()
def CreateDataStrings(loc, rot, scale):
location = '\t\tpos %f %f %f \n' %(loc.x, -loc.z, loc.y)
rotation = '\t\trotation %f %f %f %f \n' %(rot.x, rot.z, rot.y, -rot.w)
scaling = '\t\tscaling %f %f %f' %(scale.x, scale.z, scale.y)
return location, rotation, scaling
def MinVector(min, input):
for i in range(0,3):
if input[i] < min[i]:
min[i] = input[i]
def MaxVector(max, input):
for i in range(0,3):
if input[i] > max[i]:
max[i] = input[i]
def ExportTransformComponent(matrix, file):
loc, originalRot, scale = matrix.decompose()
rot = SceneExporter.ChangeRotation(originalRot)
locS, rotS, scaleS = SceneExporter.CreateDataStrings(loc, rot, scale)
file.write( '\ttransform\n'
+ locS + rotS + scaleS + '\n')
def ExportMeshComponent(path, object, file):
file.write( '\tpath\t' + object.data.name + '.ply\n')
file.write('\tmaterialName ' + object.material_slots[0].name + '\n')
#triangulate mesh first
tempMesh = bmesh.new()
tempMesh.from_mesh(object.data)
bmesh.ops.triangulate(tempMesh, faces=tempMesh.faces[:], quad_method=0, ngon_method=0)
tempMesh.to_mesh(object.data)
tempMesh.free()
#save without transform
transform = object.matrix_world.copy()
object.matrix_world = mathutils.Matrix.Identity(4)
bpy.ops.export_mesh.ply(filepath=path+object.data.name+'.ply',
axis_forward="Z", axis_up="-Y")
#reset transform
object.matrix_world = transform
def ExportBoundingBoxComponent(object, file):
maxFloat = sys.float_info.max
minV = mathutils.Vector((maxFloat, maxFloat, maxFloat))
maxV = mathutils.Vector((-maxFloat, -maxFloat, -maxFloat))
for corner in object.bound_box:
cornerValue = mathutils.Vector((corner[0], -corner[2], corner[1]))
SceneExporter.MinVector(minV, cornerValue)
SceneExporter.MaxVector(maxV, cornerValue)
file.write( '\tboundingBox\n\t\tmin %f %f %f\n\t\tmax %f %f %f\n' %(
minV.x, minV.y, minV.z,
maxV.x, maxV.y, maxV.z))
def ExportCameraComponent(file):
file.write('%d \n' %(ComponentType.camera))
def GetTextureImage(object):
if len(object.material_slots) > 0:
material = object.material_slots[0].material
if len(material.texture_slots) > 0:
texture = material.texture_slots[0].texture
if hasattr(texture, 'image'):
return material.texture_slots[0].texture.image
def ExportParticleComponent(object, file):
settings = object.particle_systems[0].settings
file.write('\tcount %d\n\tlifetime %f\n' %(settings.count, settings.lifetime))
image = SceneExporter.GetTextureImage(object)
if hasattr(image, 'filepath'):
file.write('\ttextureFile ' + bpy.path.abspath(image.filepath) + '\n')
def ExportMaterials(file):
for mat in bpy.data.materials:
file.write('material\n\tname ' + mat.name + '\n')
texture = mat.active_texture
if hasattr(texture, 'image'):
filepath = texture.image.filepath
if texture.image.filepath[0] == '/':
filepath = texture.image.filepath[2:]
file.write('\tbaseColorTexture ' + filepath)
else:
file.write('\tbaseColorTexture NONE')
file.write('\n\troughness %f\n\tmetalMask %d\n' %(0.5, 0))
#exports the instance data of the objects
def exportScene(scene, path, name):
scenePath = str(path + name)
file = open(scenePath, 'w')
for obj in scene.objects:
if not obj.hide:
if obj.type == 'MESH':
obj.select = True
scene.objects.active = obj
if len(obj.particle_systems) > 0:
file.write('particles\n')
SceneExporter.ExportParticleComponent(obj, file)
SceneExporter.ExportTransformComponent(obj.matrix_world, file)
SceneExporter.ExportBoundingBoxComponent(obj, file)
else:
file.write('mesh\n')
SceneExporter.ExportMeshComponent(path, obj, file)
SceneExporter.ExportTransformComponent(obj.matrix_world, file)
SceneExporter.ExportBoundingBoxComponent(obj, file)
SceneExporter.ExportMaterials(file)
file.close()
def GetOutputName():
blendName = bpy.path.basename(bpy.data.filepath)
return blendName.split('.',1)[0] + ".scene"
def execute(self, context):
#location of the blend file
path = bpy.path.abspath("//")
if not bpy.data.is_saved:
return {'FINISHED'}
scene = context.scene
sceneName = SceneExporter.GetOutputName()
#export all instances of the mesh data
SceneExporter.exportScene(scene, path, sceneName)
return {'FINISHED'} # operator finished successfully
def menu_func(self, context):
self.layout.operator(SceneExporter.bl_idname, text="Scene exporter(.scene)")
def register():
bpy.utils.register_module(__name__)
bpy.types.INFO_MT_file_export.append(menu_func)
def unregister():
bpy.utils.unregister_module(__name__)
bpy.types.INFO_MT_file_export.remove(menu_func)
# for testing running directly out of editor
if __name__ == "__main__":
register()
bl_info = {
"name": "Scene exporter",
"author": "Daniel Suttor",
"location": "File->Import-Export",
"category": "Import-Export",
}
import bpy
import bmesh
import struct
import mathutils
import sys
import subprocess
import os
from collections import namedtuple
from array import array
from collections import defaultdict
from enum import IntEnum
class ComponentType(IntEnum):
transform = 0
mesh = 1
boundingBox = 2
camera = 3
class SceneExporter(bpy.types.Operator):
"""Export Scene""" # tooltip for menu items and buttons
bl_idname = "export_scene.scene" # unique identifier for buttons, menu items to reference
bl_label = "Export the current scene" # display name in interface
bl_options = {'REGISTER', 'PRESET'}
#changes the coordinate frame to left-handed for rotation
def ChangeRotation(originalRot):
eulerAngles = originalRot.to_euler('XYZ')
rot = mathutils.Euler((-eulerAngles.x, -eulerAngles.y, eulerAngles.z))
return rot.to_quaternion()
def CreateDataStrings(loc, rot, scale):
location = '\t\tpos %f %f %f \n' %(loc.x, -loc.z, loc.y)
rotation = '\t\trotation %f %f %f %f \n' %(rot.x, rot.z, rot.y, -rot.w)
scaling = '\t\tscaling %f %f %f' %(scale.x, scale.z, scale.y)
return location, rotation, scaling
def MinVector(min, input):
for i in range(0,3):
if input[i] < min[i]:
min[i] = input[i]
def MaxVector(max, input):
for i in range(0,3):
if input[i] > max[i]:
max[i] = input[i]
def ExportTransformComponent(matrix, file):
loc, originalRot, scale = matrix.decompose()
rot = SceneExporter.ChangeRotation(originalRot)
locS, rotS, scaleS = SceneExporter.CreateDataStrings(loc, rot, scale)
file.write( '\ttransform\n'
+ locS + rotS + scaleS + '\n')
def ExportMeshComponent(path, object, file):
file.write( '\tpath\t' + object.data.name + '.ply\n')
file.write('\tmaterialName ' + object.material_slots[0].name + '\n')
#triangulate mesh first
tempMesh = bmesh.new()
tempMesh.from_mesh(object.data)
bmesh.ops.triangulate(tempMesh, faces=tempMesh.faces[:], quad_method=0, ngon_method=0)
tempMesh.to_mesh(object.data)
tempMesh.free()
#save without transform
transform = object.matrix_world.copy()
object.matrix_world = mathutils.Matrix.Identity(4)
bpy.ops.export_mesh.ply(filepath=path+object.data.name+'.ply',
axis_forward="Z", axis_up="-Y")
#reset transform
object.matrix_world = transform
def ExportBoundingBoxComponent(object, file):
maxFloat = sys.float_info.max
minV = mathutils.Vector((maxFloat, maxFloat, maxFloat))
maxV = mathutils.Vector((-maxFloat, -maxFloat, -maxFloat))
for corner in object.bound_box:
cornerValue = mathutils.Vector((corner[0], -corner[2], corner[1]))
SceneExporter.MinVector(minV, cornerValue)
SceneExporter.MaxVector(maxV, cornerValue)
file.write( '\tboundingBox\n\t\tmin %f %f %f\n\t\tmax %f %f %f\n' %(
minV.x, minV.y, minV.z,
maxV.x, maxV.y, maxV.z))
def ExportCameraComponent(file):
file.write('%d \n' %(ComponentType.camera))
def GetTextureImage(object):
if len(object.material_slots) > 0:
material = object.material_slots[0].material
if len(material.texture_slots) > 0:
texture = material.texture_slots[0].texture
if hasattr(texture, 'image'):
return material.texture_slots[0].texture.image
def ExportParticleComponent(object, file):
settings = object.particle_systems[0].settings
file.write('\tcount %d\n\tlifetime %f\n' %(settings.count, settings.lifetime))
image = SceneExporter.GetTextureImage(object)
if hasattr(image, 'filepath'):
file.write('\ttextureFile ' + bpy.path.abspath(image.filepath) + '\n')
def ExportMaterials(file):
for mat in bpy.data.materials:
file.write('material\n\tname ' + mat.name + '\n')
texture = mat.active_texture
if hasattr(texture, 'image'):
filepath = texture.image.filepath
if texture.image.filepath[0] == '/':
filepath = texture.image.filepath[2:]
file.write('\tbaseColorTexture ' + filepath)
else:
file.write('\tbaseColorTexture NONE')
file.write('\n\troughness %f\n\tmetalMask %d\n' %(0.5, 0))
def ExportSmokeComponent(object, file):
smoke = object.modifiers['Smoke']
file.write('\tdensity %f\n' %(smoke.domain_settings.alpha))
#exports the instance data of the objects
def exportScene(scene, path, name):
scenePath = str(path + name)
file = open(scenePath, 'w')
for obj in scene.objects:
if not obj.hide:
if obj.type == 'MESH':
obj.select = True
scene.objects.active = obj
if len(obj.particle_systems) > 0:
file.write('particles\n')
SceneExporter.ExportParticleComponent(obj, file)
elif 'Smoke' in obj.modifiers:
file.write('smoke\n')
SceneExporter.ExportSmokeComponent(obj, file)
else:
file.write('mesh\n')
SceneExporter.ExportMeshComponent(path, obj, file)
SceneExporter.ExportTransformComponent(obj.matrix_world, file)
SceneExporter.ExportBoundingBoxComponent(obj, file)
SceneExporter.ExportMaterials(file)
file.close()
def GetOutputName():
blendName = bpy.path.basename(bpy.data.filepath)
return blendName.split('.',1)[0] + ".scene"
def execute(self, context):
#location of the blend file
path = bpy.path.abspath("//")
if not bpy.data.is_saved:
return {'FINISHED'}
scene = context.scene
sceneName = SceneExporter.GetOutputName()
#export all instances of the mesh data
SceneExporter.exportScene(scene, path, sceneName)
return {'FINISHED'} # operator finished successfully
def menu_func(self, context):
self.layout.operator(SceneExporter.bl_idname, text="Scene exporter(.scene)")
def register():
bpy.utils.register_module(__name__)
bpy.types.INFO_MT_file_export.append(menu_func)
def unregister():
bpy.utils.unregister_module(__name__)
bpy.types.INFO_MT_file_export.remove(menu_func)
# for testing running directly out of editor
if __name__ == "__main__":
register()
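# --- Editor's sketch: not part of the original add-on ---
# Sanity check of the handedness swap used by CreateDataStrings above:
# a Blender point (x, y, z) is written out as (x, -z, y).
import mathutils
_loc = mathutils.Vector((1.0, 2.0, 3.0))
assert '%g %g %g' % (_loc.x, -_loc.z, _loc.y) == '1 -3 2'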
| 36.25724
| 101
| 0.623455
| 2,463
| 21,283
| 5.315469
| 0.126675
| 0.028185
| 0.004125
| 0.003666
| 0.915903
| 0.915597
| 0.915597
| 0.915597
| 0.915597
| 0.915597
| 0
| 0.005668
| 0.262228
| 21,283
| 587
| 102
| 36.25724
| 0.828111
| 0.108537
| 0
| 0.971765
| 0
| 0.007059
| 0.089356
| 0.003492
| 0
| 0
| 0
| 0
| 0
| 1
| 0.122353
| false
| 0
| 0.091765
| 0
| 0.32
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0c0fe317a62ff5ced371f926863ed78a82534bb4
| 5,417
|
py
|
Python
|
EDA/웹.py
|
cpprhtn/AI-s-Morality-and-Ethics-Learning-Project
|
4e939a0eb5883ea7f669f33b6ce70abcb8a90d3d
|
[
"MIT"
] | 2
|
2021-01-19T06:18:34.000Z
|
2021-02-03T10:59:24.000Z
|
EDA/웹.py
|
cpprhtn/AI-s-Morality-and-Ethics-Learning-Project
|
4e939a0eb5883ea7f669f33b6ce70abcb8a90d3d
|
[
"MIT"
] | null | null | null |
EDA/웹.py
|
cpprhtn/AI-s-Morality-and-Ethics-Learning-Project
|
4e939a0eb5883ea7f669f33b6ce70abcb8a90d3d
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Sep 11 20:42:48 2020
@author: cpprhtn
"""
frames = []
df_list = []
import json
import pandas as pd
def read_json(a):
if a < 1:
b = 'Error.json'
elif a < 10:
b = 'EBRW190300000{}.json'.format(a)
elif a < 100:
b = 'EBRW19030000{}.json'.format(a)
elif a < 1000:
b = 'EBRW1903000{}.json'.format(a)
elif a < 10000:
b = 'EBRW190300{}.json'.format(a)
else:
b = 'Error.json'
data = json.load(open(b))
data2 = data["document"]
result = []
for d in data2:
result.extend(d['paragraph'])
globals()['df{}'.format(a)] = pd.DataFrame(result)
for i in range(2753,2764):
try:
read_json(i)
df_list.append(i)
except FileNotFoundError:
print("i =", i,"\n 파일이 존재하지 않으므로 건너뜁니다.")
for k in df_list:
frames.append(globals()['df{}'.format(k)])
df0 = pd.concat(frames)
df0.to_csv("web_1.csv")
frames = []
df_list = []
import json
import pandas as pd
def read_json(a):
if a < 1:
b = 'Error.json'
elif a < 10:
b = 'EBRW190800000{}.json'.format(a)
elif a < 100:
b = 'EBRW19080000{}.json'.format(a)
elif a < 1000:
b = 'EBRW1908000{}.json'.format(a)
else:
b = 'Error.json'
data = json.load(open(b))
data2 = data["document"]
result = []
for d in data2:
result.extend(d['paragraph'])
globals()['df{}'.format(a)] = pd.DataFrame(result)
for i in range(1,308):
try:
read_json(i)
df_list.append(i)
except FileNotFoundError:
print("i =", i,"\n 파일이 존재하지 않으므로 건너뜁니다.")
for k in df_list:
frames.append(globals()['df{}'.format(k)])
df0 = pd.concat(frames)
df0.to_csv("web_2.csv")
frames = []
df_list = []
import json
import pandas as pd
def read_json(a):
if a < 1:
b = 'Error.json'
elif a < 10:
b = 'EBRW190810000{}.json'.format(a)
elif a < 100:
b = 'EBRW19081000{}.json'.format(a)
elif a < 1000:
b = 'EBRW1908100{}.json'.format(a)
else:
b = 'Error.json'
data = json.load(open(b))
data2 = data["document"]
result = []
for d in data2:
result.extend(d['paragraph'])
globals()['df{}'.format(a)] = pd.DataFrame(result)
for i in range(1,272):
try:
read_json(i)
df_list.append(i)
except FileNotFoundError:
print("i =", i,"\n 파일이 존재하지 않으므로 건너뜁니다.")
for k in df_list:
frames.append(globals()['df{}'.format(k)])
df0 = pd.concat(frames)
df0.to_csv("web_3.csv")
frames = []
df_list = []
import json
import pandas as pd
def read_json(a):
if a < 1:
b = 'Error.json'
elif a < 10:
b = 'ERRW190500000{}.json'.format(a)
elif a < 100:
b = 'ERRW19050000{}.json'.format(a)
elif a < 1000:
b = 'ERRW1905000{}.json'.format(a)
elif a < 10000:
b = 'ERRW190500{}.json'.format(a)
else:
b = 'Error.json'
data = json.load(open(b))
data2 = data["document"]
result = []
for d in data2:
result.extend(d['paragraph'])
globals()['df{}'.format(a)] = pd.DataFrame(result)
for i in range(308,2780):
try:
read_json(i)
df_list.append(i)
except FileNotFoundError:
print("i =", i,"\n 파일이 존재하지 않으므로 건너뜁니다.")
for k in df_list:
frames.append(globals()['df{}'.format(k)])
df0 = pd.concat(frames)
df0.to_csv("web_4.csv")
frames = []
df_list = []
import json
import pandas as pd
def read_json(a):
if a < 1:
b = 'Error.json'
elif a < 10:
b = 'ESRW190500000{}.json'.format(a)
elif a < 100:
b = 'ESRW19050000{}.json'.format(a)
elif a < 1000:
b = 'ESRW1905000{}.json'.format(a)
elif a < 10000:
b = 'ESRW190500{}.json'.format(a)
else:
b = 'Error.json'
data = json.load(open(b))
data2 = data["document"]
result = []
for d in data2:
result.extend(d['paragraph'])
globals()['df{}'.format(a)] = pd.DataFrame(result)
for i in range(307,2782):
try:
read_json(i)
df_list.append(i)
except FileNotFoundError:
print("i =", i,"\n 파일이 존재하지 않으므로 건너뜁니다.")
for k in df_list:
frames.append(globals()['df{}'.format(k)])
df0 = pd.concat(frames)
df0.to_csv("web_5.csv")
frames = []
df_list = []
import json
import pandas as pd
def read_json(a):
if a < 1:
b = 'Error.json'
elif a < 10:
b = 'ESRW190510000{}.json'.format(a)
elif a < 100:
b = 'ESRW19051000{}.json'.format(a)
elif a < 1000:
b = 'ESRW1905100{}.json'.format(a)
elif a < 10000:
b = 'ESRW190510{}.json'.format(a)
else:
b = 'Error.json'
data = json.load(open(b))
data2 = data["document"]
result = []
for d in data2:
result.extend(d['paragraph'])
globals()['df{}'.format(a)] = pd.DataFrame(result)
for i in range(308,1171):
try:
read_json(i)
df_list.append(i)
except FileNotFoundError:
print("i =", i,"\n 파일이 존재하지 않으므로 건너뜁니다.")
for k in df_list:
frames.append(globals()['df{}'.format(k)])
df0 = pd.concat(frames)
df0.to_csv("web_6.csv")
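# --- Editor's sketch: not part of the original script ---
# Every if/elif ladder above zero-pads the sequence number to six digits
# after a fixed prefix; a single format spec expresses the same rule:
def json_name(prefix, a):
    # json_name('EBRW1903', 7) == 'EBRW1903000007.json', matching the ladders
    return '{}{:06d}.json'.format(prefix, a) if 1 <= a < 10000 else 'Error.json'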
| 20.441509
| 55
| 0.532213
| 746
| 5,417
| 3.808311
| 0.142091
| 0.06899
| 0.085181
| 0.084477
| 0.873284
| 0.873284
| 0.873284
| 0.75572
| 0.75572
| 0.75572
| 0
| 0.087199
| 0.303489
| 5,417
| 264
| 56
| 20.518939
| 0.665783
| 0.007938
| 0
| 0.824742
| 0
| 0
| 0.167577
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.061856
| null | null | 0.030928
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0c3123cdca64a5824a2f40f1e8d82c201030508a
| 122
|
py
|
Python
|
BuildSimHubAPI/experiment/__init__.py
|
ruijis/buildsimhub_python_api
|
67a88a421a5970b9134a97faf3d52a5a8a6c6258
|
[
"MIT"
] | 19
|
2018-02-27T22:58:04.000Z
|
2022-02-21T15:03:59.000Z
|
BuildSimHubAPI/experiment/__init__.py
|
ruijis/buildsimhub_python_api
|
67a88a421a5970b9134a97faf3d52a5a8a6c6258
|
[
"MIT"
] | 11
|
2018-02-15T16:47:53.000Z
|
2018-12-19T18:33:20.000Z
|
BuildSimHubAPI/experiment/__init__.py
|
ruijis/buildsimhub_python_api
|
67a88a421a5970b9134a97faf3d52a5a8a6c6258
|
[
"MIT"
] | 11
|
2018-01-26T02:12:38.000Z
|
2019-09-29T12:05:31.000Z
|
from .hvac_component import HVACComponent
from .hvac_component import AHUCoilComponent
from .AHU_builder import AHUBuilder
| 40.666667
| 44
| 0.885246
| 15
| 122
| 7
| 0.6
| 0.152381
| 0.32381
| 0.438095
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090164
| 122
| 3
| 45
| 40.666667
| 0.945946
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0c5cc320aea13f26326e809ef180c9be104b26b0
| 82
|
py
|
Python
|
src/chapter13/__init__.py
|
Peefy/CLRS_dugu_code-master
|
98f00e75e1b0ebc13a7affb2604bec8501692a19
|
[
"Apache-2.0"
] | 3
|
2018-01-31T03:08:50.000Z
|
2018-04-25T12:57:01.000Z
|
src/chapter13/__init__.py
|
HideLakitu/IntroductionToAlgorithm.Python
|
33662f46dc346203b220d7481d1a4439feda05d2
|
[
"Apache-2.0"
] | null | null | null |
src/chapter13/__init__.py
|
HideLakitu/IntroductionToAlgorithm.Python
|
33662f46dc346203b220d7481d1a4439feda05d2
|
[
"Apache-2.0"
] | 3
|
2019-03-03T04:49:53.000Z
|
2020-07-13T10:18:58.000Z
|
# python src/chapter13/chapter13note.py
# python3 src/chapter13/chapter13note.py
| 20.5
| 40
| 0.817073
| 10
| 82
| 6.7
| 0.6
| 0.358209
| 0.746269
| 0.80597
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12
| 0.085366
| 82
| 3
| 41
| 27.333333
| 0.773333
| 0.926829
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 0
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a7527df57cffda9dcb542cd77ded368a8efc7cac
| 18,782
|
py
|
Python
|
apps/secure_url/tests/tests_secured_entity_access_view.py
|
fryta/sercure-url
|
06029e8e3a95616f939f62f04c260d14d128f0b4
|
[
"MIT"
] | null | null | null |
apps/secure_url/tests/tests_secured_entity_access_view.py
|
fryta/sercure-url
|
06029e8e3a95616f939f62f04c260d14d128f0b4
|
[
"MIT"
] | 7
|
2020-02-11T23:49:48.000Z
|
2022-01-13T01:05:42.000Z
|
apps/secure_url/tests/tests_secured_entity_access_view.py
|
fryta/secure-url
|
06029e8e3a95616f939f62f04c260d14d128f0b4
|
[
"MIT"
] | null | null | null |
from rest_framework import status
from django.urls.base import reverse
from django.conf import settings
from datetime import timedelta
from .tests_base_view import BaseViewTest
from ..models import SecuredEntity
class SecuredEntityAccessViewTest(BaseViewTest):
def __finish_create_secured_entity(self, response):
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
self.secured_entity = SecuredEntity.objects.first()
self.access_url = reverse('secure_url:secured-entity-access-view', args=(self.secured_entity.pk,))
self.client.logout()
def _create_secured_entity_from_url(self):
self._login_user()
response = self.client.post(self.create_url, self.data_with_url)
self.__finish_create_secured_entity(response)
def _create_secured_entity_from_file(self):
self._login_user()
self.tmp_file = self._get_tmp_file()
with open(self.tmp_file.name, 'rb') as file:
response = self.client.post(self.create_url, {'file': file})
self.__finish_create_secured_entity(response)
def test_access_secured_entity_from_url_returns_correct_response__authorized(self):
self._create_secured_entity_from_url()
self._login_user()
response = self.client.get(self.access_url)
self.assertContains(response, 'Please provide password in order to access this secured entity.')
self.assertContains(response, '<input id="id_password" name="password" type="text">')
self.assertContains(response, 'Go go go!')
def test_access_secured_entity_from_url_without_password_returns_correct_response__authorized(self):
self._create_secured_entity_from_url()
self._login_user()
response = self.client.post(self.access_url, {'id': self.secured_entity.pk})
self.assertContains(response, 'This field is required')
def test_access_secured_entity_from_url_wrong_password_returns_correct_response__authorized(self):
self._create_secured_entity_from_url()
self._login_user()
response = self.client.post(self.access_url, {'id': self.secured_entity.pk, 'password': 'xxx'})
self.assertContains(response, 'Password do not match')
def test_access_secured_entity_from_url_correct_password_results_in_302__authorized(self):
self._create_secured_entity_from_url()
self._login_user()
response = self.client.post(self.access_url, {'id': self.secured_entity.pk,
'password': self.secured_entity.password})
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
def test_access_secured_entity_from_url_correct_password_returns_correct_response__authorized(self):
self._create_secured_entity_from_url()
self._login_user()
response = self.client.post(self.access_url, {'id': self.secured_entity.pk,
'password': self.secured_entity.password})
self.assertEqual(response['Location'], self.secured_entity.url)
def test_access_secured_entity_from_url_correct_password_just_before_deadline_results_in_302__authorized(self):
self._create_secured_entity_from_url()
self._login_user()
SecuredEntity.objects.filter(pk=self.secured_entity.pk).update(
created=self.secured_entity.created - settings.SECURED_ENTITY_ACCESSIBLE_TIME + timedelta(seconds=1))
response = self.client.post(self.access_url, {'id': self.secured_entity.pk,
'password': self.secured_entity.password})
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
def test_access_secured_entity_from_url_correct_password_just_before_deadline_returns_correct_response__authorized(self):
self._create_secured_entity_from_url()
self._login_user()
SecuredEntity.objects.filter(pk=self.secured_entity.pk).update(
created=self.secured_entity.created - settings.SECURED_ENTITY_ACCESSIBLE_TIME + timedelta(seconds=1))
response = self.client.post(self.access_url, {'id': self.secured_entity.pk,
'password': self.secured_entity.password})
self.assertEqual(response['Location'], self.secured_entity.url)
def test_access_secured_entity_from_url_correct_password_just_after_deadline_returns_correct_response__authorized(self):
self._create_secured_entity_from_url()
self._login_user()
SecuredEntity.objects.filter(pk=self.secured_entity.pk).update(
created=self.secured_entity.created - settings.SECURED_ENTITY_ACCESSIBLE_TIME - timedelta(seconds=1))
response = self.client.post(self.access_url, {'id': self.secured_entity.pk,
'password': self.secured_entity.password})
self.assertContains(response, 'Sorry, this secured entity is no longer available')
def test_access_secured_entity_from_url_just_after_deadline_returns_correct_response__authorized(self):
self._create_secured_entity_from_url()
self._login_user()
SecuredEntity.objects.filter(pk=self.secured_entity.pk).update(
created=self.secured_entity.created - settings.SECURED_ENTITY_ACCESSIBLE_TIME - timedelta(seconds=1))
response = self.client.get(self.access_url)
self.assertContains(response, 'Sorry, this secured entity is no longer available')
def test_access_secured_entity_from_file_returns_correct_response__authorized(self):
self._create_secured_entity_from_file()
self._login_user()
response = self.client.get(self.access_url)
self.assertContains(response, 'Please provide password in order to access this secured entity.')
self.assertContains(response, '<input id="id_password" name="password" type="text">')
self.assertContains(response, 'Go go go!')
def test_access_secured_entity_from_file_without_password_returns_correct_response__authorized(self):
self._create_secured_entity_from_file()
self._login_user()
response = self.client.post(self.access_url, {'id': self.secured_entity.pk})
self.assertContains(response, 'This field is required')
def test_access_secured_entity_from_file_wrong_password_returns_correct_response__authorized(self):
self._create_secured_entity_from_file()
self._login_user()
response = self.client.post(self.access_url, {'id': self.secured_entity.pk, 'password': 'xxx'})
self.assertContains(response, 'Password do not match')
def test_access_secured_entity_from_file_correct_password_results_in_302__authorized(self):
self._create_secured_entity_from_file()
self._login_user()
response = self.client.post(self.access_url, {'id': self.secured_entity.pk,
'password': self.secured_entity.password})
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
def test_access_secured_entity_from_file_correct_password_returns_correct_response__authorized(self):
self._create_secured_entity_from_file()
self._login_user()
response = self.client.post(self.access_url, {'id': self.secured_entity.pk,
'password': self.secured_entity.password})
self.assertIn(settings.MEDIA_URL, response['Location'])
self.assertIn(self.tmp_file.name.split('/')[-1], response['Location'])
def test_access_secured_entity_from_file_correct_password_just_before_deadline_results_in_302__authorized(self):
self._create_secured_entity_from_file()
self._login_user()
SecuredEntity.objects.filter(pk=self.secured_entity.pk).update(
created=self.secured_entity.created - settings.SECURED_ENTITY_ACCESSIBLE_TIME + timedelta(seconds=1))
response = self.client.post(self.access_url, {'id': self.secured_entity.pk,
'password': self.secured_entity.password})
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
def test_access_secured_entity_from_file_correct_password_just_before_deadline_returns_correct_response__authorized(self):
self._create_secured_entity_from_file()
self._login_user()
SecuredEntity.objects.filter(pk=self.secured_entity.pk).update(
created=self.secured_entity.created - settings.SECURED_ENTITY_ACCESSIBLE_TIME + timedelta(seconds=1))
response = self.client.post(self.access_url, {'id': self.secured_entity.pk,
'password': self.secured_entity.password})
self.assertIn(settings.MEDIA_URL, response['Location'])
self.assertIn(self.tmp_file.name.split('/')[-1], response['Location'])
def test_access_secured_entity_from_file_correct_password_just_after_deadline_returns_correct_response__authorized(self):
self._create_secured_entity_from_file()
self._login_user()
SecuredEntity.objects.filter(pk=self.secured_entity.pk).update(
created=self.secured_entity.created - settings.SECURED_ENTITY_ACCESSIBLE_TIME - timedelta(seconds=1))
response = self.client.post(self.access_url, {'id': self.secured_entity.pk,
'password': self.secured_entity.password})
self.assertContains(response, 'Sorry, this secured entity is no longer available')
def test_access_secured_entity_from_file_just_after_deadline_returns_correct_response__authorized(self):
self._create_secured_entity_from_file()
self._login_user()
SecuredEntity.objects.filter(pk=self.secured_entity.pk).update(
created=self.secured_entity.created - settings.SECURED_ENTITY_ACCESSIBLE_TIME - timedelta(seconds=1))
response = self.client.get(self.access_url)
self.assertContains(response, 'Sorry, this secured entity is no longer available')
def test_access_secured_entity_from_url_returns_correct_response__unauthorized(self):
self._create_secured_entity_from_url()
response = self.client.get(self.access_url)
self.assertContains(response, 'Please provide password in order to access this secured entity.')
self.assertContains(response, '<input id="id_password" name="password" type="text">')
self.assertContains(response, 'Go go go!')
def test_access_secured_entity_from_url_without_password_returns_correct_response__unauthorized(self):
self._create_secured_entity_from_url()
response = self.client.post(self.access_url, {'id': self.secured_entity.pk})
self.assertContains(response, 'This field is required')
def test_access_secured_entity_from_url_wrong_password_returns_correct_response__unauthorized(self):
self._create_secured_entity_from_url()
response = self.client.post(self.access_url, {'id': self.secured_entity.pk, 'password': 'xxx'})
self.assertContains(response, 'Password do not match')
def test_access_secured_entity_from_url_correct_password_results_in_302__unauthorized(self):
self._create_secured_entity_from_url()
response = self.client.post(self.access_url, {'id': self.secured_entity.pk,
'password': self.secured_entity.password})
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
def test_access_secured_entity_from_url_correct_password_returns_correct_response__unauthorized(self):
self._create_secured_entity_from_url()
response = self.client.post(self.access_url, {'id': self.secured_entity.pk,
'password': self.secured_entity.password})
self.assertEqual(response['Location'], self.secured_entity.url)
def test_access_secured_entity_from_url_correct_password_just_before_deadline_results_in_302__unauthorized(self):
self._create_secured_entity_from_url()
SecuredEntity.objects.filter(pk=self.secured_entity.pk).update(
created=self.secured_entity.created - settings.SECURED_ENTITY_ACCESSIBLE_TIME + timedelta(seconds=1))
response = self.client.post(self.access_url, {'id': self.secured_entity.pk,
'password': self.secured_entity.password})
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
def test_access_secured_entity_from_url_correct_password_just_before_deadline_returns_correct_response__unauthorized(self):
self._create_secured_entity_from_url()
SecuredEntity.objects.filter(pk=self.secured_entity.pk).update(
created=self.secured_entity.created - settings.SECURED_ENTITY_ACCESSIBLE_TIME + timedelta(seconds=1))
response = self.client.post(self.access_url, {'id': self.secured_entity.pk,
'password': self.secured_entity.password})
self.assertEqual(response['Location'], self.secured_entity.url)
def test_access_secured_entity_from_url_correct_password_just_after_deadline_returns_correct_response__unauthorized(self):
self._create_secured_entity_from_url()
SecuredEntity.objects.filter(pk=self.secured_entity.pk).update(
created=self.secured_entity.created - settings.SECURED_ENTITY_ACCESSIBLE_TIME - timedelta(seconds=1))
response = self.client.post(self.access_url, {'id': self.secured_entity.pk,
'password': self.secured_entity.password})
self.assertContains(response, 'Sorry, this secured entity is no longer available')
def test_access_secured_entity_from_url_just_after_deadline_returns_correct_response__unauthorized(self):
self._create_secured_entity_from_url()
SecuredEntity.objects.filter(pk=self.secured_entity.pk).update(
created=self.secured_entity.created - settings.SECURED_ENTITY_ACCESSIBLE_TIME - timedelta(seconds=1))
response = self.client.get(self.access_url)
self.assertContains(response, 'Sorry, this secured entity is no longer available')
def test_access_secured_entity_from_file_returns_correct_response__unauthorized(self):
self._create_secured_entity_from_file()
response = self.client.get(self.access_url)
self.assertContains(response, 'Please provide password in order to access this secured entity.')
self.assertContains(response, '<input id="id_password" name="password" type="text">')
self.assertContains(response, 'Go go go!')
def test_access_secured_entity_from_file_without_password_returns_correct_response__unauthorized(self):
self._create_secured_entity_from_file()
response = self.client.post(self.access_url, {'id': self.secured_entity.pk})
self.assertContains(response, 'This field is required')
def test_access_secured_entity_from_file_wrong_password_returns_correct_response__unauthorized(self):
self._create_secured_entity_from_file()
response = self.client.post(self.access_url, {'id': self.secured_entity.pk, 'password': 'xxx'})
self.assertContains(response, 'Password do not match')
def test_access_secured_entity_from_file_correct_password_results_in_302__unauthorized(self):
self._create_secured_entity_from_file()
response = self.client.post(self.access_url, {'id': self.secured_entity.pk,
'password': self.secured_entity.password})
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
def test_access_secured_entity_from_file_correct_password_returns_correct_response__unauthorized(self):
self._create_secured_entity_from_file()
response = self.client.post(self.access_url, {'id': self.secured_entity.pk,
'password': self.secured_entity.password})
self.assertIn(settings.MEDIA_URL, response['Location'])
self.assertIn(self.tmp_file.name.split('/')[-1], response['Location'])
def test_access_secured_entity_from_file_correct_password_just_before_deadline_results_in_302__unauthorized(self):
self._create_secured_entity_from_file()
SecuredEntity.objects.filter(pk=self.secured_entity.pk).update(
created=self.secured_entity.created - settings.SECURED_ENTITY_ACCESSIBLE_TIME + timedelta(seconds=1))
response = self.client.post(self.access_url, {'id': self.secured_entity.pk,
'password': self.secured_entity.password})
self.assertEqual(response.status_code, status.HTTP_302_FOUND)
def test_access_secured_entity_from_file_correct_password_just_before_deadline_returns_correct_response__unauthorized(self):
self._create_secured_entity_from_file()
SecuredEntity.objects.filter(pk=self.secured_entity.pk).update(
created=self.secured_entity.created - settings.SECURED_ENTITY_ACCESSIBLE_TIME + timedelta(seconds=1))
response = self.client.post(self.access_url, {'id': self.secured_entity.pk,
'password': self.secured_entity.password})
self.assertIn(settings.MEDIA_URL, response['Location'])
self.assertIn(self.tmp_file.name.split('/')[-1], response['Location'])
def test_access_secured_entity_from_file_correct_password_just_after_deadline_returns_correct_response__unauthorized(self):
self._create_secured_entity_from_file()
SecuredEntity.objects.filter(pk=self.secured_entity.pk).update(
created=self.secured_entity.created - settings.SECURED_ENTITY_ACCESSIBLE_TIME - timedelta(seconds=1))
response = self.client.post(self.access_url, {'id': self.secured_entity.pk,
'password': self.secured_entity.password})
self.assertContains(response, 'Sorry, this secured entity is no longer available')
def test_access_secured_entity_from_file_just_after_deadline_returns_correct_response__unauthorized(self):
self._create_secured_entity_from_file()
SecuredEntity.objects.filter(pk=self.secured_entity.pk).update(
created=self.secured_entity.created - settings.SECURED_ENTITY_ACCESSIBLE_TIME - timedelta(seconds=1))
response = self.client.get(self.access_url)
self.assertContains(response, 'Sorry, this secured entity is no longer available')
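# Editor's sketch (assumption, not part of this application): the deadline
# tests above are consistent with an access view that treats an entity as
# expired once created + settings.SECURED_ENTITY_ACCESSIBLE_TIME has passed,
# e.g.:
#
# from django.utils import timezone
#
# def is_expired(secured_entity):
#     deadline = secured_entity.created + settings.SECURED_ENTITY_ACCESSIBLE_TIME
#     return timezone.now() > deadline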
| 48.282776
| 128
| 0.717123
| 2,235
| 18,782
| 5.609396
| 0.044743
| 0.199091
| 0.116615
| 0.068198
| 0.963229
| 0.961155
| 0.95597
| 0.946319
| 0.943208
| 0.93914
| 0
| 0.004686
| 0.19327
| 18,782
| 388
| 129
| 48.407216
| 0.82273
| 0
| 0
| 0.78629
| 0
| 0
| 0.077894
| 0.00197
| 0
| 0
| 0
| 0
| 0.197581
| 1
| 0.157258
| false
| 0.258065
| 0.024194
| 0
| 0.185484
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
a77f9df1dd3a1dbaee2404fb136e0a2c7a8300b6
| 50,030
|
py
|
Python
|
tests/test_runner/test_runner_test.py
|
Neerajakshulu/golem
|
06e685d56f51df82a8f80acd1f2601ca16fb4fc0
|
[
"MIT"
] | null | null | null |
tests/test_runner/test_runner_test.py
|
Neerajakshulu/golem
|
06e685d56f51df82a8f80acd1f2601ca16fb4fc0
|
[
"MIT"
] | null | null | null |
tests/test_runner/test_runner_test.py
|
Neerajakshulu/golem
|
06e685d56f51df82a8f80acd1f2601ca16fb4fc0
|
[
"MIT"
] | null | null | null |
import os
import json
from golem.test_runner import test_runner, execution_logger
from golem.test_runner.start_execution import define_drivers
from golem.gui import gui_utils
from golem.core import settings_manager
def _define_drivers_mock(selected_drivers):
default_browsers = gui_utils.get_supported_browsers_suggestions()
return define_drivers(selected_drivers, [], default_browsers)
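# _define_drivers_mock builds browser definitions from the GUI's supported
# browser suggestions, passing an empty list as the second argument
# (presumably the custom/remote drivers); tests below take the first
# definition, e.g. _define_drivers_mock(['chrome'])[0].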
class Test__get_set_name:
def test___get_set_name(self):
test_data = {
'username': 'user01',
'set_name': 'set01'
}
assert test_runner._get_set_name(test_data) == 'set01'
def test___get_set_name__no_set_name_present(self):
test_data = {
'username': 'user01',
'password': 'password01'
}
actual_value = test_runner._get_set_name(test_data)
# Python 3.4: dict order is not guaranteed, so either value is acceptable TODO
assert actual_value == 'user01' or actual_value == 'password01'
def test___get_set_name__empty_data(self):
test_data = {}
assert test_runner._get_set_name(test_data) == ''
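# Editor's sketch (assumption, not the actual golem implementation): the
# three tests above are consistent with a helper that prefers an explicit
# 'set_name' key, falls back to an arbitrary value from the data dict
# (hence the order-dependent assertion), and returns '' for empty data:
#
# def _get_set_name(test_data):
#     if 'set_name' in test_data:
#         return test_data['set_name']
#     return next(iter(test_data.values()), '')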
# TestRunner decision table
#
# CE : code error
# S : success
# F : failure
# E : error
#
# * * * * * * * * * * * * * * * * * * * * * * *
# A0 A1 A2 A3 A4 A5 A6 A7 A8 A9 B0 B1 B2 B3 B4 B5 B6 B7 B8 B9 C0 C1 C2 C3 C4 C5 C6 C7 C8 C9 D0 D1 D2 D3 D4 D5 D6 D7 D8 D9 E0 E1 E2 E3 E4 E5
# import error test Y N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N
# import error page N Y N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N N
# soft error in setup . . N N Y N N N N N N Y N N N N N Y Y Y Y N N N N N N N N N N N N N N N Y Y N N N N N N N N
# exception in setup . . N . . . . . . . Y Y Y Y Y Y Y N N N N N N N N N N N N N N N N N N N N N N N N N N N N N
# failure in setup . . N Y Y Y Y Y Y Y . . . . . . . N N N N N N N N N N N N N N N N N N N N N N N N N N N N N
# soft error in test . . N . . . . . . . . . . . . . . N N N N N Y N N N N N N Y N N N N N Y Y Y Y Y Y N N N N N
# exception in test . . N . . . . . . . . . . . . . . N N N N . . . . . . . Y Y Y Y Y Y Y N N N N N N N N N N N
# failure in test . . N . . . . . . . . . . . . . . N N N N Y Y Y Y Y Y Y . . . . . . . N N N N N N N N N N N
# soft error in teardown . . N N N Y N Y N Y N N Y N Y N Y N Y N N N N Y Y N Y N N N Y Y N Y N N N Y Y N N Y N Y N Y
# exception in teardown . . N N N N Y Y . . N N N Y Y . . N N Y . N N N Y Y . . N N N Y Y . . N N N N Y . N Y Y . .
# failure in teardown . . N N N N . . Y Y N N N . . Y Y N N . Y N N N . . Y Y N N N . . Y Y N N N N . Y N . . Y Y
#
# result CE CE S F F F F F F F CE CE CE CE CE CE CE E E CE F F F F F F F F CE CE CE CE CE CE CE E E E E E E E CE CE F F
# setup is run N N Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y
# test is run N N Y N N N N N N N N N N N N N N Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y
# teardown is run N N Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y Y
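# How to read the table (editor's note): each column is one scenario and the
# test methods below carry the column id as a comment. Column A4, for
# example, has Y for both "soft error in setup" and "failure in setup"; its
# bottom rows give result F (failure) with setup and teardown run but test()
# skipped, which is exactly what test_run_test__failure_and_error_in_setup
# asserts.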
class Test_run_test:
def _create_test(self, testdir, project, name, content):
path = os.path.join(testdir, 'projects', project, 'tests', name + '.py')
with open(path, 'w+') as f:
f.write(content)
def _create_page(self, testdir, project, name, content):
path = os.path.join(testdir, 'projects', project, 'pages', name + '.py')
with open(path, 'w+') as f:
f.write(content)
def _mock_report_directory(self, testdir, project, test_name):
path = os.path.join(testdir, 'projects', project, 'reports', 'single_tests',
test_name, '00001')
os.makedirs(path)
return path
def _read_report_json(self, report_directory):
report_path = os.path.join(report_directory, 'report.json')
with open(report_path) as f:
return json.load(f)
# SUCCESS TESTS
# A2
def test_run_test__success(self, project_function_clean, caplog, test_utils):
"""Test runs successfully"""
testdir = project_function_clean['testdir']
project = project_function_clean['name']
test_name = test_utils.random_numeric_string(10)
content = """
description = 'some description'
def setup(data):
step('setup step')
def test(data):
step('test step')
def teardown(data):
step('teardown step')
"""
self._create_test(testdir, project, test_name, content)
report_directory = self._mock_report_directory(testdir, project, test_name)
settings = settings_manager.get_project_settings(testdir, project)
browser = _define_drivers_mock(['chrome'])[0]
# run test
test_runner.run_test(workspace=testdir, project=project,
test_name=test_name, test_data={},
browser=browser, settings=settings,
report_directory=report_directory)
# verify console logs
records = caplog.records
assert records[0].message == 'Test execution started: {}'.format(test_name)
assert records[1].message == 'Browser: chrome'
assert records[2].message == 'setup step'
assert records[3].message == 'test step'
assert records[4].message == 'teardown step'
assert records[5].message == 'Test Result: SUCCESS'
# verify report.json
report = self._read_report_json(report_directory)
assert report['browser'] == 'chrome'
assert report['description'] == 'some description'
assert report['environment'] == ''
assert report['errors'] == []
assert report['result'] == 'success'
assert report['set_name'] == ''
assert report['steps'] == [
{'message': 'setup step', 'screenshot': None, 'error': None},
{'message': 'test step', 'screenshot': None, 'error': None},
{'message': 'teardown step', 'screenshot': None, 'error': None},
]
assert report['test_case'] == test_name
assert report['test_data'] == {}
assert 'test_elapsed_time' in report
assert 'test_timestamp' in report
assert len(report.keys()) == 11
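# The 11 keys asserted above are: browser, description, environment,
# errors, result, set_name, steps, test_case, test_data,
# test_elapsed_time and test_timestamp.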
# A2
def test_run_test__success_with_data(self, project_function_clean,
caplog, test_utils):
"""Test runs successfully with test data"""
testdir = project_function_clean['testdir']
project = project_function_clean['name']
test_name = test_utils.random_numeric_string(10)
content = """
description = 'some description'
def setup(data):
step('setup step')
def test(data):
step('test step')
def teardown(data):
step('teardown step')
"""
self._create_test(testdir, project, test_name, content)
report_directory = self._mock_report_directory(testdir, project,
test_name)
settings = settings_manager.get_project_settings(testdir, project)
browser = _define_drivers_mock(['chrome'])[0]
test_data = dict(username='username1', password='password1')
# run test
test_runner.run_test(workspace=testdir, project=project,
test_name=test_name, test_data=test_data,
browser=browser, settings=settings,
report_directory=report_directory)
# verify console logs
records = caplog.records
assert records[0].message == 'Test execution started: {}'.format(
test_name)
assert records[1].message == 'Browser: chrome'
# Python 3.4: dict order is not guaranteed, so either ordering is acceptable TODO
value_a = 'Using data:\n username: username1\n password: password1\n'
value_b = 'Using data:\n password: password1\n username: username1\n'
assert records[2].message == value_a or records[2].message == value_b
assert records[3].message == 'setup step'
assert records[4].message == 'test step'
assert records[5].message == 'teardown step'
assert records[6].message == 'Test Result: SUCCESS'
# verify report.json
report = self._read_report_json(report_directory)
assert report['browser'] == 'chrome'
assert report['description'] == 'some description'
assert report['environment'] == ''
assert report['errors'] == []
assert report['result'] == 'success'
# Python 3.4: dict order is not guaranteed TODO
assert report['set_name'] == 'username1' or report['set_name'] == 'password1'
assert report['steps'] == [
{'message': 'setup step', 'screenshot': None, 'error': None},
{'message': 'test step', 'screenshot': None, 'error': None},
{'message': 'teardown step', 'screenshot': None, 'error': None},
]
assert report['test_case'] == test_name
assert report['test_data'] == {'username': "'username1'", 'password': "'password1'"}
assert 'test_elapsed_time' in report
assert 'test_timestamp' in report
assert len(report.keys()) == 11
# ERRORS in import_modules
# A0
def test_run_test__import_error_on_test(self, project_function_clean,
caplog, test_utils):
"""The test fails with 'code error' when it has a syntax error
Test result is code error"""
testdir = project_function_clean['testdir']
project = project_function_clean['name']
test_name = test_utils.random_numeric_string(10)
content = """
description = 'some description'
# missing colon
def test(data)
step('this step wont be run')
"""
self._create_test(testdir, project, test_name, content)
report_directory = self._mock_report_directory(testdir, project,
test_name)
settings = settings_manager.get_project_settings(testdir, project)
browser = _define_drivers_mock(['chrome'])[0]
test_runner.run_test(workspace=testdir, project=project,
test_name=test_name, test_data={},
browser=browser, settings=settings,
report_directory=report_directory)
# verify console logs
records = caplog.records
assert records[0].message == 'Test execution started: {}'.format(
test_name)
assert records[1].message == 'Browser: chrome'
assert records[2].levelname == 'ERROR'
error_contains = 'def test(data)\n ^\nSyntaxError: invalid syntax'
assert error_contains in records[2].message
assert records[3].message == 'Test Result: CODE ERROR'
# verify report.json
report = self._read_report_json(report_directory)
assert report['browser'] == 'chrome'
assert report['description'] is None # description could not be read
assert report['environment'] == ''
assert len(report['errors']) == 1
assert report['errors'][0]['message'] == 'SyntaxError: invalid syntax'
assert error_contains in report['errors'][0]['description']
assert report['result'] == 'code error'
assert report['set_name'] == ''
assert report['steps'] == []
assert report['test_case'] == test_name
assert report['test_data'] == {}
# A1
def test_run_test__import_error_page_object(self, project_function_clean,
caplog, test_utils):
"""The test fails with 'code error' when an imported page has a syntax error"""
testdir = project_function_clean['testdir']
project = project_function_clean['name']
test_name = test_utils.random_numeric_string(10)
content = """
pages = ['page1']
def setup(data):
step('this step wont be run')
def test(data):
step('this step wont be run')
def teardown(data):
step('this step wont be run')
"""
self._create_test(testdir, project, test_name, content)
page_content = """
element1 = ('id', 'someId'
element2 = ('css', '.oh.no')
"""
self._create_page(testdir, project, 'page1', page_content)
report_directory = self._mock_report_directory(testdir, project,
test_name)
settings = settings_manager.get_project_settings(testdir, project)
browser = _define_drivers_mock(['chrome'])[0]
test_runner.run_test(workspace=testdir, project=project,
test_name=test_name, test_data={},
browser=browser, settings=settings,
report_directory=report_directory)
# verify console logs
records = caplog.records
assert records[0].message == 'Test execution started: {}'.format(
test_name)
assert records[1].message == 'Browser: chrome'
assert records[2].levelname == 'ERROR'
error_contains = "element2 = ('css', '.oh.no')\n ^\nSyntaxError: invalid syntax"
assert error_contains in records[2].message
assert records[3].message == 'Test Result: CODE ERROR'
# verify report.json
report = self._read_report_json(report_directory)
assert report['browser'] == 'chrome'
assert report['description'] is None # description could not be read
assert report['environment'] == ''
assert len(report['errors']) == 1
assert 'SyntaxError: invalid syntax' in report['errors'][0]['message']
assert error_contains in report['errors'][0]['description']
assert report['result'] == 'code error'
assert report['set_name'] == ''
assert report['steps'] == []
assert report['test_case'] == test_name
assert report['test_data'] == {}
assert 'test_elapsed_time' in report
assert 'test_timestamp' in report
assert len(report.keys()) == 11
# A3
def test_run_test__AssertionError_in_setup(self, project_function_clean,
caplog, test_utils):
"""The test ends with 'failure' when the setup function throws AssertionError.
Test is not run
Teardown is run
"""
testdir = project_function_clean['testdir']
project = project_function_clean['name']
test_name = test_utils.random_numeric_string(10)
content = """
description = 'desc'
def setup(data):
fail('setup step fail')
def test(data):
step('test step')
def teardown(data):
step('teardown step')
"""
self._create_test(testdir, project, test_name, content)
report_directory = self._mock_report_directory(testdir, project,
test_name)
settings = settings_manager.get_project_settings(testdir, project)
browser = _define_drivers_mock(['chrome'])[0]
test_runner.run_test(workspace=testdir, project=project,
test_name=test_name, test_data={},
browser=browser, settings=settings,
report_directory=report_directory)
# verify console logs
records = caplog.records
assert records[0].message == 'Test execution started: {}'.format(
test_name)
assert records[1].message == 'Browser: chrome'
assert records[2].levelname == 'ERROR'
assert 'setup step fail' in records[2].message
assert 'AssertionError: setup step fail' in records[2].message
assert records[3].message == 'teardown step'
assert records[4].message == 'Test Result: FAILURE'
# verify report.json
report = self._read_report_json(report_directory)
assert report['description'] == 'desc'
assert len(report['errors']) == 1
assert 'setup step fail' in report['errors'][0]['message']
assert report['result'] == 'failure'
assert report['steps'][0]['message'] == 'Failure'
assert 'AssertionError: setup step fail' in report['steps'][0]['error']['description']
assert report['steps'][1]['message'] == 'teardown step'
# A4
def test_run_test__failure_and_error_in_setup(self, project_function_clean,
caplog, test_utils):
"""The test ends with 'failure' when the setup function throws AssertionError,
even when there's an error in setup
Test is not run
Teardown is run
"""
testdir = project_function_clean['testdir']
project = project_function_clean['name']
test_name = test_utils.random_numeric_string(10)
content = """
description = 'desc'
def setup(data):
error('error in setup')
fail('setup step fail')
def test(data):
step('test step')
def teardown(data):
step('teardown step')
"""
self._create_test(testdir, project, test_name, content)
report_directory = self._mock_report_directory(testdir, project,
test_name)
settings = settings_manager.get_project_settings(testdir, project)
browser = _define_drivers_mock(['chrome'])[0]
test_runner.run_test(workspace=testdir, project=project,
test_name=test_name, test_data={},
browser=browser, settings=settings,
report_directory=report_directory)
# verify console logs
records = caplog.records
assert records[5].message == 'Test Result: FAILURE'
# verify report.json
report = self._read_report_json(report_directory)
assert len(report['errors']) == 2
assert report['result'] == 'failure'
assert len(report['steps']) == 3
assert report['errors'][0]['message'] == 'error in setup'
assert report['errors'][1]['message'] == 'AssertionError: setup step fail'
# A5
def test_run_test__failure_in_setup_error_in_teardown(self, project_function_clean,
caplog, test_utils):
"""Setup throws AssertionError
Teardown throws error
Test ends with 'failure'
test() is not run
"""
testdir = project_function_clean['testdir']
project = project_function_clean['name']
test_name = test_utils.random_numeric_string(10)
content = """
description = 'desc'
def setup(data):
fail('setup step fail')
def test(data):
step('test step')
def teardown(data):
step('teardown step')
error('error in teardown')
"""
self._create_test(testdir, project, test_name, content)
report_directory = self._mock_report_directory(testdir, project,
test_name)
settings = settings_manager.get_project_settings(testdir, project)
browser = _define_drivers_mock(['chrome'])[0]
test_runner.run_test(workspace=testdir, project=project,
test_name=test_name, test_data={},
browser=browser, settings=settings,
report_directory=report_directory)
# verify console logs
records = caplog.records
assert records[5].message == 'Test Result: FAILURE'
# verify report.json
report = self._read_report_json(report_directory)
assert len(report['errors']) == 2
assert report['result'] == 'failure'
assert len(report['steps']) == 3
assert report['errors'][0]['message'] == 'AssertionError: setup step fail'
assert report['errors'][1]['message'] == 'error in teardown'
# A6
def test_run_test__failure_in_setup_exception_in_teardown(self, project_function_clean,
caplog, test_utils):
"""Setup throws AssertionError
Teardown throws an exception
Test ends with 'failure'
test() is not run
"""
testdir = project_function_clean['testdir']
project = project_function_clean['name']
test_name = test_utils.random_numeric_string(10)
content = """
description = 'desc'
def setup(data):
fail('setup step fail')
def test(data):
step('test step')
def teardown(data):
step('teardown step')
foo = bar
"""
self._create_test(testdir, project, test_name, content)
report_directory = self._mock_report_directory(testdir, project,
test_name)
settings = settings_manager.get_project_settings(testdir, project)
browser = _define_drivers_mock(['chrome'])[0]
test_runner.run_test(workspace=testdir, project=project,
test_name=test_name, test_data={},
browser=browser, settings=settings,
report_directory=report_directory)
# verify console logs
records = caplog.records
assert records[5].message == 'Test Result: FAILURE'
# verify report.json
report = self._read_report_json(report_directory)
assert len(report['errors']) == 2
assert report['result'] == 'failure'
assert len(report['steps']) == 3
assert report['errors'][0]['message'] == 'AssertionError: setup step fail'
assert report['errors'][1]['message'] == "NameError: name 'bar' is not defined"
# A8
def test_run_test__failure_in_setup_failure_in_teardown(self, project_function_clean,
caplog, test_utils):
"""Setup throws AssertionError
Teardown throws AssertionError
Test ends with 'failure'
test() is not run
"""
testdir = project_function_clean['testdir']
project = project_function_clean['name']
test_name = test_utils.random_numeric_string(10)
content = """
description = 'desc'
def setup(data):
fail('setup step fail')
def test(data):
step('test step')
def teardown(data):
fail('failure in teardown')
"""
self._create_test(testdir, project, test_name, content)
report_directory = self._mock_report_directory(testdir, project,
test_name)
settings = settings_manager.get_project_settings(testdir, project)
browser = _define_drivers_mock(['chrome'])[0]
test_runner.run_test(workspace=testdir, project=project,
test_name=test_name, test_data={},
browser=browser, settings=settings,
report_directory=report_directory)
# verify console logs
records = caplog.records
assert records[4].message == 'Test Result: FAILURE'
# verify report.json
report = self._read_report_json(report_directory)
assert len(report['errors']) == 2
assert report['result'] == 'failure'
assert len(report['steps']) == 2
assert report['errors'][0]['message'] == 'AssertionError: setup step fail'
assert report['errors'][1]['message'] == 'AssertionError: failure in teardown'
# B0
def test_run_test__exception_in_setup(self, project_function_clean,
caplog, test_utils):
"""Setup throws exception
Test ends with 'code error'
test() is not run
teardown() is run
"""
testdir = project_function_clean['testdir']
project = project_function_clean['name']
test_name = test_utils.random_numeric_string(10)
content = """
description = 'desc'
def setup(data):
foo = bar
def test(data):
step('test step')
def teardown(data):
step('teardown step')
"""
self._create_test(testdir, project, test_name, content)
report_directory = self._mock_report_directory(testdir, project,
test_name)
settings = settings_manager.get_project_settings(testdir, project)
browser = _define_drivers_mock(['chrome'])[0]
test_runner.run_test(workspace=testdir, project=project,
test_name=test_name, test_data={},
browser=browser, settings=settings,
report_directory=report_directory)
# verify console logs
records = caplog.records
assert records[4].message == 'Test Result: CODE ERROR'
# verify report.json
report = self._read_report_json(report_directory)
assert len(report['errors']) == 1
assert report['result'] == 'code error'
assert len(report['steps']) == 2
assert report['errors'][0]['message'] == "NameError: name 'bar' is not defined"
# B1
def test_run_test__exception_and_error_in_setup(self, project_function_clean,
caplog, test_utils):
"""Setup has error and throws exception
Test ends with 'code error'
test() is not run
teardown() is run
"""
testdir = project_function_clean['testdir']
project = project_function_clean['name']
test_name = test_utils.random_numeric_string(10)
content = """
description = 'desc'
def setup(data):
error('setup error')
foo = bar
def test(data):
step('test step')
def teardown(data):
step('teardown step')
"""
self._create_test(testdir, project, test_name, content)
report_directory = self._mock_report_directory(testdir, project,
test_name)
settings = settings_manager.get_project_settings(testdir, project)
browser = _define_drivers_mock(['chrome'])[0]
test_runner.run_test(workspace=testdir, project=project,
test_name=test_name, test_data={},
browser=browser, settings=settings,
report_directory=report_directory)
# verify console logs
records = caplog.records
assert records[5].message == 'Test Result: CODE ERROR'
# verify report.json
report = self._read_report_json(report_directory)
assert report['result'] == 'code error'
assert len(report['steps']) == 3
assert len(report['errors']) == 2
assert report['errors'][0]['message'] == 'setup error'
assert report['errors'][1]['message'] == "NameError: name 'bar' is not defined"
# B3
def test_run_test__exception_in_setup_exception_in_teardown(self, project_function_clean,
caplog, test_utils):
"""Setup throws exception
Teardown throws exception
Test ends with 'code error'
test() is not run
"""
testdir = project_function_clean['testdir']
project = project_function_clean['name']
test_name = test_utils.random_numeric_string(10)
content = """
description = 'desc'
def setup(data):
foo = bar
def test(data):
step('test step')
def teardown(data):
foo = baz
"""
self._create_test(testdir, project, test_name, content)
report_directory = self._mock_report_directory(testdir, project,
test_name)
settings = settings_manager.get_project_settings(testdir, project)
browser = _define_drivers_mock(['chrome'])[0]
test_runner.run_test(workspace=testdir, project=project,
test_name=test_name, test_data={},
browser=browser, settings=settings,
report_directory=report_directory)
# verify console logs
records = caplog.records
assert records[4].message == 'Test Result: CODE ERROR'
# verify report.json
report = self._read_report_json(report_directory)
assert report['result'] == 'code error'
assert len(report['steps']) == 2
assert len(report['errors']) == 2
assert report['errors'][0]['message'] == "NameError: name 'bar' is not defined"
assert report['errors'][1]['message'] == "NameError: name 'baz' is not defined"
# B5
def test_run_test__exception_in_setup_failure_in_teardown(self, project_function_clean,
caplog, test_utils):
"""Setup throws exception
Teardown throws AssertionError
Test ends with 'code error'
test() is not run
"""
testdir = project_function_clean['testdir']
project = project_function_clean['name']
test_name = test_utils.random_numeric_string(10)
content = """
description = 'desc'
def setup(data):
foo = bar
def test(data):
step('test step')
def teardown(data):
fail('teardown failure')
"""
self._create_test(testdir, project, test_name, content)
report_directory = self._mock_report_directory(testdir, project,
test_name)
settings = settings_manager.get_project_settings(testdir, project)
browser = _define_drivers_mock(['chrome'])[0]
test_runner.run_test(workspace=testdir, project=project,
test_name=test_name, test_data={},
browser=browser, settings=settings,
report_directory=report_directory)
# verify console logs
records = caplog.records
assert records[4].message == 'Test Result: CODE ERROR'
# verify report.json
report = self._read_report_json(report_directory)
assert report['result'] == 'code error'
assert len(report['steps']) == 2
assert len(report['errors']) == 2
assert report['errors'][0]['message'] == "NameError: name 'bar' is not defined"
assert report['errors'][1]['message'] == 'AssertionError: teardown failure'
# B7
def test_run_test__error_in_setup(self, project_function_clean,
caplog, test_utils):
"""Setup has error
test() is run
teardown() is run
"""
testdir = project_function_clean['testdir']
project = project_function_clean['name']
test_name = test_utils.random_numeric_string(10)
content = """
description = 'desc'
def setup(data):
error('setup error')
def test(data):
step('test step')
def teardown(data):
step('teardown step')
"""
self._create_test(testdir, project, test_name, content)
report_directory = self._mock_report_directory(testdir, project,
test_name)
settings = settings_manager.get_project_settings(testdir, project)
browser = _define_drivers_mock(['chrome'])[0]
test_runner.run_test(workspace=testdir, project=project,
test_name=test_name, test_data={},
browser=browser, settings=settings,
report_directory=report_directory)
# verify console logs
records = caplog.records
assert records[5].message == 'Test Result: ERROR'
# verify report.json
report = self._read_report_json(report_directory)
assert report['result'] == 'error'
assert len(report['steps']) == 3
assert len(report['errors']) == 1
assert report['errors'][0]['message'] == "setup error"
# B9
def test_run_test__error_in_setup_exception_in_teardown(self, project_function_clean,
caplog, test_utils):
"""Setup has error
Teardown throws exception
test() is run
"""
testdir = project_function_clean['testdir']
project = project_function_clean['name']
test_name = test_utils.random_numeric_string(10)
content = """
description = 'desc'
def setup(data):
error('setup error')
def test(data):
step('test step')
def teardown(data):
foo = bar
"""
self._create_test(testdir, project, test_name, content)
report_directory = self._mock_report_directory(testdir, project,
test_name)
settings = settings_manager.get_project_settings(testdir, project)
browser = _define_drivers_mock(['chrome'])[0]
test_runner.run_test(workspace=testdir, project=project,
test_name=test_name, test_data={},
browser=browser, settings=settings,
report_directory=report_directory)
# verify console logs
records = caplog.records
assert records[5].message == 'Test Result: CODE ERROR'
# verify report.json
report = self._read_report_json(report_directory)
assert report['result'] == 'code error'
assert len(report['steps']) == 3
assert len(report['errors']) == 2
assert report['errors'][0]['message'] == 'setup error'
assert report['errors'][1]['message'] == "NameError: name 'bar' is not defined"
# C0
def test_run_test__error_in_setup_failure_in_teardown(self, project_function_clean,
caplog, test_utils):
"""Setup has error
Teardown throws AssertionError
test() is run
"""
testdir = project_function_clean['testdir']
project = project_function_clean['name']
test_name = test_utils.random_numeric_string(10)
content = """
description = 'desc'
def setup(data):
error('setup error')
def test(data):
step('test step')
def teardown(data):
fail('teardown fail')
"""
self._create_test(testdir, project, test_name, content)
report_directory = self._mock_report_directory(testdir, project,
test_name)
settings = settings_manager.get_project_settings(testdir, project)
browser = _define_drivers_mock(['chrome'])[0]
test_runner.run_test(workspace=testdir, project=project,
test_name=test_name, test_data={},
browser=browser, settings=settings,
report_directory=report_directory)
# verify console logs
records = caplog.records
assert records[5].message == 'Test Result: FAILURE'
# verify report.json
report = self._read_report_json(report_directory)
assert report['result'] == 'failure'
assert len(report['steps']) == 3
assert len(report['errors']) == 2
assert report['errors'][0]['message'] == 'setup error'
assert report['errors'][1]['message'] == 'AssertionError: teardown fail'
# C1
def test_run_test__failure_in_test(self, project_function_clean,
caplog, test_utils):
"""test() throws AssertionError
teardown() is run
"""
testdir = project_function_clean['testdir']
project = project_function_clean['name']
test_name = test_utils.random_numeric_string(10)
content = """
description = 'desc'
def setup(data):
step('setup step')
def test(data):
step('test step')
fail('test fail')
def teardown(data):
step('teardown step')
"""
self._create_test(testdir, project, test_name, content)
report_directory = self._mock_report_directory(testdir, project,
test_name)
settings = settings_manager.get_project_settings(testdir, project)
browser = _define_drivers_mock(['chrome'])[0]
test_runner.run_test(workspace=testdir, project=project,
test_name=test_name, test_data={},
browser=browser, settings=settings,
report_directory=report_directory)
# verify console logs
records = caplog.records
assert records[6].message == 'Test Result: FAILURE'
# verify report.json
report = self._read_report_json(report_directory)
assert report['result'] == 'failure'
assert len(report['steps']) == 4
assert len(report['errors']) == 1
assert report['errors'][0]['message'] == 'AssertionError: test fail'
# C2
def test_run_test__failure_and_error_in_test(self, project_function_clean,
caplog, test_utils):
"""test() has error and throws AssertionError
teardown() is run
"""
testdir = project_function_clean['testdir']
project = project_function_clean['name']
test_name = test_utils.random_numeric_string(10)
content = """
description = 'desc'
def setup(data):
step('setup step')
def test(data):
error('test error')
fail('test fail')
def teardown(data):
step('teardown step')
"""
self._create_test(testdir, project, test_name, content)
report_directory = self._mock_report_directory(testdir, project,
test_name)
settings = settings_manager.get_project_settings(testdir, project)
browser = _define_drivers_mock(['chrome'])[0]
test_runner.run_test(workspace=testdir, project=project,
test_name=test_name, test_data={},
browser=browser, settings=settings,
report_directory=report_directory)
# verify console logs
records = caplog.records
assert records[6].message == 'Test Result: FAILURE'
# verify report.json
report = self._read_report_json(report_directory)
assert report['result'] == 'failure'
assert len(report['steps']) == 4
assert len(report['errors']) == 2
assert report['errors'][0]['message'] == 'test error'
assert report['errors'][1]['message'] == 'AssertionError: test fail'
# C5
def test_run_test__failure_in_test_exception_in_teardown(self, project_function_clean,
caplog, test_utils):
"""test() throws AssertionError
teardown() throws exception
"""
testdir = project_function_clean['testdir']
project = project_function_clean['name']
test_name = test_utils.random_numeric_string(10)
content = """
description = 'desc'
def setup(data):
step('setup step')
def test(data):
fail('test fail')
def teardown(data):
foo = bar
"""
self._create_test(testdir, project, test_name, content)
report_directory = self._mock_report_directory(testdir, project,
test_name)
settings = settings_manager.get_project_settings(testdir, project)
browser = _define_drivers_mock(['chrome'])[0]
test_runner.run_test(workspace=testdir, project=project,
test_name=test_name, test_data={},
browser=browser, settings=settings,
report_directory=report_directory)
# verify console logs
records = caplog.records
assert records[5].message == 'Test Result: FAILURE'
# verify report.json
report = self._read_report_json(report_directory)
assert report['result'] == 'failure'
assert len(report['steps']) == 3
assert len(report['errors']) == 2
assert report['errors'][0]['message'] == 'AssertionError: test fail'
assert report['errors'][1]['message'] == "NameError: name 'bar' is not defined"
# C7
def test_run_test__failure_in_test_failure_in_teardown(self, project_function_clean,
caplog, test_utils):
"""test() throws AssertionError
teardown() throws AssertionError
"""
testdir = project_function_clean['testdir']
project = project_function_clean['name']
test_name = test_utils.random_numeric_string(10)
content = """
description = 'desc'
def setup(data):
step('setup step')
def test(data):
fail('test fail')
def teardown(data):
fail('teardown fail')
"""
self._create_test(testdir, project, test_name, content)
report_directory = self._mock_report_directory(testdir, project,
test_name)
settings = settings_manager.get_project_settings(testdir, project)
browser = _define_drivers_mock(['chrome'])[0]
test_runner.run_test(workspace=testdir, project=project,
test_name=test_name, test_data={},
browser=browser, settings=settings,
report_directory=report_directory)
# verify console logs
records = caplog.records
assert records[5].message == 'Test Result: FAILURE'
# verify report.json
report = self._read_report_json(report_directory)
assert report['result'] == 'failure'
assert len(report['steps']) == 3
assert len(report['errors']) == 2
assert report['errors'][0]['message'] == 'AssertionError: test fail'
assert report['errors'][1]['message'] == 'AssertionError: teardown fail'
# C8
def test_run_test__exception_in_test(self, project_function_clean,
caplog, test_utils):
"""test() throws exception"""
testdir = project_function_clean['testdir']
project = project_function_clean['name']
test_name = test_utils.random_numeric_string(10)
content = """
description = 'desc'
def setup(data):
step('setup step')
def test(data):
foo = bar
def teardown(data):
step('teardown step')
"""
self._create_test(testdir, project, test_name, content)
report_directory = self._mock_report_directory(testdir, project,
test_name)
settings = settings_manager.get_project_settings(testdir, project)
browser = _define_drivers_mock(['chrome'])[0]
test_runner.run_test(workspace=testdir, project=project,
test_name=test_name, test_data={},
browser=browser, settings=settings,
report_directory=report_directory)
# verify console logs
records = caplog.records
assert records[5].message == 'Test Result: CODE ERROR'
# verify report.json
report = self._read_report_json(report_directory)
assert report['result'] == 'code error'
assert len(report['steps']) == 3
assert len(report['errors']) == 1
assert report['errors'][0]['message'] == "NameError: name 'bar' is not defined"
# C9
def test_run_test__error_and_exception_in_test(self, project_function_clean,
caplog, test_utils):
"""test() throws error and AssertionError
teardown()
"""
testdir = project_function_clean['testdir']
project = project_function_clean['name']
test_name = test_utils.random_numeric_string(10)
content = """
description = 'desc'
def setup(data):
step('setup step')
def test(data):
error('error in test')
foo = bar
def teardown(data):
step('teardown step')
"""
self._create_test(testdir, project, test_name, content)
report_directory = self._mock_report_directory(testdir, project,
test_name)
settings = settings_manager.get_project_settings(testdir, project)
browser = _define_drivers_mock(['chrome'])[0]
test_runner.run_test(workspace=testdir, project=project,
test_name=test_name, test_data={},
browser=browser, settings=settings,
report_directory=report_directory)
# verify console logs
records = caplog.records
assert records[6].message == 'Test Result: CODE ERROR'
# verify report.json
report = self._read_report_json(report_directory)
assert report['result'] == 'code error'
assert len(report['steps']) == 4
assert len(report['errors']) == 2
assert report['errors'][0]['message'] == 'error in test'
assert report['errors'][1]['message'] == "NameError: name 'bar' is not defined"
# D4
def test_run_test__exception_in_test_failure_in_teardown(self, project_function_clean,
caplog, test_utils):
"""test() throws exception
teardown() throws AssertionError
"""
testdir = project_function_clean['testdir']
project = project_function_clean['name']
test_name = test_utils.random_numeric_string(10)
content = """
description = 'desc'
def setup(data):
step('setup step')
def test(data):
foo = bar
def teardown(data):
fail('teardown fail')
"""
self._create_test(testdir, project, test_name, content)
report_directory = self._mock_report_directory(testdir, project,
test_name)
settings = settings_manager.get_project_settings(testdir, project)
browser = _define_drivers_mock(['chrome'])[0]
test_runner.run_test(workspace=testdir, project=project,
test_name=test_name, test_data={},
browser=browser, settings=settings,
report_directory=report_directory)
# verify console logs
records = caplog.records
assert records[5].message == 'Test Result: CODE ERROR'
# verify report.json
report = self._read_report_json(report_directory)
assert report['result'] == 'code error'
assert len(report['steps']) == 3
assert len(report['errors']) == 2
assert report['errors'][0]['message'] == "NameError: name 'bar' is not defined"
assert report['errors'][1]['message'] == 'AssertionError: teardown fail'
# D7
def test_run_test__error_in_setup_test_and_teardown(self, project_function_clean,
caplog, test_utils):
"""setup(), test() and teardown() have errors
"""
testdir = project_function_clean['testdir']
project = project_function_clean['name']
test_name = test_utils.random_numeric_string(10)
content = """
description = 'desc'
def setup(data):
error('setup error')
def test(data):
error('test error')
def teardown(data):
error('teardown error')
"""
self._create_test(testdir, project, test_name, content)
report_directory = self._mock_report_directory(testdir, project,
test_name)
settings = settings_manager.get_project_settings(testdir, project)
browser = _define_drivers_mock(['chrome'])[0]
test_runner.run_test(workspace=testdir, project=project,
test_name=test_name, test_data={},
browser=browser, settings=settings,
report_directory=report_directory)
# verify console logs
records = caplog.records
assert records[5].message == 'Test Result: ERROR'
# verify report.json
report = self._read_report_json(report_directory)
assert report['result'] == 'error'
assert len(report['steps']) == 3
assert len(report['errors']) == 3
assert report['errors'][0]['message'] == 'setup error'
assert report['errors'][1]['message'] == 'test error'
assert report['errors'][2]['message'] == 'teardown error'
| 42.870608
| 207
| 0.572097
| 5,607
| 50,030
| 4.893348
| 0.040842
| 0.0199
| 0.025586
| 0.029595
| 0.92255
| 0.909575
| 0.89372
| 0.874549
| 0.862339
| 0.858986
| 0
| 0.009538
| 0.325245
| 50,030
| 1,166
| 208
| 42.907376
| 0.803217
| 0.129322
| 0
| 0.862428
| 0
| 0
| 0.193682
| 0
| 0
| 0
| 0
| 0.000858
| 0.23237
| 1
| 0.036994
| false
| 0.008092
| 0.009249
| 0
| 0.052023
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a7d5a8daae7ee196ed2004ebfa08fa5b017107c0
| 8,201
|
py
|
Python
|
wntr/tests/test_multiple_simulations.py
|
xiamo311/AquaSCALE
|
28968d1b349c2370d8c20bda5b6675270e4ab65d
|
[
"BSD-3-Clause"
] | null | null | null |
wntr/tests/test_multiple_simulations.py
|
xiamo311/AquaSCALE
|
28968d1b349c2370d8c20bda5b6675270e4ab65d
|
[
"BSD-3-Clause"
] | null | null | null |
wntr/tests/test_multiple_simulations.py
|
xiamo311/AquaSCALE
|
28968d1b349c2370d8c20bda5b6675270e4ab65d
|
[
"BSD-3-Clause"
] | null | null | null |
# These tests rerun WNTR simulations (after resetting initial values, after stopping and restarting, and after pickling the network model) and check that the results match a single uninterrupted run
import unittest
from os.path import abspath, dirname, join
import pandas as pd
import pickle
testdir = dirname(abspath(str(__file__)))
test_datadir = join(testdir,'networks_for_testing')
ex_datadir = join(testdir,'..','..','examples','networks')
class TestResetInitialValues(unittest.TestCase):
@classmethod
def setUpClass(self):
import wntr
self.wntr = wntr
inp_file = join(ex_datadir, 'Net3.inp')
self.wn = self.wntr.network.WaterNetworkModel(inp_file)
self.wn.options.hydraulic_timestep = 3600
self.wn.options.duration = 24*3600
sim = self.wntr.sim.WNTRSimulator(self.wn)
self.res1 = sim.run_sim(solver_options={'TOL':1e-8})
self.wn.reset_initial_values()
self.res2 = sim.run_sim(solver_options={'TOL':1e-8})
@classmethod
def tearDownClass(self):
pass
def test_link_flowrate(self):
for link_name, link in self.wn.links():
for t in self.res1.link.major_axis:
self.assertAlmostEqual(self.res1.link.at['flowrate',t,link_name], self.res2.link.at['flowrate',t,link_name], 7)
def test_link_velocity(self):
for link_name, link in self.wn.links():
for t in self.res1.link.major_axis:
self.assertAlmostEqual(self.res1.link.at['velocity',t,link_name], self.res2.link.at['velocity',t,link_name], 7)
def test_node_demand(self):
for node_name, node in self.wn.nodes():
for t in self.res1.node.major_axis:
self.assertAlmostEqual(self.res1.node.at['demand',t,node_name], self.res2.node.at['demand',t,node_name], 7)
def test_node_expected_demand(self):
for node_name, node in self.wn.nodes():
for t in self.res1.node.major_axis:
self.assertAlmostEqual(self.res1.node.at['expected_demand',t,node_name], self.res2.node.at['expected_demand',t,node_name], 7)
def test_node_head(self):
for node_name, node in self.wn.nodes():
for t in self.res1.node.major_axis:
self.assertAlmostEqual(self.res1.node.at['head',t,node_name], self.res2.node.at['head',t,node_name], 7)
def test_node_pressure(self):
for node_name, node in self.wn.nodes():
for t in self.res1.node.major_axis:
self.assertAlmostEqual(self.res1.node.at['pressure',t,node_name], self.res2.node.at['pressure',t,node_name], 7)
class TestStopStartSim(unittest.TestCase):
@classmethod
def setUpClass(self):
import wntr
self.wntr = wntr
inp_file = join(ex_datadir, 'Net3.inp')
parser = self.wntr.epanet.InpFile()
self.wn = parser.read(inp_file)
self.wn.options.hydraulic_timestep = 3600
self.wn.options.duration = 24*3600
sim = self.wntr.sim.WNTRSimulator(self.wn)
self.res1 = sim.run_sim(solver_options={'TOL':1e-8})
parser = self.wntr.epanet.InpFile()
self.wn = parser.read(inp_file)
self.wn.options.hydraulic_timestep = 3600
self.wn.options.duration = 10*3600
sim = self.wntr.sim.WNTRSimulator(self.wn)
self.res2 = sim.run_sim(solver_options={'TOL':1e-8})
self.wn.options.duration = 24*3600
self.res3 = sim.run_sim(solver_options={'TOL':1e-8})
node_res = pd.concat([self.res2.node,self.res3.node],axis=1)
link_res = pd.concat([self.res2.link,self.res3.link],axis=1)
self.res2.node = node_res
self.res2.link = link_res
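# The stop/start technique above: res2 holds hours 0-10, res3 continues the
# same simulator instance out to hour 24, and pd.concat stitches the two
# result panels along the column axis so the combined series can be compared
# point-by-point against the uninterrupted 24-hour run in res1.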
@classmethod
def tearDownClass(self):
pass
def test_link_flowrate(self):
for link_name, link in self.wn.links():
for t in self.res1.link.major_axis:
self.assertAlmostEqual(self.res1.link.at['flowrate',t,link_name], self.res2.link.at['flowrate',t,link_name], 7)
def test_link_velocity(self):
for link_name, link in self.wn.links():
for t in self.res1.link.major_axis:
self.assertAlmostEqual(self.res1.link.at['velocity',t,link_name], self.res2.link.at['velocity',t,link_name], 7)
def test_node_demand(self):
for node_name, node in self.wn.nodes():
for t in self.res1.node.major_axis:
self.assertAlmostEqual(self.res1.node.at['demand',t,node_name], self.res2.node.at['demand',t,node_name], 7)
def test_node_expected_demand(self):
for node_name, node in self.wn.nodes():
for t in self.res1.node.major_axis:
self.assertAlmostEqual(self.res1.node.at['expected_demand',t,node_name], self.res2.node.at['expected_demand',t,node_name], 7)
def test_node_head(self):
for node_name, node in self.wn.nodes():
for t in self.res1.node.major_axis:
self.assertAlmostEqual(self.res1.node.at['head',t,node_name], self.res2.node.at['head',t,node_name], 7)
def test_node_pressure(self):
for node_name, node in self.wn.nodes():
for t in self.res1.node.major_axis:
self.assertAlmostEqual(self.res1.node.at['pressure',t,node_name], self.res2.node.at['pressure',t,node_name], 7)
class TestPickle(unittest.TestCase):
@classmethod
def setUpClass(self):
import wntr
self.wntr = wntr
inp_file = join(ex_datadir, 'Net3.inp')
parser = self.wntr.epanet.InpFile()
self.wn = parser.read(inp_file)
self.wn.options.hydraulic_timestep = 3600
self.wn.options.duration = 24*3600
sim = self.wntr.sim.WNTRSimulator(self.wn)
self.res1 = sim.run_sim(solver_options={'TOL':1e-8})
parser = self.wntr.epanet.InpFile()
self.wn = parser.read(inp_file)
self.wn.options.hydraulic_timestep = 3600
self.wn.options.duration = 10*3600
sim = self.wntr.sim.WNTRSimulator(self.wn)
self.res2 = sim.run_sim(solver_options={'TOL':1e-8})
with open('pickle_test.pickle','wb') as f:
pickle.dump(self.wn,f)
with open('pickle_test.pickle','rb') as f:
wn2 = pickle.load(f)
wn2.options.duration = 24*3600
sim = self.wntr.sim.WNTRSimulator(wn2)
self.res3 = sim.run_sim(solver_options={'TOL':1e-8})
node_res = pd.concat([self.res2.node,self.res3.node],axis=1)
link_res = pd.concat([self.res2.link,self.res3.link],axis=1)
self.res2.node = node_res
self.res2.link = link_res
@classmethod
def tearDownClass(self):
pass
def test_link_flowrate(self):
for link_name, link in self.wn.links():
for t in self.res1.link.major_axis:
self.assertAlmostEqual(self.res1.link.at['flowrate',t,link_name], self.res2.link.at['flowrate',t,link_name], 7)
def test_link_velocity(self):
for link_name, link in self.wn.links():
for t in self.res1.link.major_axis:
self.assertAlmostEqual(self.res1.link.at['velocity',t,link_name], self.res2.link.at['velocity',t,link_name], 7)
def test_node_demand(self):
for node_name, node in self.wn.nodes():
for t in self.res1.node.major_axis:
self.assertAlmostEqual(self.res1.node.at['demand',t,node_name], self.res2.node.at['demand',t,node_name], 7)
def test_node_expected_demand(self):
for node_name, node in self.wn.nodes():
for t in self.res1.node.major_axis:
self.assertAlmostEqual(self.res1.node.at['expected_demand',t,node_name], self.res2.node.at['expected_demand',t,node_name], 7)
def test_node_head(self):
for node_name, node in self.wn.nodes():
for t in self.res1.node.major_axis:
self.assertAlmostEqual(self.res1.node.at['head',t,node_name], self.res2.node.at['head',t,node_name], 7)
def test_node_pressure(self):
for node_name, node in self.wn.nodes():
for t in self.res1.node.major_axis:
self.assertAlmostEqual(self.res1.node.at['pressure',t,node_name], self.res2.node.at['pressure',t,node_name], 7)
if __name__ == '__main__':
unittest.main()
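# Editor's note: each test class repeats the same six comparison loops; a
# hedged refactoring sketch (assumption, not part of wntr) that would
# collapse them into one helper:
#
# def _assert_quantity_matches(self, axis, quantity, names):
#     res1 = getattr(self.res1, axis)
#     res2 = getattr(self.res2, axis)
#     for name in names:
#         for t in res1.major_axis:
#             self.assertAlmostEqual(res1.at[quantity, t, name],
#                                    res2.at[quantity, t, name], 7)
#
# e.g. self._assert_quantity_matches('link', 'flowrate',
#                                    [name for name, _ in self.wn.links()])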
| 40.800995
| 141
| 0.644921
| 1,204
| 8,201
| 4.245847
| 0.08804
| 0.048122
| 0.056338
| 0.035211
| 0.909624
| 0.901408
| 0.8973
| 0.8973
| 0.8973
| 0.887911
| 0
| 0.028178
| 0.221071
| 8,201
| 200
| 142
| 41.005
| 0.772073
| 0.014267
| 0
| 0.873418
| 0
| 0
| 0.053205
| 0
| 0
| 0
| 0
| 0
| 0.113924
| 1
| 0.151899
| false
| 0.018987
| 0.044304
| 0
| 0.21519
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
38f967c7128dc5b5379e52bcbeb53d84213a8727
| 1,454
|
py
|
Python
|
python/graphscope/experimental/nx/tests/algorithms/forward/tree/test_branchings.py
|
wenyuanyu/GraphScope
|
a40ccaf70557e608d8b091eb25ab04477f99ce21
|
[
"Apache-2.0"
] | 2
|
2020-12-15T08:42:10.000Z
|
2022-01-14T09:13:16.000Z
|
python/graphscope/experimental/nx/tests/algorithms/forward/tree/test_branchings.py
|
wenyuanyu/GraphScope
|
a40ccaf70557e608d8b091eb25ab04477f99ce21
|
[
"Apache-2.0"
] | 1
|
2020-12-22T13:15:40.000Z
|
2020-12-22T13:15:40.000Z
|
python/graphscope/experimental/nx/tests/algorithms/forward/tree/test_branchings.py
|
wenyuanyu/GraphScope
|
a40ccaf70557e608d8b091eb25ab04477f99ce21
|
[
"Apache-2.0"
] | 1
|
2021-11-23T03:40:43.000Z
|
2021-11-23T03:40:43.000Z
|
import networkx.algorithms.tree.tests.test_branchings
import pytest
from graphscope.experimental.nx.utils.compat import import_as_graphscope_nx
import_as_graphscope_nx(networkx.algorithms.tree.tests.test_branchings,
decorators=pytest.mark.usefixtures("graphscope_session"))
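# Editor's reading of the compat helper: import_as_graphscope_nx re-imports
# the upstream networkx test module under graphscope's nx layer and applies
# the given pytest decorator to the imported tests; the skip-marked
# functions below then override the multigraph cases one by one.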
@pytest.mark.skip(reason="not support multigraph")
def test_edge_attribute_preservation_multigraph(self):
pass
@pytest.mark.skip(reason="not support multigraph")
def test_greedy_max1(self):
pass
@pytest.mark.skip(reason="not support multigraph")
def test_greedy_max2(self):
pass
@pytest.mark.skip(reason="not support multigraph")
def test_greedy_max3(self):
pass
@pytest.mark.skip(reason="not support multigraph")
def test_greedy_min(self):
pass
@pytest.mark.skip(reason="not support multigraph")
def test_edmonds1_maxbranch(self):
pass
@pytest.mark.skip(reason="not support multigraph")
def test_edmonds1_maxarbor(self):
pass
@pytest.mark.skip(reason="not support multigraph")
def test_edmonds2_maxbranch(self):
pass
@pytest.mark.skip(reason="not support multigraph")
def test_edmonds2_maxarbor(self):
pass
@pytest.mark.skip(reason="not support multigraph")
def test_edmonds2_minarbor(self):
pass
@pytest.mark.skip(reason="not support multigraph")
def test_edmonds3_minbranch1(self):
pass
@pytest.mark.skip(reason="not support multigraph")
def test_edmonds3_minbranch2(self):
pass
| 21.382353
| 81
| 0.770289
| 194
| 1,454
| 5.592784
| 0.221649
| 0.119816
| 0.154839
| 0.221198
| 0.781567
| 0.781567
| 0.705991
| 0.705991
| 0.705991
| 0.662673
| 0
| 0.009419
| 0.123796
| 1,454
| 67
| 82
| 21.701493
| 0.842229
| 0
| 0
| 0.585366
| 0
| 0
| 0.193948
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.292683
| false
| 0.292683
| 0.097561
| 0
| 0.390244
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
ac0af4a73b6853e742f09b3a60934ab081546828
| 2,282
|
py
|
Python
|
src/waldur_core/logging/migrations/0004_json_field.py
|
geant-multicloud/MCMS-mastermind
|
81333180f5e56a0bc88d7dad448505448e01f24e
|
[
"MIT"
] | 26
|
2017-10-18T13:49:58.000Z
|
2021-09-19T04:44:09.000Z
|
src/waldur_core/logging/migrations/0004_json_field.py
|
geant-multicloud/MCMS-mastermind
|
81333180f5e56a0bc88d7dad448505448e01f24e
|
[
"MIT"
] | 14
|
2018-12-10T14:14:51.000Z
|
2021-06-07T10:33:39.000Z
|
src/waldur_core/logging/migrations/0004_json_field.py
|
geant-multicloud/MCMS-mastermind
|
81333180f5e56a0bc88d7dad448505448e01f24e
|
[
"MIT"
] | 32
|
2017-09-24T03:10:45.000Z
|
2021-10-16T16:41:09.000Z
|
# Generated by Django 1.11.7 on 2018-07-24 07:35
import django.contrib.postgres.fields.jsonb
from django.db import migrations


class Migration(migrations.Migration):

    dependencies = [
        ('logging', '0003_system_notification'),
    ]

    operations = [
        migrations.AlterField(
            model_name='emailhook',
            name='event_groups',
            field=django.contrib.postgres.fields.jsonb.JSONField(
                default=list, verbose_name='List of event groups'
            ),
        ),
        migrations.AlterField(
            model_name='emailhook',
            name='event_types',
            field=django.contrib.postgres.fields.jsonb.JSONField(
                verbose_name='List of event types'
            ),
        ),
        migrations.AlterField(
            model_name='pushhook',
            name='event_groups',
            field=django.contrib.postgres.fields.jsonb.JSONField(
                default=list, verbose_name='List of event groups'
            ),
        ),
        migrations.AlterField(
            model_name='pushhook',
            name='event_types',
            field=django.contrib.postgres.fields.jsonb.JSONField(
                verbose_name='List of event types'
            ),
        ),
        migrations.AlterField(
            model_name='systemnotification',
            name='event_groups',
            field=django.contrib.postgres.fields.jsonb.JSONField(
                default=list, verbose_name='List of event groups'
            ),
        ),
        migrations.AlterField(
            model_name='systemnotification',
            name='event_types',
            field=django.contrib.postgres.fields.jsonb.JSONField(
                verbose_name='List of event types'
            ),
        ),
        migrations.AlterField(
            model_name='webhook',
            name='event_groups',
            field=django.contrib.postgres.fields.jsonb.JSONField(
                default=list, verbose_name='List of event groups'
            ),
        ),
        migrations.AlterField(
            model_name='webhook',
            name='event_types',
            field=django.contrib.postgres.fields.jsonb.JSONField(
                verbose_name='List of event types'
            ),
        ),
    ]
| 32.6
| 65
| 0.55872
| 209
| 2,282
| 5.976077
| 0.205742
| 0.093675
| 0.151321
| 0.194556
| 0.877502
| 0.851882
| 0.851882
| 0.761409
| 0.761409
| 0.761409
| 0
| 0.013298
| 0.340929
| 2,282
| 69
| 66
| 33.072464
| 0.817154
| 0.020158
| 0
| 0.875
| 1
| 0
| 0.162489
| 0.010743
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.03125
| 0
| 0.078125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
ac16e2cddc8a79d5faf145ffcf15d6ef36deb3d5
| 103
|
py
|
Python
|
booklib/ui/__init__.py
|
ahartoto/booklib
|
9a5f455c324927c679f1508bcb7adaf0a63ac0a6
|
[
"MIT"
] | null | null | null |
booklib/ui/__init__.py
|
ahartoto/booklib
|
9a5f455c324927c679f1508bcb7adaf0a63ac0a6
|
[
"MIT"
] | 7
|
2019-03-29T05:44:46.000Z
|
2019-06-05T06:18:34.000Z
|
booklib/ui/__init__.py
|
ahartoto/booklib
|
9a5f455c324927c679f1508bcb7adaf0a63ac0a6
|
[
"MIT"
] | 2
|
2019-06-06T05:31:56.000Z
|
2019-07-01T17:10:25.000Z
|
# Filename: booklib/ui/__init__.py
# FIXME - no local imports
from . import main
from . import window
| 17.166667
| 34
| 0.737864
| 15
| 103
| 4.8
| 0.866667
| 0.277778
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.174757
| 103
| 5
| 35
| 20.6
| 0.847059
| 0.553398
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.2
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ac59b771608a8e3a3a072ce4a94f0ce22f7e2700
| 58,526
|
bzl
|
Python
|
examples/ef/gen/nuget.bzl
|
purkhusid/rules_dotnet
|
934e62d65ed3657be20b2ae3a63e032a2de9ff84
|
[
"Apache-2.0"
] | 143
|
2016-03-15T20:37:54.000Z
|
2022-02-25T12:30:08.000Z
|
examples/ef/gen/nuget.bzl
|
purkhusid/rules_dotnet
|
934e62d65ed3657be20b2ae3a63e032a2de9ff84
|
[
"Apache-2.0"
] | 176
|
2016-03-17T13:28:50.000Z
|
2022-03-30T21:19:24.000Z
|
examples/ef/gen/nuget.bzl
|
purkhusid/rules_dotnet
|
934e62d65ed3657be20b2ae3a63e032a2de9ff84
|
[
"Apache-2.0"
] | 79
|
2016-03-16T11:34:56.000Z
|
2022-02-04T10:54:00.000Z
|
"Template file"
load("@io_bazel_rules_dotnet//dotnet:defs.bzl", "nuget_package") # @unused
def project_dotnet_repositories_nuget():
""" Declares used nugets """
### Generated by the tool
nuget_package(
name = "microsoft.bcl.hashcode",
package = "microsoft.bcl.hashcode",
version = "1.1.0",
sha256 = "205bd708c5768e86a1cadca54360464a965ddad757d11b2cfbe65c0a5553fabd",
core_lib = {
"netcoreapp2.0": "lib/netstandard2.0/Microsoft.Bcl.HashCode.dll",
"netcoreapp2.1": "lib/netcoreapp2.1/Microsoft.Bcl.HashCode.dll",
"netcoreapp2.2": "lib/netcoreapp2.1/Microsoft.Bcl.HashCode.dll",
"netcoreapp3.0": "lib/netcoreapp2.1/Microsoft.Bcl.HashCode.dll",
"netcoreapp3.1": "lib/netcoreapp2.1/Microsoft.Bcl.HashCode.dll",
"net5.0": "lib/netcoreapp2.1/Microsoft.Bcl.HashCode.dll",
},
core_ref = {
"netcoreapp2.0": "ref/netstandard2.0/Microsoft.Bcl.HashCode.dll",
"netcoreapp2.1": "ref/netcoreapp2.1/Microsoft.Bcl.HashCode.dll",
"netcoreapp2.2": "ref/netcoreapp2.1/Microsoft.Bcl.HashCode.dll",
"netcoreapp3.0": "ref/netcoreapp2.1/Microsoft.Bcl.HashCode.dll",
"netcoreapp3.1": "ref/netcoreapp2.1/Microsoft.Bcl.HashCode.dll",
"net5.0": "ref/netcoreapp2.1/Microsoft.Bcl.HashCode.dll",
},
core_files = {
"netcoreapp2.0": [
"lib/netstandard2.0/Microsoft.Bcl.HashCode.dll",
"lib/netstandard2.0/Microsoft.Bcl.HashCode.xml",
],
"netcoreapp2.1": [
"lib/netcoreapp2.1/Microsoft.Bcl.HashCode.dll",
"lib/netcoreapp2.1/Microsoft.Bcl.HashCode.xml",
],
"netcoreapp2.2": [
"lib/netcoreapp2.1/Microsoft.Bcl.HashCode.dll",
"lib/netcoreapp2.1/Microsoft.Bcl.HashCode.xml",
],
"netcoreapp3.0": [
"lib/netcoreapp2.1/Microsoft.Bcl.HashCode.dll",
"lib/netcoreapp2.1/Microsoft.Bcl.HashCode.xml",
],
"netcoreapp3.1": [
"lib/netcoreapp2.1/Microsoft.Bcl.HashCode.dll",
"lib/netcoreapp2.1/Microsoft.Bcl.HashCode.xml",
],
"net5.0": [
"lib/netcoreapp2.1/Microsoft.Bcl.HashCode.dll",
"lib/netcoreapp2.1/Microsoft.Bcl.HashCode.xml",
],
},
)
nuget_package(
name = "microsoft.entityframeworkcore.abstractions",
package = "microsoft.entityframeworkcore.abstractions",
version = "3.1.3",
sha256 = "66030b74b0d8bbbb3f79d314b42e3bb296a2386e66439613bf29c011ac2d33ec",
core_lib = {
"netcoreapp2.0": "lib/netstandard2.0/Microsoft.EntityFrameworkCore.Abstractions.dll",
"netcoreapp2.1": "lib/netstandard2.0/Microsoft.EntityFrameworkCore.Abstractions.dll",
"netcoreapp2.2": "lib/netstandard2.0/Microsoft.EntityFrameworkCore.Abstractions.dll",
"netcoreapp3.0": "lib/netstandard2.0/Microsoft.EntityFrameworkCore.Abstractions.dll",
"netcoreapp3.1": "lib/netstandard2.0/Microsoft.EntityFrameworkCore.Abstractions.dll",
"net5.0": "lib/netstandard2.0/Microsoft.EntityFrameworkCore.Abstractions.dll",
},
core_files = {
"netcoreapp2.0": [
"lib/netstandard2.0/Microsoft.EntityFrameworkCore.Abstractions.dll",
"lib/netstandard2.0/Microsoft.EntityFrameworkCore.Abstractions.xml",
],
"netcoreapp2.1": [
"lib/netstandard2.0/Microsoft.EntityFrameworkCore.Abstractions.dll",
"lib/netstandard2.0/Microsoft.EntityFrameworkCore.Abstractions.xml",
],
"netcoreapp2.2": [
"lib/netstandard2.0/Microsoft.EntityFrameworkCore.Abstractions.dll",
"lib/netstandard2.0/Microsoft.EntityFrameworkCore.Abstractions.xml",
],
"netcoreapp3.0": [
"lib/netstandard2.0/Microsoft.EntityFrameworkCore.Abstractions.dll",
"lib/netstandard2.0/Microsoft.EntityFrameworkCore.Abstractions.xml",
],
"netcoreapp3.1": [
"lib/netstandard2.0/Microsoft.EntityFrameworkCore.Abstractions.dll",
"lib/netstandard2.0/Microsoft.EntityFrameworkCore.Abstractions.xml",
],
"net5.0": [
"lib/netstandard2.0/Microsoft.EntityFrameworkCore.Abstractions.dll",
"lib/netstandard2.0/Microsoft.EntityFrameworkCore.Abstractions.xml",
],
},
)
nuget_package(
name = "microsoft.entityframeworkcore.analyzers",
package = "microsoft.entityframeworkcore.analyzers",
version = "3.1.3",
sha256 = "f5a94e833e254b9610b9b09d4ce77db670aa614ed9d85c3f98ca98776330ea60",
)
nuget_package(
name = "microsoft.extensions.dependencyinjection.abstractions",
package = "microsoft.extensions.dependencyinjection.abstractions",
version = "3.1.3",
sha256 = "05cef9cd282f5001b460baf61fb40beb2eeccff15ff93823467a578dd3120e61",
core_lib = {
"netcoreapp2.0": "lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.Abstractions.dll",
"netcoreapp2.1": "lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.Abstractions.dll",
"netcoreapp2.2": "lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.Abstractions.dll",
"netcoreapp3.0": "lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.Abstractions.dll",
"netcoreapp3.1": "lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.Abstractions.dll",
"net5.0": "lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.Abstractions.dll",
},
core_files = {
"netcoreapp2.0": [
"lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.Abstractions.dll",
"lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.Abstractions.xml",
],
"netcoreapp2.1": [
"lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.Abstractions.dll",
"lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.Abstractions.xml",
],
"netcoreapp2.2": [
"lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.Abstractions.dll",
"lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.Abstractions.xml",
],
"netcoreapp3.0": [
"lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.Abstractions.dll",
"lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.Abstractions.xml",
],
"netcoreapp3.1": [
"lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.Abstractions.dll",
"lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.Abstractions.xml",
],
"net5.0": [
"lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.Abstractions.dll",
"lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.Abstractions.xml",
],
},
)
nuget_package(
name = "microsoft.extensions.logging.abstractions",
package = "microsoft.extensions.logging.abstractions",
version = "3.1.3",
sha256 = "54f89082481fb23d5f8717264c625c66dc63cc1b2e46aa91d2a1e5bbc8f61d76",
core_lib = {
"netcoreapp2.0": "lib/netstandard2.0/Microsoft.Extensions.Logging.Abstractions.dll",
"netcoreapp2.1": "lib/netstandard2.0/Microsoft.Extensions.Logging.Abstractions.dll",
"netcoreapp2.2": "lib/netstandard2.0/Microsoft.Extensions.Logging.Abstractions.dll",
"netcoreapp3.0": "lib/netstandard2.0/Microsoft.Extensions.Logging.Abstractions.dll",
"netcoreapp3.1": "lib/netstandard2.0/Microsoft.Extensions.Logging.Abstractions.dll",
"net5.0": "lib/netstandard2.0/Microsoft.Extensions.Logging.Abstractions.dll",
},
core_files = {
"netcoreapp2.0": [
"lib/netstandard2.0/Microsoft.Extensions.Logging.Abstractions.dll",
"lib/netstandard2.0/Microsoft.Extensions.Logging.Abstractions.xml",
],
"netcoreapp2.1": [
"lib/netstandard2.0/Microsoft.Extensions.Logging.Abstractions.dll",
"lib/netstandard2.0/Microsoft.Extensions.Logging.Abstractions.xml",
],
"netcoreapp2.2": [
"lib/netstandard2.0/Microsoft.Extensions.Logging.Abstractions.dll",
"lib/netstandard2.0/Microsoft.Extensions.Logging.Abstractions.xml",
],
"netcoreapp3.0": [
"lib/netstandard2.0/Microsoft.Extensions.Logging.Abstractions.dll",
"lib/netstandard2.0/Microsoft.Extensions.Logging.Abstractions.xml",
],
"netcoreapp3.1": [
"lib/netstandard2.0/Microsoft.Extensions.Logging.Abstractions.dll",
"lib/netstandard2.0/Microsoft.Extensions.Logging.Abstractions.xml",
],
"net5.0": [
"lib/netstandard2.0/Microsoft.Extensions.Logging.Abstractions.dll",
"lib/netstandard2.0/Microsoft.Extensions.Logging.Abstractions.xml",
],
},
)
nuget_package(
name = "microsoft.extensions.primitives",
package = "microsoft.extensions.primitives",
version = "3.1.3",
sha256 = "7b77cdb2f39328637eb66bf0982c07badc01c655c9f14e7185cc494b455d154b",
core_lib = {
"netcoreapp2.0": "lib/netstandard2.0/Microsoft.Extensions.Primitives.dll",
"netcoreapp2.1": "lib/netstandard2.0/Microsoft.Extensions.Primitives.dll",
"netcoreapp2.2": "lib/netstandard2.0/Microsoft.Extensions.Primitives.dll",
"netcoreapp3.0": "lib/netstandard2.0/Microsoft.Extensions.Primitives.dll",
"netcoreapp3.1": "lib/netcoreapp3.1/Microsoft.Extensions.Primitives.dll",
"net5.0": "lib/netcoreapp3.1/Microsoft.Extensions.Primitives.dll",
},
core_files = {
"netcoreapp2.0": [
"lib/netstandard2.0/Microsoft.Extensions.Primitives.dll",
"lib/netstandard2.0/Microsoft.Extensions.Primitives.xml",
],
"netcoreapp2.1": [
"lib/netstandard2.0/Microsoft.Extensions.Primitives.dll",
"lib/netstandard2.0/Microsoft.Extensions.Primitives.xml",
],
"netcoreapp2.2": [
"lib/netstandard2.0/Microsoft.Extensions.Primitives.dll",
"lib/netstandard2.0/Microsoft.Extensions.Primitives.xml",
],
"netcoreapp3.0": [
"lib/netstandard2.0/Microsoft.Extensions.Primitives.dll",
"lib/netstandard2.0/Microsoft.Extensions.Primitives.xml",
],
"netcoreapp3.1": [
"lib/netcoreapp3.1/Microsoft.Extensions.Primitives.dll",
"lib/netcoreapp3.1/Microsoft.Extensions.Primitives.xml",
],
"net5.0": [
"lib/netcoreapp3.1/Microsoft.Extensions.Primitives.dll",
"lib/netcoreapp3.1/Microsoft.Extensions.Primitives.xml",
],
},
)
nuget_package(
name = "microsoft.extensions.caching.abstractions",
package = "microsoft.extensions.caching.abstractions",
version = "3.1.3",
sha256 = "5d57e3c1ccb85f170060c8b6f55d2edf8fadd1aef532f2e2308388e5a3ff362f",
core_lib = {
"netcoreapp2.0": "lib/netstandard2.0/Microsoft.Extensions.Caching.Abstractions.dll",
"netcoreapp2.1": "lib/netstandard2.0/Microsoft.Extensions.Caching.Abstractions.dll",
"netcoreapp2.2": "lib/netstandard2.0/Microsoft.Extensions.Caching.Abstractions.dll",
"netcoreapp3.0": "lib/netstandard2.0/Microsoft.Extensions.Caching.Abstractions.dll",
"netcoreapp3.1": "lib/netcoreapp3.1/Microsoft.Extensions.Caching.Abstractions.dll",
"net5.0": "lib/netcoreapp3.1/Microsoft.Extensions.Caching.Abstractions.dll",
},
core_deps = {
"netcoreapp2.0": [
"@microsoft.extensions.primitives//:netcoreapp2.0_core",
],
"netcoreapp2.1": [
"@microsoft.extensions.primitives//:netcoreapp2.1_core",
],
"netcoreapp2.2": [
"@microsoft.extensions.primitives//:netcoreapp2.2_core",
],
"netcoreapp3.0": [
"@microsoft.extensions.primitives//:netcoreapp3.0_core",
],
"netcoreapp3.1": [
"@microsoft.extensions.primitives//:netcoreapp3.1_core",
],
"net5.0": [
"@microsoft.extensions.primitives//:net5.0_core",
],
},
core_files = {
"netcoreapp2.0": [
"lib/netstandard2.0/Microsoft.Extensions.Caching.Abstractions.dll",
"lib/netstandard2.0/Microsoft.Extensions.Caching.Abstractions.xml",
],
"netcoreapp2.1": [
"lib/netstandard2.0/Microsoft.Extensions.Caching.Abstractions.dll",
"lib/netstandard2.0/Microsoft.Extensions.Caching.Abstractions.xml",
],
"netcoreapp2.2": [
"lib/netstandard2.0/Microsoft.Extensions.Caching.Abstractions.dll",
"lib/netstandard2.0/Microsoft.Extensions.Caching.Abstractions.xml",
],
"netcoreapp3.0": [
"lib/netstandard2.0/Microsoft.Extensions.Caching.Abstractions.dll",
"lib/netstandard2.0/Microsoft.Extensions.Caching.Abstractions.xml",
],
"netcoreapp3.1": [
"lib/netcoreapp3.1/Microsoft.Extensions.Caching.Abstractions.dll",
"lib/netcoreapp3.1/Microsoft.Extensions.Caching.Abstractions.xml",
],
"net5.0": [
"lib/netcoreapp3.1/Microsoft.Extensions.Caching.Abstractions.dll",
"lib/netcoreapp3.1/Microsoft.Extensions.Caching.Abstractions.xml",
],
},
)
nuget_package(
name = "microsoft.extensions.configuration.abstractions",
package = "microsoft.extensions.configuration.abstractions",
version = "3.1.3",
sha256 = "a87aa1cdd6d6b6eab602cf3185e3a3a66ad486e3ae00ea7378fba4970eb94143",
core_lib = {
"netcoreapp2.0": "lib/netstandard2.0/Microsoft.Extensions.Configuration.Abstractions.dll",
"netcoreapp2.1": "lib/netstandard2.0/Microsoft.Extensions.Configuration.Abstractions.dll",
"netcoreapp2.2": "lib/netstandard2.0/Microsoft.Extensions.Configuration.Abstractions.dll",
"netcoreapp3.0": "lib/netstandard2.0/Microsoft.Extensions.Configuration.Abstractions.dll",
"netcoreapp3.1": "lib/netcoreapp3.1/Microsoft.Extensions.Configuration.Abstractions.dll",
"net5.0": "lib/netcoreapp3.1/Microsoft.Extensions.Configuration.Abstractions.dll",
},
core_deps = {
"netcoreapp2.0": [
"@microsoft.extensions.primitives//:netcoreapp2.0_core",
],
"netcoreapp2.1": [
"@microsoft.extensions.primitives//:netcoreapp2.1_core",
],
"netcoreapp2.2": [
"@microsoft.extensions.primitives//:netcoreapp2.2_core",
],
"netcoreapp3.0": [
"@microsoft.extensions.primitives//:netcoreapp3.0_core",
],
"netcoreapp3.1": [
"@microsoft.extensions.primitives//:netcoreapp3.1_core",
],
"net5.0": [
"@microsoft.extensions.primitives//:net5.0_core",
],
},
core_files = {
"netcoreapp2.0": [
"lib/netstandard2.0/Microsoft.Extensions.Configuration.Abstractions.dll",
"lib/netstandard2.0/Microsoft.Extensions.Configuration.Abstractions.xml",
],
"netcoreapp2.1": [
"lib/netstandard2.0/Microsoft.Extensions.Configuration.Abstractions.dll",
"lib/netstandard2.0/Microsoft.Extensions.Configuration.Abstractions.xml",
],
"netcoreapp2.2": [
"lib/netstandard2.0/Microsoft.Extensions.Configuration.Abstractions.dll",
"lib/netstandard2.0/Microsoft.Extensions.Configuration.Abstractions.xml",
],
"netcoreapp3.0": [
"lib/netstandard2.0/Microsoft.Extensions.Configuration.Abstractions.dll",
"lib/netstandard2.0/Microsoft.Extensions.Configuration.Abstractions.xml",
],
"netcoreapp3.1": [
"lib/netcoreapp3.1/Microsoft.Extensions.Configuration.Abstractions.dll",
"lib/netcoreapp3.1/Microsoft.Extensions.Configuration.Abstractions.xml",
],
"net5.0": [
"lib/netcoreapp3.1/Microsoft.Extensions.Configuration.Abstractions.dll",
"lib/netcoreapp3.1/Microsoft.Extensions.Configuration.Abstractions.xml",
],
},
)
nuget_package(
name = "microsoft.extensions.configuration",
package = "microsoft.extensions.configuration",
version = "3.1.3",
sha256 = "0534cf9650fa3697b95e54e48912caa919fb09f83622af68a9084d0335ff26aa",
core_lib = {
"netcoreapp2.0": "lib/netstandard2.0/Microsoft.Extensions.Configuration.dll",
"netcoreapp2.1": "lib/netstandard2.0/Microsoft.Extensions.Configuration.dll",
"netcoreapp2.2": "lib/netstandard2.0/Microsoft.Extensions.Configuration.dll",
"netcoreapp3.0": "lib/netstandard2.0/Microsoft.Extensions.Configuration.dll",
"netcoreapp3.1": "lib/netcoreapp3.1/Microsoft.Extensions.Configuration.dll",
"net5.0": "lib/netcoreapp3.1/Microsoft.Extensions.Configuration.dll",
},
core_deps = {
"netcoreapp2.0": [
"@microsoft.extensions.configuration.abstractions//:netcoreapp2.0_core",
],
"netcoreapp2.1": [
"@microsoft.extensions.configuration.abstractions//:netcoreapp2.1_core",
],
"netcoreapp2.2": [
"@microsoft.extensions.configuration.abstractions//:netcoreapp2.2_core",
],
"netcoreapp3.0": [
"@microsoft.extensions.configuration.abstractions//:netcoreapp3.0_core",
],
"netcoreapp3.1": [
"@microsoft.extensions.configuration.abstractions//:netcoreapp3.1_core",
],
"net5.0": [
"@microsoft.extensions.configuration.abstractions//:net5.0_core",
],
},
core_files = {
"netcoreapp2.0": [
"lib/netstandard2.0/Microsoft.Extensions.Configuration.dll",
"lib/netstandard2.0/Microsoft.Extensions.Configuration.xml",
],
"netcoreapp2.1": [
"lib/netstandard2.0/Microsoft.Extensions.Configuration.dll",
"lib/netstandard2.0/Microsoft.Extensions.Configuration.xml",
],
"netcoreapp2.2": [
"lib/netstandard2.0/Microsoft.Extensions.Configuration.dll",
"lib/netstandard2.0/Microsoft.Extensions.Configuration.xml",
],
"netcoreapp3.0": [
"lib/netstandard2.0/Microsoft.Extensions.Configuration.dll",
"lib/netstandard2.0/Microsoft.Extensions.Configuration.xml",
],
"netcoreapp3.1": [
"lib/netcoreapp3.1/Microsoft.Extensions.Configuration.dll",
"lib/netcoreapp3.1/Microsoft.Extensions.Configuration.xml",
],
"net5.0": [
"lib/netcoreapp3.1/Microsoft.Extensions.Configuration.dll",
"lib/netcoreapp3.1/Microsoft.Extensions.Configuration.xml",
],
},
)
nuget_package(
name = "microsoft.extensions.configuration.binder",
package = "microsoft.extensions.configuration.binder",
version = "3.1.3",
sha256 = "211e13f4db9af99074a3644bc3d9eaad93125dc6967e27bcd4972e88ce6ff8a6",
core_lib = {
"netcoreapp2.0": "lib/netstandard2.0/Microsoft.Extensions.Configuration.Binder.dll",
"netcoreapp2.1": "lib/netstandard2.0/Microsoft.Extensions.Configuration.Binder.dll",
"netcoreapp2.2": "lib/netstandard2.0/Microsoft.Extensions.Configuration.Binder.dll",
"netcoreapp3.0": "lib/netstandard2.0/Microsoft.Extensions.Configuration.Binder.dll",
"netcoreapp3.1": "lib/netcoreapp3.1/Microsoft.Extensions.Configuration.Binder.dll",
"net5.0": "lib/netcoreapp3.1/Microsoft.Extensions.Configuration.Binder.dll",
},
core_deps = {
"netcoreapp2.0": [
"@microsoft.extensions.configuration//:netcoreapp2.0_core",
],
"netcoreapp2.1": [
"@microsoft.extensions.configuration//:netcoreapp2.1_core",
],
"netcoreapp2.2": [
"@microsoft.extensions.configuration//:netcoreapp2.2_core",
],
"netcoreapp3.0": [
"@microsoft.extensions.configuration//:netcoreapp3.0_core",
],
"netcoreapp3.1": [
"@microsoft.extensions.configuration//:netcoreapp3.1_core",
],
"net5.0": [
"@microsoft.extensions.configuration//:net5.0_core",
],
},
core_files = {
"netcoreapp2.0": [
"lib/netstandard2.0/Microsoft.Extensions.Configuration.Binder.dll",
"lib/netstandard2.0/Microsoft.Extensions.Configuration.Binder.xml",
],
"netcoreapp2.1": [
"lib/netstandard2.0/Microsoft.Extensions.Configuration.Binder.dll",
"lib/netstandard2.0/Microsoft.Extensions.Configuration.Binder.xml",
],
"netcoreapp2.2": [
"lib/netstandard2.0/Microsoft.Extensions.Configuration.Binder.dll",
"lib/netstandard2.0/Microsoft.Extensions.Configuration.Binder.xml",
],
"netcoreapp3.0": [
"lib/netstandard2.0/Microsoft.Extensions.Configuration.Binder.dll",
"lib/netstandard2.0/Microsoft.Extensions.Configuration.Binder.xml",
],
"netcoreapp3.1": [
"lib/netcoreapp3.1/Microsoft.Extensions.Configuration.Binder.dll",
"lib/netcoreapp3.1/Microsoft.Extensions.Configuration.Binder.xml",
],
"net5.0": [
"lib/netcoreapp3.1/Microsoft.Extensions.Configuration.Binder.dll",
"lib/netcoreapp3.1/Microsoft.Extensions.Configuration.Binder.xml",
],
},
)
nuget_package(
name = "microsoft.extensions.options",
package = "microsoft.extensions.options",
version = "3.1.3",
sha256 = "141ab691a42d7ff85ac152337f59272b5b090a28a5308b9226d2401df364c1ba",
core_lib = {
"netcoreapp2.0": "lib/netstandard2.0/Microsoft.Extensions.Options.dll",
"netcoreapp2.1": "lib/netstandard2.0/Microsoft.Extensions.Options.dll",
"netcoreapp2.2": "lib/netstandard2.0/Microsoft.Extensions.Options.dll",
"netcoreapp3.0": "lib/netstandard2.0/Microsoft.Extensions.Options.dll",
"netcoreapp3.1": "lib/netcoreapp3.1/Microsoft.Extensions.Options.dll",
"net5.0": "lib/netcoreapp3.1/Microsoft.Extensions.Options.dll",
},
core_deps = {
"netcoreapp2.0": [
"@microsoft.extensions.dependencyinjection.abstractions//:netcoreapp2.0_core",
"@microsoft.extensions.primitives//:netcoreapp2.0_core",
],
"netcoreapp2.1": [
"@microsoft.extensions.dependencyinjection.abstractions//:netcoreapp2.1_core",
"@microsoft.extensions.primitives//:netcoreapp2.1_core",
],
"netcoreapp2.2": [
"@microsoft.extensions.dependencyinjection.abstractions//:netcoreapp2.2_core",
"@microsoft.extensions.primitives//:netcoreapp2.2_core",
],
"netcoreapp3.0": [
"@microsoft.extensions.dependencyinjection.abstractions//:netcoreapp3.0_core",
"@microsoft.extensions.primitives//:netcoreapp3.0_core",
],
"netcoreapp3.1": [
"@microsoft.extensions.dependencyinjection.abstractions//:netcoreapp3.1_core",
"@microsoft.extensions.primitives//:netcoreapp3.1_core",
],
"net5.0": [
"@microsoft.extensions.dependencyinjection.abstractions//:net5.0_core",
"@microsoft.extensions.primitives//:net5.0_core",
],
},
core_files = {
"netcoreapp2.0": [
"lib/netstandard2.0/Microsoft.Extensions.Options.dll",
"lib/netstandard2.0/Microsoft.Extensions.Options.xml",
],
"netcoreapp2.1": [
"lib/netstandard2.0/Microsoft.Extensions.Options.dll",
"lib/netstandard2.0/Microsoft.Extensions.Options.xml",
],
"netcoreapp2.2": [
"lib/netstandard2.0/Microsoft.Extensions.Options.dll",
"lib/netstandard2.0/Microsoft.Extensions.Options.xml",
],
"netcoreapp3.0": [
"lib/netstandard2.0/Microsoft.Extensions.Options.dll",
"lib/netstandard2.0/Microsoft.Extensions.Options.xml",
],
"netcoreapp3.1": [
"lib/netcoreapp3.1/Microsoft.Extensions.Options.dll",
"lib/netcoreapp3.1/Microsoft.Extensions.Options.xml",
],
"net5.0": [
"lib/netcoreapp3.1/Microsoft.Extensions.Options.dll",
"lib/netcoreapp3.1/Microsoft.Extensions.Options.xml",
],
},
)
nuget_package(
name = "microsoft.extensions.caching.memory",
package = "microsoft.extensions.caching.memory",
version = "3.1.3",
sha256 = "666bb1008289816e926fc8ad98248745ed8926da58338c637e08782370399290",
core_lib = {
"netcoreapp2.0": "lib/netstandard2.0/Microsoft.Extensions.Caching.Memory.dll",
"netcoreapp2.1": "lib/netstandard2.0/Microsoft.Extensions.Caching.Memory.dll",
"netcoreapp2.2": "lib/netstandard2.0/Microsoft.Extensions.Caching.Memory.dll",
"netcoreapp3.0": "lib/netstandard2.0/Microsoft.Extensions.Caching.Memory.dll",
"netcoreapp3.1": "lib/netcoreapp3.1/Microsoft.Extensions.Caching.Memory.dll",
"net5.0": "lib/netcoreapp3.1/Microsoft.Extensions.Caching.Memory.dll",
},
core_deps = {
"netcoreapp2.0": [
"@microsoft.extensions.caching.abstractions//:netcoreapp2.0_core",
"@microsoft.extensions.dependencyinjection.abstractions//:netcoreapp2.0_core",
"@microsoft.extensions.logging.abstractions//:netcoreapp2.0_core",
"@microsoft.extensions.options//:netcoreapp2.0_core",
],
"netcoreapp2.1": [
"@microsoft.extensions.caching.abstractions//:netcoreapp2.1_core",
"@microsoft.extensions.dependencyinjection.abstractions//:netcoreapp2.1_core",
"@microsoft.extensions.logging.abstractions//:netcoreapp2.1_core",
"@microsoft.extensions.options//:netcoreapp2.1_core",
],
"netcoreapp2.2": [
"@microsoft.extensions.caching.abstractions//:netcoreapp2.2_core",
"@microsoft.extensions.dependencyinjection.abstractions//:netcoreapp2.2_core",
"@microsoft.extensions.logging.abstractions//:netcoreapp2.2_core",
"@microsoft.extensions.options//:netcoreapp2.2_core",
],
"netcoreapp3.0": [
"@microsoft.extensions.caching.abstractions//:netcoreapp3.0_core",
"@microsoft.extensions.dependencyinjection.abstractions//:netcoreapp3.0_core",
"@microsoft.extensions.logging.abstractions//:netcoreapp3.0_core",
"@microsoft.extensions.options//:netcoreapp3.0_core",
],
"netcoreapp3.1": [
"@microsoft.extensions.caching.abstractions//:netcoreapp3.1_core",
"@microsoft.extensions.dependencyinjection.abstractions//:netcoreapp3.1_core",
"@microsoft.extensions.logging.abstractions//:netcoreapp3.1_core",
"@microsoft.extensions.options//:netcoreapp3.1_core",
],
"net5.0": [
"@microsoft.extensions.caching.abstractions//:net5.0_core",
"@microsoft.extensions.dependencyinjection.abstractions//:net5.0_core",
"@microsoft.extensions.logging.abstractions//:net5.0_core",
"@microsoft.extensions.options//:net5.0_core",
],
},
core_files = {
"netcoreapp2.0": [
"lib/netstandard2.0/Microsoft.Extensions.Caching.Memory.dll",
"lib/netstandard2.0/Microsoft.Extensions.Caching.Memory.xml",
],
"netcoreapp2.1": [
"lib/netstandard2.0/Microsoft.Extensions.Caching.Memory.dll",
"lib/netstandard2.0/Microsoft.Extensions.Caching.Memory.xml",
],
"netcoreapp2.2": [
"lib/netstandard2.0/Microsoft.Extensions.Caching.Memory.dll",
"lib/netstandard2.0/Microsoft.Extensions.Caching.Memory.xml",
],
"netcoreapp3.0": [
"lib/netstandard2.0/Microsoft.Extensions.Caching.Memory.dll",
"lib/netstandard2.0/Microsoft.Extensions.Caching.Memory.xml",
],
"netcoreapp3.1": [
"lib/netcoreapp3.1/Microsoft.Extensions.Caching.Memory.dll",
"lib/netcoreapp3.1/Microsoft.Extensions.Caching.Memory.xml",
],
"net5.0": [
"lib/netcoreapp3.1/Microsoft.Extensions.Caching.Memory.dll",
"lib/netcoreapp3.1/Microsoft.Extensions.Caching.Memory.xml",
],
},
)
nuget_package(
name = "system.buffers",
package = "system.buffers",
version = "4.4.0",
sha256 = "293c408586b0146d95e555f58f0de9cf1dc8ad05d1827d53b2f8233e0c406ea0",
)
nuget_package(
name = "system.componentmodel.annotations",
package = "system.componentmodel.annotations",
version = "4.7.0",
sha256 = "3f11bd96f7f6bff20022cecb84ee14afe1295ff2f99d86c4b340f6d60ca9a11b",
core_lib = {
"netcoreapp2.0": "lib/netstandard2.0/System.ComponentModel.Annotations.dll",
"netcoreapp2.1": "lib/netstandard2.0/System.ComponentModel.Annotations.dll",
"netcoreapp2.2": "lib/netstandard2.0/System.ComponentModel.Annotations.dll",
"netcoreapp3.0": "lib/netstandard2.1/System.ComponentModel.Annotations.dll",
"netcoreapp3.1": "lib/netstandard2.1/System.ComponentModel.Annotations.dll",
"net5.0": "lib/netstandard2.1/System.ComponentModel.Annotations.dll",
},
core_ref = {
"netcoreapp2.0": "ref/netstandard2.0/System.ComponentModel.Annotations.dll",
"netcoreapp2.1": "ref/netstandard2.0/System.ComponentModel.Annotations.dll",
"netcoreapp2.2": "ref/netstandard2.0/System.ComponentModel.Annotations.dll",
"netcoreapp3.0": "ref/netstandard2.1/System.ComponentModel.Annotations.dll",
"netcoreapp3.1": "ref/netstandard2.1/System.ComponentModel.Annotations.dll",
"net5.0": "ref/netstandard2.1/System.ComponentModel.Annotations.dll",
},
core_files = {
"netcoreapp2.0": [
"lib/netstandard2.0/System.ComponentModel.Annotations.dll",
],
"netcoreapp2.1": [
"lib/netstandard2.0/System.ComponentModel.Annotations.dll",
],
"netcoreapp2.2": [
"lib/netstandard2.0/System.ComponentModel.Annotations.dll",
],
"netcoreapp3.0": [
"lib/netstandard2.1/System.ComponentModel.Annotations.dll",
"lib/netstandard2.1/System.ComponentModel.Annotations.xml",
],
"netcoreapp3.1": [
"lib/netstandard2.1/System.ComponentModel.Annotations.dll",
"lib/netstandard2.1/System.ComponentModel.Annotations.xml",
],
"net5.0": [
"lib/netstandard2.1/System.ComponentModel.Annotations.dll",
"lib/netstandard2.1/System.ComponentModel.Annotations.xml",
],
},
)
nuget_package(
name = "system.numerics.vectors",
package = "system.numerics.vectors",
version = "4.4.0",
sha256 = "6ae5d02b67e52ff2699c1feb11c01c526e2f60c09830432258e0809486aabb65",
)
nuget_package(
name = "system.runtime.compilerservices.unsafe",
package = "system.runtime.compilerservices.unsafe",
version = "4.5.2",
sha256 = "f1e5175c658ed8b2fbb804cc6727b6882a503844e7da309c8d4846e9ca11e4ef",
core_lib = {
"netcoreapp2.0": "lib/netcoreapp2.0/System.Runtime.CompilerServices.Unsafe.dll",
"netcoreapp2.1": "lib/netcoreapp2.0/System.Runtime.CompilerServices.Unsafe.dll",
"netcoreapp2.2": "lib/netcoreapp2.0/System.Runtime.CompilerServices.Unsafe.dll",
"netcoreapp3.0": "lib/netcoreapp2.0/System.Runtime.CompilerServices.Unsafe.dll",
"netcoreapp3.1": "lib/netcoreapp2.0/System.Runtime.CompilerServices.Unsafe.dll",
"net5.0": "lib/netcoreapp2.0/System.Runtime.CompilerServices.Unsafe.dll",
},
core_ref = {
"netcoreapp2.0": "ref/netstandard2.0/System.Runtime.CompilerServices.Unsafe.dll",
"netcoreapp2.1": "ref/netstandard2.0/System.Runtime.CompilerServices.Unsafe.dll",
"netcoreapp2.2": "ref/netstandard2.0/System.Runtime.CompilerServices.Unsafe.dll",
"netcoreapp3.0": "ref/netstandard2.0/System.Runtime.CompilerServices.Unsafe.dll",
"netcoreapp3.1": "ref/netstandard2.0/System.Runtime.CompilerServices.Unsafe.dll",
"net5.0": "ref/netstandard2.0/System.Runtime.CompilerServices.Unsafe.dll",
},
core_files = {
"netcoreapp2.0": [
"lib/netcoreapp2.0/System.Runtime.CompilerServices.Unsafe.dll",
"lib/netcoreapp2.0/System.Runtime.CompilerServices.Unsafe.xml",
],
"netcoreapp2.1": [
"lib/netcoreapp2.0/System.Runtime.CompilerServices.Unsafe.dll",
"lib/netcoreapp2.0/System.Runtime.CompilerServices.Unsafe.xml",
],
"netcoreapp2.2": [
"lib/netcoreapp2.0/System.Runtime.CompilerServices.Unsafe.dll",
"lib/netcoreapp2.0/System.Runtime.CompilerServices.Unsafe.xml",
],
"netcoreapp3.0": [
"lib/netcoreapp2.0/System.Runtime.CompilerServices.Unsafe.dll",
"lib/netcoreapp2.0/System.Runtime.CompilerServices.Unsafe.xml",
],
"netcoreapp3.1": [
"lib/netcoreapp2.0/System.Runtime.CompilerServices.Unsafe.dll",
"lib/netcoreapp2.0/System.Runtime.CompilerServices.Unsafe.xml",
],
"net5.0": [
"lib/netcoreapp2.0/System.Runtime.CompilerServices.Unsafe.dll",
"lib/netcoreapp2.0/System.Runtime.CompilerServices.Unsafe.xml",
],
},
)
nuget_package(
name = "system.memory",
package = "system.memory",
version = "4.5.3",
sha256 = "0af97b45b45b46ef6a2b37910568dabd492c793da3859054595d523e2a545859",
core_lib = {
"netcoreapp2.0": "lib/netstandard2.0/System.Memory.dll",
},
core_deps = {
"netcoreapp2.0": [
"@system.runtime.compilerservices.unsafe//:netcoreapp2.0_core",
],
},
core_files = {
"netcoreapp2.0": [
"lib/netstandard2.0/System.Memory.dll",
"lib/netstandard2.0/System.Memory.xml",
],
},
)
nuget_package(
name = "system.collections.immutable",
package = "system.collections.immutable",
version = "1.7.0",
sha256 = "fd1301c5452e6e519a5844409d393be109ab4906d7fb1b3ce3216a99ac2633be",
core_lib = {
"netcoreapp2.0": "lib/netstandard2.0/System.Collections.Immutable.dll",
"netcoreapp2.1": "lib/netstandard2.0/System.Collections.Immutable.dll",
"netcoreapp2.2": "lib/netstandard2.0/System.Collections.Immutable.dll",
"netcoreapp3.0": "lib/netstandard2.0/System.Collections.Immutable.dll",
"netcoreapp3.1": "lib/netstandard2.0/System.Collections.Immutable.dll",
"net5.0": "lib/netstandard2.0/System.Collections.Immutable.dll",
},
core_deps = {
"netcoreapp2.0": [
"@system.memory//:netcoreapp2.0_core",
],
},
core_files = {
"netcoreapp2.0": [
"lib/netstandard2.0/System.Collections.Immutable.dll",
"lib/netstandard2.0/System.Collections.Immutable.xml",
],
"netcoreapp2.1": [
"lib/netstandard2.0/System.Collections.Immutable.dll",
"lib/netstandard2.0/System.Collections.Immutable.xml",
],
"netcoreapp2.2": [
"lib/netstandard2.0/System.Collections.Immutable.dll",
"lib/netstandard2.0/System.Collections.Immutable.xml",
],
"netcoreapp3.0": [
"lib/netstandard2.0/System.Collections.Immutable.dll",
"lib/netstandard2.0/System.Collections.Immutable.xml",
],
"netcoreapp3.1": [
"lib/netstandard2.0/System.Collections.Immutable.dll",
"lib/netstandard2.0/System.Collections.Immutable.xml",
],
"net5.0": [
"lib/netstandard2.0/System.Collections.Immutable.dll",
"lib/netstandard2.0/System.Collections.Immutable.xml",
],
},
)
nuget_package(
name = "system.diagnostics.diagnosticsource",
package = "system.diagnostics.diagnosticsource",
version = "4.7.0",
sha256 = "c122533632467046b4b4ead75c718d5648cfa413e72cb3fb64aa50f45bd92033",
core_lib = {
"netcoreapp2.0": "lib/netstandard1.3/System.Diagnostics.DiagnosticSource.dll",
"netcoreapp2.1": "lib/netstandard1.3/System.Diagnostics.DiagnosticSource.dll",
"netcoreapp2.2": "lib/netstandard1.3/System.Diagnostics.DiagnosticSource.dll",
"netcoreapp3.0": "lib/netstandard1.3/System.Diagnostics.DiagnosticSource.dll",
"netcoreapp3.1": "lib/netstandard1.3/System.Diagnostics.DiagnosticSource.dll",
"net5.0": "lib/netstandard1.3/System.Diagnostics.DiagnosticSource.dll",
},
core_deps = {
"netcoreapp2.0": [
"@system.memory//:netcoreapp2.0_core",
],
},
core_files = {
"netcoreapp2.0": [
"lib/netstandard1.3/System.Diagnostics.DiagnosticSource.dll",
"lib/netstandard1.3/System.Diagnostics.DiagnosticSource.xml",
],
"netcoreapp2.1": [
"lib/netstandard1.3/System.Diagnostics.DiagnosticSource.dll",
"lib/netstandard1.3/System.Diagnostics.DiagnosticSource.xml",
],
"netcoreapp2.2": [
"lib/netstandard1.3/System.Diagnostics.DiagnosticSource.dll",
"lib/netstandard1.3/System.Diagnostics.DiagnosticSource.xml",
],
"netcoreapp3.0": [
"lib/netstandard1.3/System.Diagnostics.DiagnosticSource.dll",
"lib/netstandard1.3/System.Diagnostics.DiagnosticSource.xml",
],
"netcoreapp3.1": [
"lib/netstandard1.3/System.Diagnostics.DiagnosticSource.dll",
"lib/netstandard1.3/System.Diagnostics.DiagnosticSource.xml",
],
"net5.0": [
"lib/netstandard1.3/System.Diagnostics.DiagnosticSource.dll",
"lib/netstandard1.3/System.Diagnostics.DiagnosticSource.xml",
],
},
)
nuget_package(
name = "system.threading.tasks.extensions",
package = "system.threading.tasks.extensions",
version = "4.5.2",
sha256 = "12a245f53a693074cabe947a7a6add03ad736a5316dc7c2b67b8fa067e1b06ea",
core_lib = {
"netcoreapp2.0": "lib/netstandard2.0/System.Threading.Tasks.Extensions.dll",
},
core_deps = {
"netcoreapp2.0": [
"@system.runtime.compilerservices.unsafe//:netcoreapp2.0_core",
],
},
core_files = {
"netcoreapp2.0": [
"lib/netstandard2.0/System.Threading.Tasks.Extensions.dll",
"lib/netstandard2.0/System.Threading.Tasks.Extensions.xml",
],
},
)
nuget_package(
name = "microsoft.bcl.asyncinterfaces",
package = "microsoft.bcl.asyncinterfaces",
version = "1.1.0",
sha256 = "4185688dfa9264a6c5f0fe83fda69c7f17ee98c691503f5210441bcf0ef705b7",
core_lib = {
"netcoreapp2.0": "lib/netstandard2.0/Microsoft.Bcl.AsyncInterfaces.dll",
"netcoreapp2.1": "lib/netstandard2.0/Microsoft.Bcl.AsyncInterfaces.dll",
"netcoreapp2.2": "lib/netstandard2.0/Microsoft.Bcl.AsyncInterfaces.dll",
"netcoreapp3.0": "lib/netstandard2.1/Microsoft.Bcl.AsyncInterfaces.dll",
"netcoreapp3.1": "lib/netstandard2.1/Microsoft.Bcl.AsyncInterfaces.dll",
"net5.0": "lib/netstandard2.1/Microsoft.Bcl.AsyncInterfaces.dll",
},
core_ref = {
"netcoreapp2.0": "ref/netstandard2.0/Microsoft.Bcl.AsyncInterfaces.dll",
"netcoreapp2.1": "ref/netstandard2.0/Microsoft.Bcl.AsyncInterfaces.dll",
"netcoreapp2.2": "ref/netstandard2.0/Microsoft.Bcl.AsyncInterfaces.dll",
"netcoreapp3.0": "ref/netstandard2.1/Microsoft.Bcl.AsyncInterfaces.dll",
"netcoreapp3.1": "ref/netstandard2.1/Microsoft.Bcl.AsyncInterfaces.dll",
"net5.0": "ref/netstandard2.1/Microsoft.Bcl.AsyncInterfaces.dll",
},
core_deps = {
"netcoreapp2.0": [
"@system.threading.tasks.extensions//:netcoreapp2.0_core",
],
},
core_files = {
"netcoreapp2.0": [
"lib/netstandard2.0/Microsoft.Bcl.AsyncInterfaces.dll",
"lib/netstandard2.0/Microsoft.Bcl.AsyncInterfaces.xml",
],
"netcoreapp2.1": [
"lib/netstandard2.0/Microsoft.Bcl.AsyncInterfaces.dll",
"lib/netstandard2.0/Microsoft.Bcl.AsyncInterfaces.xml",
],
"netcoreapp2.2": [
"lib/netstandard2.0/Microsoft.Bcl.AsyncInterfaces.dll",
"lib/netstandard2.0/Microsoft.Bcl.AsyncInterfaces.xml",
],
"netcoreapp3.0": [
"lib/netstandard2.1/Microsoft.Bcl.AsyncInterfaces.dll",
"lib/netstandard2.1/Microsoft.Bcl.AsyncInterfaces.xml",
],
"netcoreapp3.1": [
"lib/netstandard2.1/Microsoft.Bcl.AsyncInterfaces.dll",
"lib/netstandard2.1/Microsoft.Bcl.AsyncInterfaces.xml",
],
"net5.0": [
"lib/netstandard2.1/Microsoft.Bcl.AsyncInterfaces.dll",
"lib/netstandard2.1/Microsoft.Bcl.AsyncInterfaces.xml",
],
},
)
nuget_package(
name = "microsoft.extensions.dependencyinjection",
package = "microsoft.extensions.dependencyinjection",
version = "3.1.3",
sha256 = "01356b6b001f2c4913c198b236bd58e1c349759223b74c63964b862a32bb2b7f",
core_lib = {
"netcoreapp2.0": "lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.dll",
"netcoreapp2.1": "lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.dll",
"netcoreapp2.2": "lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.dll",
"netcoreapp3.0": "lib/netstandard2.1/Microsoft.Extensions.DependencyInjection.dll",
"netcoreapp3.1": "lib/netcoreapp3.1/Microsoft.Extensions.DependencyInjection.dll",
"net5.0": "lib/netcoreapp3.1/Microsoft.Extensions.DependencyInjection.dll",
},
core_deps = {
"netcoreapp2.0": [
"@microsoft.extensions.dependencyinjection.abstractions//:netcoreapp2.0_core",
"@microsoft.bcl.asyncinterfaces//:netcoreapp2.0_core",
],
"netcoreapp2.1": [
"@microsoft.extensions.dependencyinjection.abstractions//:netcoreapp2.1_core",
"@microsoft.bcl.asyncinterfaces//:netcoreapp2.1_core",
],
"netcoreapp2.2": [
"@microsoft.extensions.dependencyinjection.abstractions//:netcoreapp2.2_core",
"@microsoft.bcl.asyncinterfaces//:netcoreapp2.2_core",
],
"netcoreapp3.0": [
"@microsoft.extensions.dependencyinjection.abstractions//:netcoreapp3.0_core",
],
"netcoreapp3.1": [
"@microsoft.extensions.dependencyinjection.abstractions//:netcoreapp3.1_core",
],
"net5.0": [
"@microsoft.extensions.dependencyinjection.abstractions//:net5.0_core",
],
},
core_files = {
"netcoreapp2.0": [
"lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.dll",
"lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.xml",
],
"netcoreapp2.1": [
"lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.dll",
"lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.xml",
],
"netcoreapp2.2": [
"lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.dll",
"lib/netstandard2.0/Microsoft.Extensions.DependencyInjection.xml",
],
"netcoreapp3.0": [
"lib/netstandard2.1/Microsoft.Extensions.DependencyInjection.dll",
"lib/netstandard2.1/Microsoft.Extensions.DependencyInjection.xml",
],
"netcoreapp3.1": [
"lib/netcoreapp3.1/Microsoft.Extensions.DependencyInjection.dll",
"lib/netcoreapp3.1/Microsoft.Extensions.DependencyInjection.xml",
],
"net5.0": [
"lib/netcoreapp3.1/Microsoft.Extensions.DependencyInjection.dll",
"lib/netcoreapp3.1/Microsoft.Extensions.DependencyInjection.xml",
],
},
)
nuget_package(
name = "microsoft.extensions.logging",
package = "microsoft.extensions.logging",
version = "3.1.3",
sha256 = "8856cfe776a4f2332db29b9584f37aeac0deaf50ad84185828e38bbec620a6ee",
core_lib = {
"netcoreapp2.0": "lib/netstandard2.0/Microsoft.Extensions.Logging.dll",
"netcoreapp2.1": "lib/netstandard2.0/Microsoft.Extensions.Logging.dll",
"netcoreapp2.2": "lib/netstandard2.0/Microsoft.Extensions.Logging.dll",
"netcoreapp3.0": "lib/netstandard2.0/Microsoft.Extensions.Logging.dll",
"netcoreapp3.1": "lib/netcoreapp3.1/Microsoft.Extensions.Logging.dll",
"net5.0": "lib/netcoreapp3.1/Microsoft.Extensions.Logging.dll",
},
core_deps = {
"netcoreapp2.0": [
"@microsoft.extensions.configuration.binder//:netcoreapp2.0_core",
"@microsoft.extensions.dependencyinjection//:netcoreapp2.0_core",
"@microsoft.extensions.logging.abstractions//:netcoreapp2.0_core",
"@microsoft.extensions.options//:netcoreapp2.0_core",
],
"netcoreapp2.1": [
"@microsoft.extensions.configuration.binder//:netcoreapp2.1_core",
"@microsoft.extensions.dependencyinjection//:netcoreapp2.1_core",
"@microsoft.extensions.logging.abstractions//:netcoreapp2.1_core",
"@microsoft.extensions.options//:netcoreapp2.1_core",
],
"netcoreapp2.2": [
"@microsoft.extensions.configuration.binder//:netcoreapp2.2_core",
"@microsoft.extensions.dependencyinjection//:netcoreapp2.2_core",
"@microsoft.extensions.logging.abstractions//:netcoreapp2.2_core",
"@microsoft.extensions.options//:netcoreapp2.2_core",
],
"netcoreapp3.0": [
"@microsoft.extensions.configuration.binder//:netcoreapp3.0_core",
"@microsoft.extensions.dependencyinjection//:netcoreapp3.0_core",
"@microsoft.extensions.logging.abstractions//:netcoreapp3.0_core",
"@microsoft.extensions.options//:netcoreapp3.0_core",
],
"netcoreapp3.1": [
"@microsoft.extensions.configuration.binder//:netcoreapp3.1_core",
"@microsoft.extensions.dependencyinjection//:netcoreapp3.1_core",
"@microsoft.extensions.logging.abstractions//:netcoreapp3.1_core",
"@microsoft.extensions.options//:netcoreapp3.1_core",
],
"net5.0": [
"@microsoft.extensions.configuration.binder//:net5.0_core",
"@microsoft.extensions.dependencyinjection//:net5.0_core",
"@microsoft.extensions.logging.abstractions//:net5.0_core",
"@microsoft.extensions.options//:net5.0_core",
],
},
core_files = {
"netcoreapp2.0": [
"lib/netstandard2.0/Microsoft.Extensions.Logging.dll",
"lib/netstandard2.0/Microsoft.Extensions.Logging.xml",
],
"netcoreapp2.1": [
"lib/netstandard2.0/Microsoft.Extensions.Logging.dll",
"lib/netstandard2.0/Microsoft.Extensions.Logging.xml",
],
"netcoreapp2.2": [
"lib/netstandard2.0/Microsoft.Extensions.Logging.dll",
"lib/netstandard2.0/Microsoft.Extensions.Logging.xml",
],
"netcoreapp3.0": [
"lib/netstandard2.0/Microsoft.Extensions.Logging.dll",
"lib/netstandard2.0/Microsoft.Extensions.Logging.xml",
],
"netcoreapp3.1": [
"lib/netcoreapp3.1/Microsoft.Extensions.Logging.dll",
"lib/netcoreapp3.1/Microsoft.Extensions.Logging.xml",
],
"net5.0": [
"lib/netcoreapp3.1/Microsoft.Extensions.Logging.dll",
"lib/netcoreapp3.1/Microsoft.Extensions.Logging.xml",
],
},
)
nuget_package(
name = "microsoft.entityframeworkcore",
package = "microsoft.entityframeworkcore",
version = "3.1.3",
sha256 = "e52e2c8fc7214da4aa6b375b6dd376a53881867757d41a7000c1c645b38c0f23",
core_lib = {
"netcoreapp2.0": "lib/netstandard2.0/Microsoft.EntityFrameworkCore.dll",
"netcoreapp2.1": "lib/netstandard2.0/Microsoft.EntityFrameworkCore.dll",
"netcoreapp2.2": "lib/netstandard2.0/Microsoft.EntityFrameworkCore.dll",
"netcoreapp3.0": "lib/netstandard2.0/Microsoft.EntityFrameworkCore.dll",
"netcoreapp3.1": "lib/netstandard2.0/Microsoft.EntityFrameworkCore.dll",
"net5.0": "lib/netstandard2.0/Microsoft.EntityFrameworkCore.dll",
},
core_deps = {
"netcoreapp2.0": [
"@microsoft.entityframeworkcore.abstractions//:netcoreapp2.0_core",
"@microsoft.entityframeworkcore.analyzers//:netcoreapp2.0_core",
"@microsoft.bcl.asyncinterfaces//:netcoreapp2.0_core",
"@microsoft.bcl.hashcode//:netcoreapp2.0_core",
"@microsoft.extensions.caching.memory//:netcoreapp2.0_core",
"@microsoft.extensions.dependencyinjection//:netcoreapp2.0_core",
"@microsoft.extensions.logging//:netcoreapp2.0_core",
"@system.collections.immutable//:netcoreapp2.0_core",
"@system.componentmodel.annotations//:netcoreapp2.0_core",
"@system.diagnostics.diagnosticsource//:netcoreapp2.0_core",
],
"netcoreapp2.1": [
"@microsoft.entityframeworkcore.abstractions//:netcoreapp2.1_core",
"@microsoft.entityframeworkcore.analyzers//:netcoreapp2.1_core",
"@microsoft.bcl.asyncinterfaces//:netcoreapp2.1_core",
"@microsoft.bcl.hashcode//:netcoreapp2.1_core",
"@microsoft.extensions.caching.memory//:netcoreapp2.1_core",
"@microsoft.extensions.dependencyinjection//:netcoreapp2.1_core",
"@microsoft.extensions.logging//:netcoreapp2.1_core",
"@system.collections.immutable//:netcoreapp2.1_core",
"@system.componentmodel.annotations//:netcoreapp2.1_core",
"@system.diagnostics.diagnosticsource//:netcoreapp2.1_core",
],
"netcoreapp2.2": [
"@microsoft.entityframeworkcore.abstractions//:netcoreapp2.2_core",
"@microsoft.entityframeworkcore.analyzers//:netcoreapp2.2_core",
"@microsoft.bcl.asyncinterfaces//:netcoreapp2.2_core",
"@microsoft.bcl.hashcode//:netcoreapp2.2_core",
"@microsoft.extensions.caching.memory//:netcoreapp2.2_core",
"@microsoft.extensions.dependencyinjection//:netcoreapp2.2_core",
"@microsoft.extensions.logging//:netcoreapp2.2_core",
"@system.collections.immutable//:netcoreapp2.2_core",
"@system.componentmodel.annotations//:netcoreapp2.2_core",
"@system.diagnostics.diagnosticsource//:netcoreapp2.2_core",
],
"netcoreapp3.0": [
"@microsoft.entityframeworkcore.abstractions//:netcoreapp3.0_core",
"@microsoft.entityframeworkcore.analyzers//:netcoreapp3.0_core",
"@microsoft.bcl.asyncinterfaces//:netcoreapp3.0_core",
"@microsoft.bcl.hashcode//:netcoreapp3.0_core",
"@microsoft.extensions.caching.memory//:netcoreapp3.0_core",
"@microsoft.extensions.dependencyinjection//:netcoreapp3.0_core",
"@microsoft.extensions.logging//:netcoreapp3.0_core",
"@system.collections.immutable//:netcoreapp3.0_core",
"@system.componentmodel.annotations//:netcoreapp3.0_core",
"@system.diagnostics.diagnosticsource//:netcoreapp3.0_core",
],
"netcoreapp3.1": [
"@microsoft.entityframeworkcore.abstractions//:netcoreapp3.1_core",
"@microsoft.entityframeworkcore.analyzers//:netcoreapp3.1_core",
"@microsoft.bcl.asyncinterfaces//:netcoreapp3.1_core",
"@microsoft.bcl.hashcode//:netcoreapp3.1_core",
"@microsoft.extensions.caching.memory//:netcoreapp3.1_core",
"@microsoft.extensions.dependencyinjection//:netcoreapp3.1_core",
"@microsoft.extensions.logging//:netcoreapp3.1_core",
"@system.collections.immutable//:netcoreapp3.1_core",
"@system.componentmodel.annotations//:netcoreapp3.1_core",
"@system.diagnostics.diagnosticsource//:netcoreapp3.1_core",
],
"net5.0": [
"@microsoft.entityframeworkcore.abstractions//:net5.0_core",
"@microsoft.entityframeworkcore.analyzers//:net5.0_core",
"@microsoft.bcl.asyncinterfaces//:net5.0_core",
"@microsoft.bcl.hashcode//:net5.0_core",
"@microsoft.extensions.caching.memory//:net5.0_core",
"@microsoft.extensions.dependencyinjection//:net5.0_core",
"@microsoft.extensions.logging//:net5.0_core",
"@system.collections.immutable//:net5.0_core",
"@system.componentmodel.annotations//:net5.0_core",
"@system.diagnostics.diagnosticsource//:net5.0_core",
],
},
core_files = {
"netcoreapp2.0": [
"lib/netstandard2.0/Microsoft.EntityFrameworkCore.dll",
"lib/netstandard2.0/Microsoft.EntityFrameworkCore.xml",
],
"netcoreapp2.1": [
"lib/netstandard2.0/Microsoft.EntityFrameworkCore.dll",
"lib/netstandard2.0/Microsoft.EntityFrameworkCore.xml",
],
"netcoreapp2.2": [
"lib/netstandard2.0/Microsoft.EntityFrameworkCore.dll",
"lib/netstandard2.0/Microsoft.EntityFrameworkCore.xml",
],
"netcoreapp3.0": [
"lib/netstandard2.0/Microsoft.EntityFrameworkCore.dll",
"lib/netstandard2.0/Microsoft.EntityFrameworkCore.xml",
],
"netcoreapp3.1": [
"lib/netstandard2.0/Microsoft.EntityFrameworkCore.dll",
"lib/netstandard2.0/Microsoft.EntityFrameworkCore.xml",
],
"net5.0": [
"lib/netstandard2.0/Microsoft.EntityFrameworkCore.dll",
"lib/netstandard2.0/Microsoft.EntityFrameworkCore.xml",
],
},
)
### End of generated by the tool
return
| 50.759757
| 108
| 0.598144
| 4,876
| 58,526
| 7.128384
| 0.020919
| 0.179297
| 0.100811
| 0.13594
| 0.912826
| 0.852379
| 0.818229
| 0.807987
| 0.745037
| 0.607256
| 0
| 0.065755
| 0.270085
| 58,526
| 1,152
| 109
| 50.803819
| 0.747887
| 0.001623
| 0
| 0.662882
| 1
| 0
| 0.640676
| 0.573282
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000873
| true
| 0
| 0
| 0
| 0.001747
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
3bd18dcea39ca0271f70dfe253d2038e2b305d0c
| 2,706
|
py
|
Python
|
tests/test_io_processes.py
|
iamjz/fastteradata
|
4ec661dee3352265833a1e10a44755ba2c977431
|
[
"MIT"
] | 17
|
2018-06-28T22:21:47.000Z
|
2021-06-21T14:59:55.000Z
|
tests/test_io_processes.py
|
iamjz/fastteradata
|
4ec661dee3352265833a1e10a44755ba2c977431
|
[
"MIT"
] | 6
|
2017-11-29T19:03:59.000Z
|
2020-05-11T14:04:47.000Z
|
tests/test_io_processes.py
|
iamjz/fastteradata
|
4ec661dee3352265833a1e10a44755ba2c977431
|
[
"MIT"
] | 14
|
2017-11-15T14:57:22.000Z
|
2021-11-19T20:23:54.000Z
|
import pytest
from fastteradata import *
script_files = ['C:/Users/u374781/Desktop/FastTeradata/dev_files/TEMP/script_CNSLDTD_DRUG_OKLAHOMA_2013_export.txt', \
                'C:/Users/u374781/Desktop/FastTeradata/dev_files/TEMP/script_CNSLDTD_DRUG_OKLAHOMA_2014_export.txt', \
                'C:/Users/u374781/Desktop/FastTeradata/dev_files/TEMP/script_CNSLDTD_DRUG_OKLAHOMA_2015_export.txt', \
                'C:/Users/u374781/Desktop/FastTeradata/dev_files/TEMP/script_CNSLDTD_DRUG_OKLAHOMA_2016_export.txt', \
                'C:/Users/u374781/Desktop/FastTeradata/dev_files/TEMP/script_CNSLDTD_DRUG_OKLAHOMA_2017_export.txt']
valid_concat_str_windows = "type C:\\Users\\u374781\\Desktop\\FastTeradata\\dev_files\\TEMP\\data\\CNSLDTD_DRUG_OKLAHOMA_2013_export.txt C:\\Users\\u374781\\Desktop\\FastTeradata\\dev_files\\TEMP\\data\\CNSLDTD_DRUG_OKLAHOMA_2014_export.txt C:\\Users\\u374781\\Desktop\\FastTeradata\\dev_files\\TEMP\\data\\CNSLDTD_DRUG_OKLAHOMA_2015_export.txt C:\\Users\\u374781\\Desktop\\FastTeradata\\dev_files\\TEMP\\data\\CNSLDTD_DRUG_OKLAHOMA_2016_export.txt C:\\Users\\u374781\\Desktop\\FastTeradata\\dev_files\\TEMP\\data\\CNSLDTD_DRUG_OKLAHOMA_2017_export.txt > C:\\Users\\u374781\\Desktop\\FastTeradata\\dev_files\\TEMP\\data\\CNSLDTD_DRUG_OKLAHOMA_export.txt"
valid_concat_str_linux = "cat C:/Users/u374781/Desktop/FastTeradata/dev_files/TEMP/data/CNSLDTD_DRUG_OKLAHOMA_2013_export.txt C:/Users/u374781/Desktop/FastTeradata/dev_files/TEMP/data/CNSLDTD_DRUG_OKLAHOMA_2014_export.txt C:/Users/u374781/Desktop/FastTeradata/dev_files/TEMP/data/CNSLDTD_DRUG_OKLAHOMA_2015_export.txt C:/Users/u374781/Desktop/FastTeradata/dev_files/TEMP/data/CNSLDTD_DRUG_OKLAHOMA_2016_export.txt C:/Users/u374781/Desktop/FastTeradata/dev_files/TEMP/data/CNSLDTD_DRUG_OKLAHOMA_2017_export.txt > C:/Users/u374781/Desktop/FastTeradata/dev_files/TEMP/data/CNSLDTD_DRUG_OKLAHOMA_export.txt"
valid_rm_cmd_windows = "del "
valid_rm_cmd_linux = "rm "
valid_data_file_windows = "C:\\Users\\u374781\\Desktop\\FastTeradata\\dev_files\\TEMP\\data\\CNSLDTD_DRUG_OKLAHOMA_export.txt"
valid_data_file_linux = "C:/Users/u374781/Desktop/FastTeradata/dev_files/TEMP/data/CNSLDTD_DRUG_OKLAHOMA_export.txt"
concat_str, data_files, remove_cmd = combine_partitioned_file(script_files, combine_type="vertical")
def test_combine_partitioned_files_valid_concat():
    import os
    if os.name == "nt":
        assert valid_concat_str_windows == concat_str
    else:
        assert valid_concat_str_linux == concat_str

def test_combine_partitioned_files_valid_remove():
    import os
    if os.name == "nt":
        assert valid_rm_cmd_windows == remove_cmd
    else:
        assert valid_rm_cmd_linux == remove_cmd
| 77.314286
| 654
| 0.800443
| 386
| 2,706
| 5.227979
| 0.119171
| 0.056492
| 0.122398
| 0.188305
| 0.832507
| 0.82111
| 0.77998
| 0.77998
| 0.751239
| 0.751239
| 0
| 0.069992
| 0.081301
| 2,706
| 34
| 655
| 79.588235
| 0.741754
| 0
| 0
| 0.230769
| 0
| 0.076923
| 0.699557
| 0.684035
| 0
| 0
| 0
| 0
| 0.153846
| 1
| 0.076923
| false
| 0
| 0.153846
| 0
| 0.230769
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|