hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
79f742cdabbb8c0220bec876e45f2379ff5b758f
| 6,049
|
py
|
Python
|
python/test_gilded_rose.py
|
agooding-netizen/GildedRose-Refactoring-Kata
|
34c6503b3937a118f78da3f4e4a9b6db7e5ca676
|
[
"MIT"
] | null | null | null |
python/test_gilded_rose.py
|
agooding-netizen/GildedRose-Refactoring-Kata
|
34c6503b3937a118f78da3f4e4a9b6db7e5ca676
|
[
"MIT"
] | null | null | null |
python/test_gilded_rose.py
|
agooding-netizen/GildedRose-Refactoring-Kata
|
34c6503b3937a118f78da3f4e4a9b6db7e5ca676
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import unittest
from gilded_rose import Item, GildedRose
class GildedRoseTest(unittest.TestCase):
    """Unit tests for GildedRose.update_quality() across all item categories.

    Each test builds a one-item inventory, advances a single day with
    update_quality(), then checks the resulting quality or sell_in.

    Fix: the deprecated ``assertEquals`` alias (removed in Python 3.12)
    is replaced with ``assertEqual`` throughout.
    """

    # --- Backstage passes: quality rises faster as the concert nears ---
    def test_concert_under_5(self):
        # 5 days or fewer remaining: quality increases by 3.
        items = [Item("Backstage passes to a TAFKAL80ETC concert", 2, 30)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(33, items[0].quality)
    def test_concert_under_10(self):
        # 10 days or fewer remaining: quality increases by 2.
        items = [Item("Backstage passes to a TAFKAL80ETC concert", 8, 30)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(32, items[0].quality)
    def test_concert(self):
        # More than 10 days remaining: quality increases by 1.
        items = [Item("Backstage passes to a TAFKAL80ETC concert", 12, 30)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(31, items[0].quality)
    def test_concert_expired(self):
        # After the concert the passes are worthless.
        items = [Item("Backstage passes to a TAFKAL80ETC concert", 0, 24)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(0, items[0].quality)
    def test_concert_max_10(self):
        # Quality is capped at 50 even in the +2 window.
        items = [Item("Backstage passes to a TAFKAL80ETC concert", 9, 50)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(50, items[0].quality)
    def test_concert_max_5(self):
        # Quality is capped at 50 even in the +3 window.
        items = [Item("Backstage passes to a TAFKAL80ETC concert", 4, 50)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(50, items[0].quality)
    def test_concert_max(self):
        # Quality is capped at 50 in the +1 window too.
        items = [Item("Backstage passes to a TAFKAL80ETC concert", 13, 50)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(50, items[0].quality)

    # --- Ordinary items: quality drops by 1, by 2 once expired, never below 0 ---
    def test_vest(self):
        items = [Item("+5 Dexterity Vest", 10, 20)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(19, items[0].quality)
    def test_vest_expired(self):
        items = [Item("+5 Dexterity Vest", 0, 20)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(18, items[0].quality)
    def test__vest_min(self):
        items = [Item("+5 Dexterity Vest", 5, 0)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(0, items[0].quality)
    def test_mongoose(self):
        items = [Item("Elixir of the Mongoose", 10, 20)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(19, items[0].quality)
    def test_mongoose_expired(self):
        items = [Item("Elixir of the Mongoose", 0, 20)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(18, items[0].quality)
    def test_mongoose_min(self):
        items = [Item("Elixir of the Mongoose", 5, 0)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(0, items[0].quality)

    # --- Sulfuras: legendary, quality fixed at 80 and never changes ---
    def test_sulfuras(self):
        items = [Item("Sulfuras, Hand of Ragnaros", 10, 80)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(80, items[0].quality)
    def test_sulfuras_expired(self):
        items = [Item("Sulfuras, Hand of Ragnaros", 0, 80)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(80, items[0].quality)

    # --- Aged Brie: quality increases with age, twice as fast after sell_in ---
    def test_brie(self):
        items = [Item("Aged Brie", 10, 0)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(1, items[0].quality)
    def test_brie_expired(self):
        items = [Item("Aged Brie", 0, 0)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(2, items[0].quality)
    def test_brie_max(self):
        # Quality never exceeds 50.
        items = [Item("Aged Brie", 12, 49)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(50, items[0].quality)

    # --- Conjured items: degrade twice as fast as ordinary items ---
    def test_conjured(self):
        items = [Item("Conjured Mana Cake", 10, 20)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(18, items[0].quality)
    def test_conjured_expired(self):
        items = [Item("Conjured Mana Cake", 0, 20)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(16, items[0].quality)
    def test_conjured_min(self):
        items = [Item("Conjured Mana Cake", 5, 0)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(0, items[0].quality)

    # --- sell_in handling: decremented by 1 for all items except Sulfuras ---
    def test_concert_sell_in(self):
        items = [Item("Backstage passes to a TAFKAL80ETC concert", 10, 20)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(9, items[0].sell_in)
    def test_vest_sell_in(self):
        items = [Item("+5 Dexterity Vest", 10, 20)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(9, items[0].sell_in)
    def test_mongoose_sell_in(self):
        items = [Item("Elixir of the Mongoose", 10, 20)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(9, items[0].sell_in)
    def test_sulfuras_sell_in(self):
        # Sulfuras never has to be sold: sell_in is untouched.
        items = [Item("Sulfuras, Hand of Ragnaros", 0, 20)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(0, items[0].sell_in)
    def test_brie_sell_in(self):
        items = [Item("Aged Brie", 10, 20)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(9, items[0].sell_in)
    def test_conjured_sell_in(self):
        items = [Item("Conjured Mana Cake", 10, 20)]
        gilded_rose = GildedRose(items)
        gilded_rose.update_quality()
        self.assertEqual(9, items[0].sell_in)
# Allow running this test module directly: discovers and runs all tests above.
if __name__ == '__main__':
    unittest.main()
| 34.965318
| 75
| 0.642586
| 759
| 6,049
| 4.916996
| 0.092227
| 0.147374
| 0.094051
| 0.180868
| 0.932208
| 0.911308
| 0.804662
| 0.777331
| 0.758307
| 0.705788
| 0
| 0.040296
| 0.241032
| 6,049
| 172
| 76
| 35.168605
| 0.772599
| 0.003472
| 0
| 0.571429
| 0
| 0
| 0.112512
| 0
| 0
| 0
| 0
| 0
| 0.192857
| 1
| 0.192857
| false
| 0.057143
| 0.014286
| 0
| 0.214286
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
0319ac2bd885a373250b677223d60ab2ad10b322
| 173
|
py
|
Python
|
reports/__init__.py
|
shawnclq/Adversarial-Threat-Detector
|
fb5cc98ed46138e6630989721f6b4481cf1879a5
|
[
"MIT"
] | 29
|
2021-02-12T14:17:35.000Z
|
2022-02-22T17:11:05.000Z
|
reports/__init__.py
|
shawnclq/Adversarial-Threat-Detector
|
fb5cc98ed46138e6630989721f6b4481cf1879a5
|
[
"MIT"
] | null | null | null |
reports/__init__.py
|
shawnclq/Adversarial-Threat-Detector
|
fb5cc98ed46138e6630989721f6b4481cf1879a5
|
[
"MIT"
] | 6
|
2021-02-19T10:12:45.000Z
|
2021-12-09T02:18:54.000Z
|
# Module providing report generation utilities (HTML and ipynb reports).
from reports.report_utility import ReportUtility
from reports.report_html import HtmlReport
from reports.report_ipynb import IpynbReport
| 34.6
| 48
| 0.872832
| 22
| 173
| 6.727273
| 0.636364
| 0.222973
| 0.344595
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098266
| 173
| 4
| 49
| 43.25
| 0.948718
| 0.190751
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0335b861256d5ca962ca3ef0fedbc5d0cbfc301e
| 68,613
|
py
|
Python
|
benchmarks/SimResults/_bigLittle_hrrs_splash_tugberk_heteroFair/cmp_raytrace/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_splash_tugberk_heteroFair/cmp_raytrace/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
benchmarks/SimResults/_bigLittle_hrrs_splash_tugberk_heteroFair/cmp_raytrace/power.py
|
TugberkArkose/MLScheduler
|
e493b6cbf7b9d29a2c9300d7dd6f0c2f102e4061
|
[
"Unlicense"
] | null | null | null |
power = {'BUSES': {'Area': 1.33155,
'Bus/Area': 1.33155,
'Bus/Gate Leakage': 0.00662954,
'Bus/Peak Dynamic': 0.0,
'Bus/Runtime Dynamic': 0.0,
'Bus/Subthreshold Leakage': 0.0691322,
'Bus/Subthreshold Leakage with power gating': 0.0259246,
'Gate Leakage': 0.00662954,
'Peak Dynamic': 0.0,
'Runtime Dynamic': 0.0,
'Subthreshold Leakage': 0.0691322,
'Subthreshold Leakage with power gating': 0.0259246},
'Core': [{'Area': 32.6082,
'Execution Unit/Area': 8.2042,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0466431,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.239324,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.315017,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.122718,
'Execution Unit/Instruction Scheduler/Area': 2.17927,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.328073,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.00115349,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.20978,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.450727,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.017004,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00962066,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00730101,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 1.00996,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00529112,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 2.07911,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.780497,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0800117,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0455351,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 4.84781,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.841232,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.000856399,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.55892,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.447637,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.0178624,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00897339,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 1.67886,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.114878,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.0641291,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.397229,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 6.18764,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0595135,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.0163392,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.133049,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.120839,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.192563,
'Execution Unit/Register Files/Runtime Dynamic': 0.137178,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0442632,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00607074,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.334279,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.85709,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.0920413,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0345155,
'Execution Unit/Runtime Dynamic': 3.31783,
'Execution Unit/Subthreshold Leakage': 1.83518,
'Execution Unit/Subthreshold Leakage with power gating': 0.709678,
'Gate Leakage': 0.372997,
'Instruction Fetch Unit/Area': 5.86007,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00370645,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00370645,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00323215,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.00125331,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.00173585,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0123809,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0354002,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0590479,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.116165,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 6.43323,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.387954,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.394549,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 8.96874,
'Instruction Fetch Unit/Runtime Dynamic': 0.94645,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932587,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.408542,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0538804,
'L2/Runtime Dynamic': 0.0110246,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80969,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 4.63042,
'Load Store Unit/Data Cache/Runtime Dynamic': 1.63627,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0351387,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.109781,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.109781,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 5.15094,
'Load Store Unit/Runtime Dynamic': 2.28746,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.270702,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.541404,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591622,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283406,
'Memory Management Unit/Area': 0.434579,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0960729,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0968322,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00813591,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.399995,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0637469,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.724585,
'Memory Management Unit/Runtime Dynamic': 0.160579,
'Memory Management Unit/Subthreshold Leakage': 0.0769113,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0399462,
'Peak Dynamic': 25.6475,
'Renaming Unit/Area': 0.369768,
'Renaming Unit/FP Front End RAT/Area': 0.168486,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00489731,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 3.33511,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.207629,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0437281,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.024925,
'Renaming Unit/Free List/Area': 0.0414755,
'Renaming Unit/Free List/Gate Leakage': 4.15911e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0401324,
'Renaming Unit/Free List/Runtime Dynamic': 0.0255462,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000670426,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000377987,
'Renaming Unit/Gate Leakage': 0.00863632,
'Renaming Unit/Int Front End RAT/Area': 0.114751,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.00038343,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.86945,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.230114,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00611897,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00348781,
'Renaming Unit/Peak Dynamic': 4.56169,
'Renaming Unit/Runtime Dynamic': 0.463289,
'Renaming Unit/Subthreshold Leakage': 0.070483,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0362779,
'Runtime Dynamic': 7.18663,
'Subthreshold Leakage': 6.21877,
'Subthreshold Leakage with power gating': 2.58311},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0231676,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.220885,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.157012,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.169059,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.272686,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.137643,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.579389,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.169282,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.41247,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0296628,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00709111,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.058654,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0524431,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0883168,
'Execution Unit/Register Files/Runtime Dynamic': 0.0595342,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.129361,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.332118,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.5973,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.0017317,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.0017317,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00156419,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000636084,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000753349,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00578094,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.014607,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0504149,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.20682,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.165815,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.171232,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.58097,
'Instruction Fetch Unit/Runtime Dynamic': 0.407849,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0254974,
'L2/Runtime Dynamic': 0.00518343,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.72544,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.717634,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0481506,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0481507,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.95282,
'Load Store Unit/Runtime Dynamic': 1.00325,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.118731,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.237463,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0421381,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0424991,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.199388,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0272478,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.427883,
'Memory Management Unit/Runtime Dynamic': 0.069747,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 16.9891,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.078029,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00857709,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0845108,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.171117,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.25445,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0260819,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.223175,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.179711,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.167767,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.270601,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.13659,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.574958,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.164325,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.43877,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0339513,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00703689,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0590703,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0520421,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0930216,
'Execution Unit/Register Files/Runtime Dynamic': 0.059079,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.130967,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.337223,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.59981,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00159734,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00159734,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00144029,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000584364,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000747589,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.00538255,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0135641,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0500294,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.1823,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.160699,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.169922,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.55526,
'Instruction Fetch Unit/Runtime Dynamic': 0.399597,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.0295587,
'L2/Runtime Dynamic': 0.0058385,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.7387,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.724339,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0485798,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0485797,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.96811,
'Load Store Unit/Runtime Dynamic': 1.0125,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.11979,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.239579,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0425137,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0429429,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.197864,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0263873,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.427004,
'Memory Management Unit/Runtime Dynamic': 0.0693302,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 17.0082,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0893109,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00865606,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0836688,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.181636,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.26871,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328},
{'Area': 32.0201,
'Execution Unit/Area': 7.68434,
'Execution Unit/Complex ALUs/Area': 0.235435,
'Execution Unit/Complex ALUs/Gate Leakage': 0.0132646,
'Execution Unit/Complex ALUs/Peak Dynamic': 0.0261018,
'Execution Unit/Complex ALUs/Runtime Dynamic': 0.22319,
'Execution Unit/Complex ALUs/Subthreshold Leakage': 0.20111,
'Execution Unit/Complex ALUs/Subthreshold Leakage with power gating': 0.0754163,
'Execution Unit/Floating Point Units/Area': 4.6585,
'Execution Unit/Floating Point Units/Gate Leakage': 0.0656156,
'Execution Unit/Floating Point Units/Peak Dynamic': 0.18093,
'Execution Unit/Floating Point Units/Runtime Dynamic': 0.304033,
'Execution Unit/Floating Point Units/Subthreshold Leakage': 0.994829,
'Execution Unit/Floating Point Units/Subthreshold Leakage with power gating': 0.373061,
'Execution Unit/Gate Leakage': 0.120359,
'Execution Unit/Instruction Scheduler/Area': 1.66526,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Area': 0.275653,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Gate Leakage': 0.000977433,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Peak Dynamic': 1.04181,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Runtime Dynamic': 0.168754,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage': 0.0143453,
'Execution Unit/Instruction Scheduler/FP Instruction Window/Subthreshold Leakage with power gating': 0.00810519,
'Execution Unit/Instruction Scheduler/Gate Leakage': 0.00568913,
'Execution Unit/Instruction Scheduler/Instruction Window/Area': 0.805223,
'Execution Unit/Instruction Scheduler/Instruction Window/Gate Leakage': 0.00414562,
'Execution Unit/Instruction Scheduler/Instruction Window/Peak Dynamic': 1.6763,
'Execution Unit/Instruction Scheduler/Instruction Window/Runtime Dynamic': 0.272193,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage': 0.0625755,
'Execution Unit/Instruction Scheduler/Instruction Window/Subthreshold Leakage with power gating': 0.0355964,
'Execution Unit/Instruction Scheduler/Peak Dynamic': 3.82262,
'Execution Unit/Instruction Scheduler/ROB/Area': 0.584388,
'Execution Unit/Instruction Scheduler/ROB/Gate Leakage': 0.00056608,
'Execution Unit/Instruction Scheduler/ROB/Peak Dynamic': 1.10451,
'Execution Unit/Instruction Scheduler/ROB/Runtime Dynamic': 0.137394,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage': 0.00906853,
'Execution Unit/Instruction Scheduler/ROB/Subthreshold Leakage with power gating': 0.00364446,
'Execution Unit/Instruction Scheduler/Runtime Dynamic': 0.578341,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage': 0.0859892,
'Execution Unit/Instruction Scheduler/Subthreshold Leakage with power gating': 0.047346,
'Execution Unit/Integer ALUs/Area': 0.47087,
'Execution Unit/Integer ALUs/Gate Leakage': 0.0265291,
'Execution Unit/Integer ALUs/Peak Dynamic': 0.165265,
'Execution Unit/Integer ALUs/Runtime Dynamic': 0.101344,
'Execution Unit/Integer ALUs/Subthreshold Leakage': 0.40222,
'Execution Unit/Integer ALUs/Subthreshold Leakage with power gating': 0.150833,
'Execution Unit/Peak Dynamic': 4.44212,
'Execution Unit/Register Files/Area': 0.570804,
'Execution Unit/Register Files/Floating Point RF/Area': 0.208131,
'Execution Unit/Register Files/Floating Point RF/Gate Leakage': 0.000232788,
'Execution Unit/Register Files/Floating Point RF/Peak Dynamic': 0.0341817,
'Execution Unit/Register Files/Floating Point RF/Runtime Dynamic': 0.00707829,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage': 0.00399698,
'Execution Unit/Register Files/Floating Point RF/Subthreshold Leakage with power gating': 0.00176968,
'Execution Unit/Register Files/Gate Leakage': 0.000622708,
'Execution Unit/Register Files/Integer RF/Area': 0.362673,
'Execution Unit/Register Files/Integer RF/Gate Leakage': 0.00038992,
'Execution Unit/Register Files/Integer RF/Peak Dynamic': 0.0593315,
'Execution Unit/Register Files/Integer RF/Runtime Dynamic': 0.0523483,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage': 0.00614175,
'Execution Unit/Register Files/Integer RF/Subthreshold Leakage with power gating': 0.00246675,
'Execution Unit/Register Files/Peak Dynamic': 0.0935131,
'Execution Unit/Register Files/Runtime Dynamic': 0.0594266,
'Execution Unit/Register Files/Subthreshold Leakage': 0.0101387,
'Execution Unit/Register Files/Subthreshold Leakage with power gating': 0.00423643,
'Execution Unit/Results Broadcast Bus/Area Overhead': 0.0390912,
'Execution Unit/Results Broadcast Bus/Gate Leakage': 0.00537402,
'Execution Unit/Results Broadcast Bus/Peak Dynamic': 0.131522,
'Execution Unit/Results Broadcast Bus/Runtime Dynamic': 0.339643,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage': 0.081478,
'Execution Unit/Results Broadcast Bus/Subthreshold Leakage with power gating': 0.0305543,
'Execution Unit/Runtime Dynamic': 1.60598,
'Execution Unit/Subthreshold Leakage': 1.79543,
'Execution Unit/Subthreshold Leakage with power gating': 0.688821,
'Gate Leakage': 0.368936,
'Instruction Fetch Unit/Area': 5.85939,
'Instruction Fetch Unit/Branch Predictor/Area': 0.138516,
'Instruction Fetch Unit/Branch Predictor/Chooser/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Chooser/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Chooser/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Chooser/Runtime Dynamic': 0.00158782,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Chooser/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/Gate Leakage': 0.000757657,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Area': 0.0435221,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Gate Leakage': 0.000278362,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Peak Dynamic': 0.0168831,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Runtime Dynamic': 0.00158782,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage': 0.00759719,
'Instruction Fetch Unit/Branch Predictor/Global Predictor/Subthreshold Leakage with power gating': 0.0039236,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Area': 0.0257064,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Gate Leakage': 0.000154548,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Peak Dynamic': 0.0142575,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Runtime Dynamic': 0.00143097,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage': 0.00384344,
'Instruction Fetch Unit/Branch Predictor/L1_Local Predictor/Subthreshold Leakage with power gating': 0.00198631,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Area': 0.0151917,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Gate Leakage': 8.00196e-05,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Peak Dynamic': 0.00527447,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Runtime Dynamic': 0.000580191,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage': 0.00181347,
'Instruction Fetch Unit/Branch Predictor/L2_Local Predictor/Subthreshold Leakage with power gating': 0.000957045,
'Instruction Fetch Unit/Branch Predictor/Peak Dynamic': 0.0597838,
'Instruction Fetch Unit/Branch Predictor/RAS/Area': 0.0105732,
'Instruction Fetch Unit/Branch Predictor/RAS/Gate Leakage': 4.63858e-05,
'Instruction Fetch Unit/Branch Predictor/RAS/Peak Dynamic': 0.0117602,
'Instruction Fetch Unit/Branch Predictor/RAS/Runtime Dynamic': 0.000751987,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage': 0.000932505,
'Instruction Fetch Unit/Branch Predictor/RAS/Subthreshold Leakage with power gating': 0.000494733,
'Instruction Fetch Unit/Branch Predictor/Runtime Dynamic': 0.0053586,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage': 0.0199703,
'Instruction Fetch Unit/Branch Predictor/Subthreshold Leakage with power gating': 0.0103282,
'Instruction Fetch Unit/Branch Target Buffer/Area': 0.64954,
'Instruction Fetch Unit/Branch Target Buffer/Gate Leakage': 0.00272758,
'Instruction Fetch Unit/Branch Target Buffer/Peak Dynamic': 0.177867,
'Instruction Fetch Unit/Branch Target Buffer/Runtime Dynamic': 0.0135098,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage': 0.0811682,
'Instruction Fetch Unit/Branch Target Buffer/Subthreshold Leakage with power gating': 0.0435357,
'Instruction Fetch Unit/Gate Leakage': 0.0589979,
'Instruction Fetch Unit/Instruction Buffer/Area': 0.0226323,
'Instruction Fetch Unit/Instruction Buffer/Gate Leakage': 6.83558e-05,
'Instruction Fetch Unit/Instruction Buffer/Peak Dynamic': 0.606827,
'Instruction Fetch Unit/Instruction Buffer/Runtime Dynamic': 0.0503237,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage': 0.00151885,
'Instruction Fetch Unit/Instruction Buffer/Subthreshold Leakage with power gating': 0.000701682,
'Instruction Fetch Unit/Instruction Cache/Area': 3.14635,
'Instruction Fetch Unit/Instruction Cache/Gate Leakage': 0.029931,
'Instruction Fetch Unit/Instruction Cache/Peak Dynamic': 3.20102,
'Instruction Fetch Unit/Instruction Cache/Runtime Dynamic': 0.160676,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage': 0.367022,
'Instruction Fetch Unit/Instruction Cache/Subthreshold Leakage with power gating': 0.180386,
'Instruction Fetch Unit/Instruction Decoder/Area': 1.85799,
'Instruction Fetch Unit/Instruction Decoder/Gate Leakage': 0.0222493,
'Instruction Fetch Unit/Instruction Decoder/Peak Dynamic': 1.37404,
'Instruction Fetch Unit/Instruction Decoder/Runtime Dynamic': 0.170922,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage': 0.442943,
'Instruction Fetch Unit/Instruction Decoder/Subthreshold Leakage with power gating': 0.166104,
'Instruction Fetch Unit/Peak Dynamic': 5.57489,
'Instruction Fetch Unit/Runtime Dynamic': 0.40079,
'Instruction Fetch Unit/Subthreshold Leakage': 0.932286,
'Instruction Fetch Unit/Subthreshold Leakage with power gating': 0.40843,
'L2/Area': 4.53318,
'L2/Gate Leakage': 0.015464,
'L2/Peak Dynamic': 0.029635,
'L2/Runtime Dynamic': 0.00597297,
'L2/Subthreshold Leakage': 0.834142,
'L2/Subthreshold Leakage with power gating': 0.401066,
'Load Store Unit/Area': 8.80901,
'Load Store Unit/Data Cache/Area': 6.84535,
'Load Store Unit/Data Cache/Gate Leakage': 0.0279261,
'Load Store Unit/Data Cache/Peak Dynamic': 2.75741,
'Load Store Unit/Data Cache/Runtime Dynamic': 0.733466,
'Load Store Unit/Data Cache/Subthreshold Leakage': 0.527675,
'Load Store Unit/Data Cache/Subthreshold Leakage with power gating': 0.25085,
'Load Store Unit/Gate Leakage': 0.0350888,
'Load Store Unit/LoadQ/Area': 0.0836782,
'Load Store Unit/LoadQ/Gate Leakage': 0.00059896,
'Load Store Unit/LoadQ/Peak Dynamic': 0.0491852,
'Load Store Unit/LoadQ/Runtime Dynamic': 0.0491853,
'Load Store Unit/LoadQ/Subthreshold Leakage': 0.00941961,
'Load Store Unit/LoadQ/Subthreshold Leakage with power gating': 0.00536918,
'Load Store Unit/Peak Dynamic': 2.98968,
'Load Store Unit/Runtime Dynamic': 1.02522,
'Load Store Unit/StoreQ/Area': 0.322079,
'Load Store Unit/StoreQ/Gate Leakage': 0.00329971,
'Load Store Unit/StoreQ/Peak Dynamic': 0.121282,
'Load Store Unit/StoreQ/Runtime Dynamic': 0.242565,
'Load Store Unit/StoreQ/Subthreshold Leakage': 0.0345621,
'Load Store Unit/StoreQ/Subthreshold Leakage with power gating': 0.0197004,
'Load Store Unit/Subthreshold Leakage': 0.591321,
'Load Store Unit/Subthreshold Leakage with power gating': 0.283293,
'Memory Management Unit/Area': 0.4339,
'Memory Management Unit/Dtlb/Area': 0.0879726,
'Memory Management Unit/Dtlb/Gate Leakage': 0.00088729,
'Memory Management Unit/Dtlb/Peak Dynamic': 0.0430435,
'Memory Management Unit/Dtlb/Runtime Dynamic': 0.0434738,
'Memory Management Unit/Dtlb/Subthreshold Leakage': 0.0155699,
'Memory Management Unit/Dtlb/Subthreshold Leakage with power gating': 0.00887485,
'Memory Management Unit/Gate Leakage': 0.00808595,
'Memory Management Unit/Itlb/Area': 0.301552,
'Memory Management Unit/Itlb/Gate Leakage': 0.00393464,
'Memory Management Unit/Itlb/Peak Dynamic': 0.199028,
'Memory Management Unit/Itlb/Runtime Dynamic': 0.0263842,
'Memory Management Unit/Itlb/Subthreshold Leakage': 0.0413758,
'Memory Management Unit/Itlb/Subthreshold Leakage with power gating': 0.0235842,
'Memory Management Unit/Peak Dynamic': 0.429078,
'Memory Management Unit/Runtime Dynamic': 0.069858,
'Memory Management Unit/Subthreshold Leakage': 0.0766103,
'Memory Management Unit/Subthreshold Leakage with power gating': 0.0398333,
'Peak Dynamic': 17.0549,
'Renaming Unit/Area': 0.303608,
'Renaming Unit/FP Front End RAT/Area': 0.131045,
'Renaming Unit/FP Front End RAT/Gate Leakage': 0.00351123,
'Renaming Unit/FP Front End RAT/Peak Dynamic': 2.51468,
'Renaming Unit/FP Front End RAT/Runtime Dynamic': 0.0899166,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage': 0.0308571,
'Renaming Unit/FP Front End RAT/Subthreshold Leakage with power gating': 0.0175885,
'Renaming Unit/Free List/Area': 0.0340654,
'Renaming Unit/Free List/Gate Leakage': 2.5481e-05,
'Renaming Unit/Free List/Peak Dynamic': 0.0306032,
'Renaming Unit/Free List/Runtime Dynamic': 0.00870797,
'Renaming Unit/Free List/Subthreshold Leakage': 0.000370144,
'Renaming Unit/Free List/Subthreshold Leakage with power gating': 0.000201064,
'Renaming Unit/Gate Leakage': 0.00708398,
'Renaming Unit/Int Front End RAT/Area': 0.0941223,
'Renaming Unit/Int Front End RAT/Gate Leakage': 0.000283242,
'Renaming Unit/Int Front End RAT/Peak Dynamic': 0.731965,
'Renaming Unit/Int Front End RAT/Runtime Dynamic': 0.0841931,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage': 0.00435488,
'Renaming Unit/Int Front End RAT/Subthreshold Leakage with power gating': 0.00248228,
'Renaming Unit/Peak Dynamic': 3.58947,
'Renaming Unit/Runtime Dynamic': 0.182818,
'Renaming Unit/Subthreshold Leakage': 0.0552466,
'Renaming Unit/Subthreshold Leakage with power gating': 0.0276461,
'Runtime Dynamic': 3.29063,
'Subthreshold Leakage': 6.16288,
'Subthreshold Leakage with power gating': 2.55328}],
'DRAM': {'Area': 0,
'Gate Leakage': 0,
'Peak Dynamic': 0.7104941532526752,
'Runtime Dynamic': 0.7104941532526752,
'Subthreshold Leakage': 4.252,
'Subthreshold Leakage with power gating': 4.252},
'L3': [{'Area': 61.9075,
'Gate Leakage': 0.0484137,
'Peak Dynamic': 0.0732315,
'Runtime Dynamic': 0.041044,
'Subthreshold Leakage': 6.80085,
'Subthreshold Leakage with power gating': 3.32364}],
'Processor': {'Area': 191.908,
'Gate Leakage': 1.53485,
'Peak Dynamic': 76.7729,
'Peak Power': 109.885,
'Runtime Dynamic': 17.0415,
'Subthreshold Leakage': 31.5774,
'Subthreshold Leakage with power gating': 13.9484,
'Total Cores/Area': 128.669,
'Total Cores/Gate Leakage': 1.4798,
'Total Cores/Peak Dynamic': 76.6996,
'Total Cores/Runtime Dynamic': 17.0004,
'Total Cores/Subthreshold Leakage': 24.7074,
'Total Cores/Subthreshold Leakage with power gating': 10.2429,
'Total L3s/Area': 61.9075,
'Total L3s/Gate Leakage': 0.0484137,
'Total L3s/Peak Dynamic': 0.0732315,
'Total L3s/Runtime Dynamic': 0.041044,
'Total L3s/Subthreshold Leakage': 6.80085,
'Total L3s/Subthreshold Leakage with power gating': 3.32364,
'Total Leakage': 33.1122,
'Total NoCs/Area': 1.33155,
'Total NoCs/Gate Leakage': 0.00662954,
'Total NoCs/Peak Dynamic': 0.0,
'Total NoCs/Runtime Dynamic': 0.0,
'Total NoCs/Subthreshold Leakage': 0.0691322,
'Total NoCs/Subthreshold Leakage with power gating': 0.0259246}}
| 75.068928
| 124
| 0.682101
| 8,082
| 68,613
| 5.78483
| 0.067681
| 0.123543
| 0.112934
| 0.093427
| 0.93887
| 0.930999
| 0.91761
| 0.88713
| 0.862897
| 0.842384
| 0
| 0.132013
| 0.224316
| 68,613
| 914
| 125
| 75.068928
| 0.746439
| 0
| 0
| 0.642232
| 0
| 0
| 0.657373
| 0.048095
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
035665adfd99f2f146a8f5a3c9b9a8ba5bef9ce8
| 48
|
py
|
Python
|
bindsnet_master/bindsnet/preprocessing/__init__.py
|
Singular-Brain/ProjectBrain
|
2d22d45c13a86825c0dcaf517a59e02f2c4f6164
|
[
"MIT"
] | 6
|
2021-06-01T03:43:35.000Z
|
2022-02-11T10:41:06.000Z
|
bindsnet_master/bindsnet/preprocessing/__init__.py
|
Singular-Brain/ProjectBrain
|
2d22d45c13a86825c0dcaf517a59e02f2c4f6164
|
[
"MIT"
] | 1
|
2022-03-31T03:22:14.000Z
|
2022-03-31T03:22:14.000Z
|
bindsnet_master/bindsnet/preprocessing/__init__.py
|
Singular-Brain/ProjectBrain
|
2d22d45c13a86825c0dcaf517a59e02f2c4f6164
|
[
"MIT"
] | 3
|
2021-10-30T02:30:40.000Z
|
2021-11-16T04:23:12.000Z
|
from .preprocessing import AbstractPreprocessor
| 24
| 47
| 0.895833
| 4
| 48
| 10.75
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 48
| 1
| 48
| 48
| 0.977273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ceefb58123f44f6ea9f47f949e1f27b96df4abd1
| 2,397
|
py
|
Python
|
teraserver/python/tests/modules/FlaskModule/API/user/test_UserRefreshToken.py
|
introlab/opentera
|
bfc4de672c9de40b7c9a659be2138731e7ee4e94
|
[
"Apache-2.0"
] | 10
|
2020-03-16T14:46:06.000Z
|
2022-02-11T16:07:38.000Z
|
teraserver/python/tests/modules/FlaskModule/API/user/test_UserRefreshToken.py
|
introlab/opentera
|
bfc4de672c9de40b7c9a659be2138731e7ee4e94
|
[
"Apache-2.0"
] | 114
|
2019-09-16T13:02:50.000Z
|
2022-03-22T19:17:36.000Z
|
teraserver/python/tests/modules/FlaskModule/API/user/test_UserRefreshToken.py
|
introlab/opentera
|
bfc4de672c9de40b7c9a659be2138731e7ee4e94
|
[
"Apache-2.0"
] | null | null | null |
from tests.modules.FlaskModule.API.BaseAPITest import BaseAPITest
import datetime
class UserRefreshTokenTest(BaseAPITest):
login_endpoint = '/api/user/login'
test_endpoint = '/api/user/refresh_token'
def setUp(self):
pass
def tearDown(self):
pass
def test_no_token_http_auth_refresh(self):
response = self._request_with_http_auth(username='admin', password='admin')
self.assertEqual(401, response.status_code)
def test_valid_token_refresh(self):
response = self._login_with_http_auth(username='admin', password='admin')
self.assertEqual(200, response.status_code)
login_info = response.json()
self.assertTrue(login_info.__contains__('user_token'))
token = login_info['user_token']
response = self._request_with_token_auth(token=token)
self.assertEqual(200, response.status_code)
token_info = response.json()
self.assertTrue(token_info.__contains__('refresh_token'))
refresh_token = token_info['refresh_token']
self.assertGreater(len(refresh_token), 0)
def test_invalid_token_refresh_with_disabled_token(self):
response = self._login_with_http_auth(username='admin', password='admin')
self.assertEqual(200, response.status_code)
login_info = response.json()
self.assertTrue(login_info.__contains__('user_token'))
login_token = login_info['user_token']
response = self._request_with_token_auth(token=login_token)
self.assertEqual(200, response.status_code)
token_info = response.json()
self.assertTrue(token_info.__contains__('refresh_token'))
refresh_token = token_info['refresh_token']
self.assertGreater(len(refresh_token), 0)
# This should not work, token should be disabled
response = self._request_with_token_auth(token=login_token)
self.assertEqual(401, response.status_code)
def test_invalid_token_refresh_with_no_token(self):
response = self._login_with_http_auth(username='admin', password='admin')
self.assertEqual(200, response.status_code)
login_info = response.json()
self.assertTrue(login_info.__contains__('user_token'))
login_token = login_info['user_token']
response = self._request_with_token_auth(token='')
self.assertEqual(401, response.status_code)
| 42.803571
| 83
| 0.71214
| 289
| 2,397
| 5.50519
| 0.17301
| 0.067882
| 0.090509
| 0.072282
| 0.801383
| 0.801383
| 0.76807
| 0.745443
| 0.710245
| 0.676933
| 0
| 0.01334
| 0.1869
| 2,397
| 55
| 84
| 43.581818
| 0.802976
| 0.019191
| 0
| 0.673913
| 0
| 0
| 0.080885
| 0.009791
| 0
| 0
| 0
| 0
| 0.326087
| 1
| 0.130435
| false
| 0.130435
| 0.043478
| 0
| 0.23913
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
cefb5f24c4f3c4801369be89f481998cbadf0585
| 12,408
|
py
|
Python
|
tests/test_utils.py
|
jannikluhn/ethereum-accounts
|
164da163b99fe63ffd6d23a21b90e53e0c245952
|
[
"MIT"
] | null | null | null |
tests/test_utils.py
|
jannikluhn/ethereum-accounts
|
164da163b99fe63ffd6d23a21b90e53e0c245952
|
[
"MIT"
] | null | null | null |
tests/test_utils.py
|
jannikluhn/ethereum-accounts
|
164da163b99fe63ffd6d23a21b90e53e0c245952
|
[
"MIT"
] | null | null | null |
import pytest
from ethereum.utils import (
privtoaddr,
privtopub,
)
from eth_utils import (
remove_0x_prefix,
encode_hex,
decode_hex,
is_0x_prefixed,
is_checksum_address,
is_hex,
is_same_address,
)
from eth_accounts import (
random_private_key,
private_key_to_address,
private_key_to_public_key,
public_key_to_address,
)
from eth_accounts.utils import (
normalize_message,
normalize_password,
normalize_private_key,
normalize_public_key,
normalize_signature,
)
@pytest.mark.parametrize('key', [random_private_key() for _ in range(100)])
def test_random_private_key(key):
assert is_hex(key)
assert is_0x_prefixed(key)
assert len(key) == 64 + 2
@pytest.mark.parametrize('key', [random_private_key() for _ in range(100)])
def test_private_key_to_public_key(key):
# tests against pyethereum
reference = encode_hex(privtopub(decode_hex(key)))
public_key = private_key_to_public_key(key)
assert is_0x_prefixed(public_key)
assert is_hex(public_key)
assert len(public_key) == 130 + 2
assert public_key == reference
assert private_key_to_public_key(decode_hex(key)) == reference
assert private_key_to_public_key(remove_0x_prefix(key)) == reference
@pytest.mark.parametrize('key', [random_private_key() for _ in range(100)])
def test_private_key_to_address(key):
# tests against pyethereum
reference = encode_hex(privtoaddr(decode_hex(key)))
address = private_key_to_address(key)
assert is_0x_prefixed(address)
assert is_checksum_address(address)
assert is_same_address(address, reference)
assert is_same_address(private_key_to_address(decode_hex(key)), reference)
assert is_same_address(private_key_to_address(remove_0x_prefix(key)), reference)
@pytest.mark.parametrize('key', [random_private_key() for _ in range(100)])
def test_public_key_to_address(key):
# tests against pyethereum
public_key = encode_hex(privtopub(decode_hex(key)))
reference = privtoaddr(decode_hex(key))
address = public_key_to_address(public_key)
assert is_0x_prefixed(address)
assert is_checksum_address(address)
assert is_same_address(address, reference)
assert is_same_address(public_key_to_address(decode_hex(public_key)), reference)
assert is_same_address(public_key_to_address(remove_0x_prefix(public_key)), reference)
@pytest.mark.parametrize(('input', 'output', 'error'), [
('0x0000000000000000000000000000000000000000000000000000000000000000', None, ValueError),
('0x0000000000000000000000000000000000000000000000000000000000000001', None, None),
('0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364140', None, None),
('0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141', None, ValueError),
('0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa'.upper(),
'0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', None),
('aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa',
'0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa', None),
('nohex', None, ValueError),
(-1, None, ValueError),
(0, None, ValueError),
(1, '0x0000000000000000000000000000000000000000000000000000000000000001', None),
(0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364140,
'0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364140',
None),
(0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141, None, ValueError),
('0x01', '0x0000000000000000000000000000000000000000000000000000000000000001', None),
('0x000000000000000000000000000000000000000000000000000000000000000001',
'0x0000000000000000000000000000000000000000000000000000000000000001', None),
(b'\0', None, ValueError),
(b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00', None, ValueError),
(b'\x01', '0x0000000000000000000000000000000000000000000000000000000000000001', None),
(b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01',
'0x0000000000000000000000000000000000000000000000000000000000000001', None),
(b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01',
'0x0000000000000000000000000000000000000000000000000000000000000001', None),
(b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xba'
b'\xae\xdc\xe6\xafH\xa0;\xbf\xd2^\x8c\xd06A@',
'0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364140', None),
(b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xfe\xba'
b'\xae\xdc\xe6\xafH\xa0;\xbf\xd2^\x8c\xd06AA', None, ValueError),
(None, None, TypeError),
(1.0, None, TypeError),
([], None, TypeError)
])
def test_private_key_normalization(input, output, error):
if error is None:
if output is None:
output = input
assert output == normalize_private_key(input)
else:
with pytest.raises(error):
normalize_private_key(input)
@pytest.mark.parametrize(['input', 'output', 'error'], [
('0x0000000000000000000000000000000000000000000000000000000000000000'
'000000000000000000000000000000000000000000000000000000000000000000', None, None),
('0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'
'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', None, None),
('0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF'
'FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF',
'0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'
'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', None),
('0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'
'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', None, ValueError),
('0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'
'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', None, ValueError),
('nohex', None, ValueError),
(b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
'0x0000000000000000000000000000000000000000000000000000000000000000'
'000000000000000000000000000000000000000000000000000000000000000000',
None),
(b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff',
'0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'
'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', None),
(b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff',
None, ValueError),
(b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff',
None, ValueError),
(None, None, TypeError),
(5, None, TypeError),
(5.0, None, TypeError),
([], None, TypeError)
])
def test_public_key_normalization(input, output, error):
if error is None:
if output is None:
output = input
assert output == normalize_public_key(input)
else:
with pytest.raises(error):
normalize_public_key(input)
@pytest.mark.parametrize(['input', 'output', 'error'], [
(b'', None, None),
(b'password', None, None),
('password', None, TypeError),
(None, None, TypeError),
(5, None, TypeError),
(5.0, None, TypeError),
([], None, TypeError),
([b'password'], None, TypeError)
])
def test_password_normalization(input, output, error):
if error is None:
if output is None:
output = input
assert output == normalize_password(input)
else:
with pytest.raises(error):
normalize_password(input)
@pytest.mark.parametrize(['input', 'output', 'error'], [
(b'', None, None),
(b'message', None, None),
('0xabcd', b'\xab\xcd', None),
('abcd', b'\xab\xcd', None),
('0xAbCd', b'\xab\xcd', None),
('nohex', None, ValueError),
(None, None, TypeError),
(5, None, TypeError),
(5.0, None, TypeError),
([], None, TypeError),
([b'message'], None, TypeError)
])
def test_message_normalization(input, output, error):
if error is None:
if output is None:
output = input
assert output == normalize_message(input)
else:
with pytest.raises(error):
normalize_message(input)
@pytest.mark.parametrize(['input', 'output', 'error'], [
('0x0000000000000000000000000000000000000000000000000000000000000000'
'000000000000000000000000000000000000000000000000000000000000000000', None, None),
('0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'
'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', None, None),
('0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF'
'FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF',
'0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'
'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', None),
('0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'
'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', None, ValueError),
('0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'
'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', None, ValueError),
('nohex', None, ValueError),
(b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00',
'0x0000000000000000000000000000000000000000000000000000000000000000'
'000000000000000000000000000000000000000000000000000000000000000000',
None),
(b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff',
'0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff'
'ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff', None),
(b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff',
None, ValueError),
(b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff'
b'\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff\xff',
None, ValueError),
(None, None, TypeError),
(5, None, TypeError),
(5.0, None, TypeError),
([], None, TypeError)
])
def test_signature_normalization(input, output, error):
if error is None:
if output is None:
output = input
assert output == normalize_signature(input)
else:
with pytest.raises(error):
normalize_signature(input)
| 47
| 97
| 0.736863
| 1,454
| 12,408
| 6.172627
| 0.065337
| 0.267409
| 0.381059
| 0.481337
| 0.788747
| 0.763677
| 0.747409
| 0.704624
| 0.685905
| 0.671421
| 0
| 0.161248
| 0.129836
| 12,408
| 263
| 98
| 47.178707
| 0.670001
| 0.005964
| 0
| 0.594142
| 0
| 0.133891
| 0.496513
| 0.480292
| 0
| 0
| 0.183455
| 0
| 0.100418
| 1
| 0.037657
| false
| 0.029289
| 0.020921
| 0
| 0.058577
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
301aa916d8f95067c10019e3141e2da838b6d492
| 121
|
py
|
Python
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/panther/calculators/calc_dsa.py
|
lmnotran/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 82
|
2016-06-29T17:24:43.000Z
|
2021-04-16T06:49:17.000Z
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/panther/calculators/calc_dsa.py
|
lmnotran/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 6
|
2022-01-12T18:22:08.000Z
|
2022-03-25T10:19:27.000Z
|
platform/radio/efr32_multiphy_configurator/pyradioconfig/parts/panther/calculators/calc_dsa.py
|
lmnotran/gecko_sdk
|
2e82050dc8823c9fe0e8908c1b2666fb83056230
|
[
"Zlib"
] | 56
|
2016-08-02T10:50:50.000Z
|
2021-07-19T08:57:34.000Z
|
from pyradioconfig.parts.nixi.calculators.calc_dsa import CALC_DSA_nixi
class CALC_DSA_panther(CALC_DSA_nixi):
pass
| 24.2
| 71
| 0.842975
| 19
| 121
| 5
| 0.578947
| 0.294737
| 0.231579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.099174
| 121
| 5
| 72
| 24.2
| 0.87156
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
30796f50157c05afa391060e1c994d16f58984d4
| 11,778
|
py
|
Python
|
hotviz/example/span_data.py
|
AxlAlm/hotviz
|
4269fdf70fcab28a35d11afab8a58393dc2f9a12
|
[
"MIT"
] | null | null | null |
hotviz/example/span_data.py
|
AxlAlm/hotviz
|
4269fdf70fcab28a35d11afab8a58393dc2f9a12
|
[
"MIT"
] | null | null | null |
hotviz/example/span_data.py
|
AxlAlm/hotviz
|
4269fdf70fcab28a35d11afab8a58393dc2f9a12
|
[
"MIT"
] | null | null | null |
span_data = [
{
"token": "I",
"pred": {
"span_id": None,
"label": "X",
"score": 0.1,
},
},
{
"token": "think",
"pred": {
"span_id": None,
"label": "X",
"score": 0.1,
},
},
{
"token": "that",
"pred": {
"span_id": None,
"label": "X",
"score": 0.1,
},
"gold": {
"span_id": "X_1",
"label": "X",
}
},
{
"token": "this",
"pred": {
"span_id": "X_1",
"label": "X",
"score": 0.6,
},
"gold": {
"span_id": "X_1",
"label": "X",
}
},
{
"token": "is",
"pred": {
"span_id": "X_1",
"label": "X",
"score": 0.7,
},
"gold": {
"span_id": "X_1",
"label": "X",
}
},
{
"token": "span",
"pred": {
"span_id": "X_1",
"label": "X",
"score": 0.8,
},
"gold": {
"span_id": "X_1",
"label": "X",
}
},
{
"token": "number",
"pred": {
"span_id": "X_1",
"label": "X",
"score": 0.9,
},
"gold": {
"span_id": "X_1",
"label": "X",
}
},
{
"token": "one",
"pred": {
"span_id": "X_1",
"label": "X",
"score": 0.7,
},
"gold": {
"span_id": "X_1",
"label": "X",
}
},
{
"token": "and",
"pred": {
"span_id": None,
"label": "X",
"score": 0.2,
},
},
{
"token": "then",
"pred": {
"span_id": None,
"label": "Z",
"score": 0.3,
},
},
{
"token": "somewhere",
"pred": {
"span_id": None,
"label": "Z",
"score": 0.4,
},
},
{
"token": "here",
"pred": {
"span_id": "Z_1",
"label": "Z",
"score": 0.8,
},
"gold": {
"span_id": "Z_1",
"label": "Z",
}
},
{
"token": "is",
"pred": {
"span_id": "Z_1",
"label": "Z",
"score": 0.9,
},
"gold": {
"span_id": "Z_1",
"label": "Z",
}
},
{
"token": "span",
"pred": {
"span_id": "Z_1",
"label": "Z",
"score": 0.75,
},
"gold": {
"span_id": "Z_1",
"label": "Z",
}
},
{
"token": "number",
"pred": {
"span_id": "Z_1",
"label": "Z",
"score": 0.85,
},
"gold": {
"span_id": "Z_1",
"label": "Z",
}
},
{
"token": "two",
"pred": {
"span_id": "Z_1",
"label": "Z",
"score": 0.95,
},
"gold": {
"span_id": "Z_1",
"label": "Z",
}
},
{
"token": ".",
"pred": {
"span_id": None,
"label": "Z",
"score": 0.1,
},
},
{
"token": "Lastly,",
"pred": {
"span_id": None,
"label": "Z",
"score": 0.1,
},
},
{
"token": "we",
"pred": {
"span_id": "Z_2",
"label": "Z",
"score": 0.6,
},
},
{
"token": "have",
"pred": {
"span_id": "Z_2",
"label": "Z",
"score": 0.7,
},
},
{
"token": "span",
"pred": {
"span_id": "Z_2",
"label": "Z",
"score": 0.8,
},
"gold": {
"span_id": "Z_2",
"label": "Z",
}
},
{
"token": "number",
"pred": {
"span_id": "Z_2",
"label": "Z",
"score": 0.9,
},
"gold": {
"span_id": "Z_2",
"label": "Z",
}
},
{
"token": "three",
"pred": {
"span_id": "Z_2",
"label": "Z",
"score": 0.8,
},
"gold": {
"span_id": "Z_2",
"label": "Z",
}
},
{
"token": ".",
"pred": {
"span_id": None,
"label": None,
},
},
]
| 45.828794
| 101
| 0.098149
| 374
| 11,778
| 2.909091
| 0.104278
| 0.209559
| 0.220588
| 0.154412
| 0.902574
| 0.889706
| 0.875919
| 0.875919
| 0.594669
| 0.294118
| 0
| 0.036398
| 0.818051
| 11,778
| 257
| 102
| 45.828794
| 0.471302
| 0
| 0
| 0.562249
| 0
| 0
| 0.090091
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
065cb4b5bb16055e3c96c6c238abe821273bf7cd
| 13,639
|
py
|
Python
|
tests/components/nanoleaf/test_config_flow.py
|
hnrkp/core
|
24d017f9744341d42b8fa837e157846c0bb03707
|
[
"Apache-2.0"
] | 1
|
2021-12-11T00:41:25.000Z
|
2021-12-11T00:41:25.000Z
|
tests/components/nanoleaf/test_config_flow.py
|
hnrkp/core
|
24d017f9744341d42b8fa837e157846c0bb03707
|
[
"Apache-2.0"
] | 69
|
2020-08-04T09:03:43.000Z
|
2022-03-31T06:13:01.000Z
|
tests/components/nanoleaf/test_config_flow.py
|
BKPepe/home-assistant
|
b4238443c86860a0540df53a8a11b9fed87dea3c
|
[
"Apache-2.0"
] | null | null | null |
"""Test the Nanoleaf config flow."""
from unittest.mock import patch
from pynanoleaf import InvalidToken, NotAuthorizingNewTokens, Unavailable
from homeassistant import config_entries
from homeassistant.components.nanoleaf.const import DOMAIN
from homeassistant.const import CONF_HOST, CONF_TOKEN
from homeassistant.core import HomeAssistant
TEST_NAME = "Canvas ADF9"
TEST_HOST = "192.168.0.100"
TEST_TOKEN = "R34F1c92FNv3pcZs4di17RxGqiLSwHM"
TEST_OTHER_TOKEN = "Qs4dxGcHR34l29RF1c92FgiLQBt3pcM"
TEST_DEVICE_ID = "5E:2E:EA:XX:XX:XX"
TEST_OTHER_DEVICE_ID = "5E:2E:EA:YY:YY:YY"
async def test_user_unavailable_user_step(hass: HomeAssistant) -> None:
"""Test we handle Unavailable errors when host is not available in user step."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.nanoleaf.config_flow.Nanoleaf.authorize",
side_effect=Unavailable("message"),
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_HOST: TEST_HOST,
},
)
assert result2["type"] == "form"
assert result2["step_id"] == "user"
assert result2["errors"] == {"base": "cannot_connect"}
assert not result2["last_step"]
async def test_user_unavailable_link_step(hass: HomeAssistant) -> None:
"""Test we abort if the device becomes unavailable in the link step."""
result = await hass.config_entries.flow.async_init(
DOMAIN, context={"source": config_entries.SOURCE_USER}
)
with patch(
"homeassistant.components.nanoleaf.config_flow.Nanoleaf.authorize",
return_value=None,
):
result2 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{
CONF_HOST: TEST_HOST,
},
)
assert result2["type"] == "form"
assert result2["step_id"] == "link"
with patch(
"homeassistant.components.nanoleaf.config_flow.Nanoleaf.authorize",
side_effect=Unavailable("message"),
):
result3 = await hass.config_entries.flow.async_configure(
result["flow_id"],
{},
)
assert result3["type"] == "abort"
assert result3["reason"] == "cannot_connect"
async def test_user_unavailable_setup_finish(hass: HomeAssistant) -> None:
    """Test we abort if the device becomes unavailable during setup_finish."""
    flow = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    with patch(
        "homeassistant.components.nanoleaf.config_flow.Nanoleaf.authorize",
        return_value=None,
    ):
        link_form = await hass.config_entries.flow.async_configure(
            flow["flow_id"], {CONF_HOST: TEST_HOST}
        )
    assert link_form["type"] == "form"
    assert link_form["step_id"] == "link"
    # Authorization succeeds, but fetching device info fails afterwards.
    with patch(
        "homeassistant.components.nanoleaf.config_flow.Nanoleaf.authorize",
        return_value=None,
    ), patch(
        "homeassistant.components.nanoleaf.config_flow.pynanoleaf_get_info",
        side_effect=Unavailable("message"),
    ):
        abort_result = await hass.config_entries.flow.async_configure(
            flow["flow_id"], {}
        )
    assert abort_result["type"] == "abort"
    assert abort_result["reason"] == "cannot_connect"
async def test_user_not_authorizing_new_tokens(hass: HomeAssistant) -> None:
    """Test we handle NotAuthorizingNewTokens errors."""
    flow = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    assert flow["type"] == "form"
    assert flow["errors"] is None
    assert not flow["last_step"]
    assert flow["step_id"] == "user"
    # The user step tolerates NotAuthorizingNewTokens and shows the link form.
    with patch(
        "homeassistant.components.nanoleaf.config_flow.Nanoleaf.authorize",
        side_effect=NotAuthorizingNewTokens("message"),
    ):
        link_form = await hass.config_entries.flow.async_configure(
            flow["flow_id"], {CONF_HOST: TEST_HOST}
        )
    assert link_form["type"] == "form"
    assert link_form["errors"] is None
    assert link_form["step_id"] == "link"
    # Re-submitting without input simply re-renders the link form.
    repeat_form = await hass.config_entries.flow.async_configure(
        flow["flow_id"],
    )
    assert repeat_form["type"] == "form"
    assert repeat_form["errors"] is None
    assert repeat_form["step_id"] == "link"
    # Failing again in the link step surfaces the dedicated error.
    with patch(
        "homeassistant.components.nanoleaf.config_flow.Nanoleaf.authorize",
        side_effect=NotAuthorizingNewTokens("message"),
    ):
        error_form = await hass.config_entries.flow.async_configure(
            flow["flow_id"], {}
        )
    assert error_form["type"] == "form"
    assert error_form["step_id"] == "link"
    assert error_form["errors"] == {"base": "not_allowing_new_tokens"}
async def test_user_exception(hass: HomeAssistant) -> None:
    """Test we handle Exception errors."""
    flow = await hass.config_entries.flow.async_init(
        DOMAIN, context={"source": config_entries.SOURCE_USER}
    )
    # Unexpected failure in the user step -> "unknown" form error.
    with patch(
        "homeassistant.components.nanoleaf.config_flow.Nanoleaf.authorize",
        side_effect=Exception,
    ):
        user_form = await hass.config_entries.flow.async_configure(
            flow["flow_id"], {CONF_HOST: TEST_HOST}
        )
    assert user_form["type"] == "form"
    assert user_form["step_id"] == "user"
    assert user_form["errors"] == {"base": "unknown"}
    assert not user_form["last_step"]
    # Retry successfully to reach the link step.
    with patch(
        "homeassistant.components.nanoleaf.config_flow.Nanoleaf.authorize",
        return_value=None,
    ):
        link_form = await hass.config_entries.flow.async_configure(
            flow["flow_id"], {CONF_HOST: TEST_HOST}
        )
    assert link_form["step_id"] == "link"
    # Unexpected failure in the link step -> "unknown" form error.
    with patch(
        "homeassistant.components.nanoleaf.config_flow.Nanoleaf.authorize",
        side_effect=Exception,
    ):
        link_error_form = await hass.config_entries.flow.async_configure(
            flow["flow_id"], {}
        )
    assert link_error_form["type"] == "form"
    assert link_error_form["step_id"] == "link"
    assert link_error_form["errors"] == {"base": "unknown"}
    # Unexpected failure while fetching device info -> abort.
    with patch(
        "homeassistant.components.nanoleaf.config_flow.Nanoleaf.authorize",
        return_value=None,
    ), patch(
        "homeassistant.components.nanoleaf.config_flow.pynanoleaf_get_info",
        side_effect=Exception,
    ):
        abort_result = await hass.config_entries.flow.async_configure(
            flow["flow_id"], {}
        )
    assert abort_result["type"] == "abort"
    assert abort_result["reason"] == "unknown"
async def test_zeroconf_discovery(hass: HomeAssistant) -> None:
    """Test zeroconfig discovery flow init."""
    service_type = "_nanoleafms._tcp.local"
    discovery_data = {
        "host": TEST_HOST,
        "name": f"{TEST_NAME}.{service_type}",
        "type": service_type,
        "properties": {"id": TEST_DEVICE_ID},
    }
    with patch(
        "homeassistant.components.nanoleaf.config_flow.pynanoleaf_get_info",
        return_value={"name": TEST_NAME},
    ), patch(
        "homeassistant.components.nanoleaf.config_flow.load_json",
        return_value={},
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_ZEROCONF},
            data=discovery_data,
        )
    assert result["type"] == "form"
    assert result["step_id"] == "link"
async def test_homekit_discovery_link_unavailable(
    hass: HomeAssistant,
) -> None:
    """Test homekit discovery and abort if device is unavailable."""
    service_type = "_hap._tcp.local"
    with patch(
        "homeassistant.components.nanoleaf.config_flow.pynanoleaf_get_info",
        return_value={"name": TEST_NAME},
    ), patch(
        "homeassistant.components.nanoleaf.config_flow.load_json",
        return_value={},
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_HOMEKIT},
            data={
                "host": TEST_HOST,
                "name": f"{TEST_NAME}.{service_type}",
                "type": service_type,
                "properties": {"id": TEST_DEVICE_ID},
            },
        )
    assert result["type"] == "form"
    assert result["step_id"] == "link"
    # Look up this flow's context to verify the discovery metadata it stored.
    in_progress = hass.config_entries.flow.async_progress()
    context = next(
        flow["context"] for flow in in_progress if flow["flow_id"] == result["flow_id"]
    )
    assert context["title_placeholders"] == {"name": TEST_NAME}
    assert context["unique_id"] == TEST_NAME
    # Device becomes unreachable during the link step -> abort.
    with patch(
        "homeassistant.components.nanoleaf.config_flow.Nanoleaf.authorize",
        side_effect=Unavailable("message"),
    ):
        result = await hass.config_entries.flow.async_configure(result["flow_id"], {})
    assert result["type"] == "abort"
    assert result["reason"] == "cannot_connect"
async def test_import_config(hass: HomeAssistant) -> None:
    """Test configuration import."""
    with patch(
        "homeassistant.components.nanoleaf.config_flow.pynanoleaf_get_info",
        return_value={"name": TEST_NAME},
    ), patch(
        "homeassistant.components.nanoleaf.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_IMPORT},
            data={CONF_HOST: TEST_HOST, CONF_TOKEN: TEST_TOKEN},
        )
    assert result["type"] == "create_entry"
    assert result["title"] == TEST_NAME
    assert result["data"] == {CONF_HOST: TEST_HOST, CONF_TOKEN: TEST_TOKEN}
    await hass.async_block_till_done()
    assert len(mock_setup_entry.mock_calls) == 1
async def test_import_config_invalid_token(hass: HomeAssistant) -> None:
    """Test configuration import with invalid token."""
    # An InvalidToken during info fetch aborts the import flow.
    with patch(
        "homeassistant.components.nanoleaf.config_flow.pynanoleaf_get_info",
        side_effect=InvalidToken("message"),
    ):
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_IMPORT},
            data={CONF_HOST: TEST_HOST, CONF_TOKEN: TEST_TOKEN},
        )
    assert result["type"] == "abort"
    assert result["reason"] == "invalid_token"
async def test_import_last_discovery_integration_host_zeroconf(
    hass: HomeAssistant,
) -> None:
    """
    Test discovery integration import from < 2021.4 (host) with zeroconf.
    Device is last in Nanoleaf config file.
    """
    service_type = "_nanoleafapi._tcp.local"
    with patch(
        "homeassistant.components.nanoleaf.config_flow.load_json",
        return_value={TEST_HOST: {"token": TEST_TOKEN}},
    ), patch(
        "homeassistant.components.nanoleaf.config_flow.pynanoleaf_get_info",
        return_value={"name": TEST_NAME},
    ), patch(
        "homeassistant.components.nanoleaf.config_flow.os.remove",
        return_value=None,
    ) as mock_remove, patch(
        "homeassistant.components.nanoleaf.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_ZEROCONF},
            data={
                "host": TEST_HOST,
                "name": f"{TEST_NAME}.{service_type}",
                "type": service_type,
                "properties": {"id": TEST_DEVICE_ID},
            },
        )
    assert result["type"] == "create_entry"
    assert result["title"] == TEST_NAME
    assert result["data"] == {CONF_HOST: TEST_HOST, CONF_TOKEN: TEST_TOKEN}
    # The legacy config file held only this device, so it gets deleted.
    mock_remove.assert_called_once()
    await hass.async_block_till_done()
    assert len(mock_setup_entry.mock_calls) == 1
async def test_import_not_last_discovery_integration_device_id_homekit(
    hass: HomeAssistant,
) -> None:
    """
    Test discovery integration import from >= 2021.4 (device_id) with homekit.
    Device is not the only one in the Nanoleaf config file.
    """
    service_type = "_hap._tcp.local"
    with patch(
        "homeassistant.components.nanoleaf.config_flow.load_json",
        return_value={
            TEST_DEVICE_ID: {"token": TEST_TOKEN},
            TEST_OTHER_DEVICE_ID: {"token": TEST_OTHER_TOKEN},
        },
    ), patch(
        "homeassistant.components.nanoleaf.config_flow.pynanoleaf_get_info",
        return_value={"name": TEST_NAME},
    ), patch(
        "homeassistant.components.nanoleaf.config_flow.save_json",
        return_value=None,
    ) as mock_save_json, patch(
        "homeassistant.components.nanoleaf.async_setup_entry",
        return_value=True,
    ) as mock_setup_entry:
        result = await hass.config_entries.flow.async_init(
            DOMAIN,
            context={"source": config_entries.SOURCE_HOMEKIT},
            data={
                "host": TEST_HOST,
                "name": f"{TEST_NAME}.{service_type}",
                "type": service_type,
                "properties": {"id": TEST_DEVICE_ID},
            },
        )
    assert result["type"] == "create_entry"
    assert result["title"] == TEST_NAME
    assert result["data"] == {CONF_HOST: TEST_HOST, CONF_TOKEN: TEST_TOKEN}
    # Other devices remain in the legacy file, so it is rewritten, not removed.
    mock_save_json.assert_called_once()
    await hass.async_block_till_done()
    assert len(mock_setup_entry.mock_calls) == 1
| 34.0975
| 86
| 0.630178
| 1,470
| 13,639
| 5.597279
| 0.093878
| 0.058459
| 0.113029
| 0.126884
| 0.81332
| 0.782936
| 0.763004
| 0.745017
| 0.743437
| 0.743437
| 0
| 0.008694
| 0.249432
| 13,639
| 399
| 87
| 34.182957
| 0.795057
| 0.0022
| 0
| 0.701754
| 0
| 0
| 0.234748
| 0.155016
| 0
| 0
| 0
| 0
| 0.172515
| 1
| 0
| false
| 0
| 0.035088
| 0
| 0.035088
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
066986712dc49109390134b26929850250390592
| 137,184
|
py
|
Python
|
DocTAG_Dockerized/DocTAG_App/utils_configuration_and_update.py
|
DocTAG/doctag-core
|
0b3c26cca9ef022e6551d5b4f8a02115b497ff2a
|
[
"MIT"
] | 4
|
2021-11-26T09:38:36.000Z
|
2022-01-26T17:46:58.000Z
|
DocTAG_Dockerized/DocTAG_App/utils_configuration_and_update.py
|
DocTAG/doctag-core
|
0b3c26cca9ef022e6551d5b4f8a02115b497ff2a
|
[
"MIT"
] | null | null | null |
DocTAG_Dockerized/DocTAG_App/utils_configuration_and_update.py
|
DocTAG/doctag-core
|
0b3c26cca9ef022e6551d5b4f8a02115b497ff2a
|
[
"MIT"
] | null | null | null |
from DocTAG_App.utils import *
from psycopg2.extensions import register_adapter, AsIs
def addapt_numpy_float64(numpy_float64):
    """Adapt a numpy.float64 so psycopg2 embeds it in SQL verbatim."""
    adapted = AsIs(numpy_float64)
    return adapted


def addapt_numpy_int64(numpy_int64):
    """Adapt a numpy.int64 so psycopg2 embeds it in SQL verbatim."""
    adapted = AsIs(numpy_int64)
    return adapted


# Register the adapters so numpy scalar types are accepted as query parameters.
register_adapter(numpy.float64, addapt_numpy_float64)
register_adapter(numpy.int64, addapt_numpy_int64)
from collections import defaultdict
from DocTAG_App.utils_pubmed import *
import os.path
from DocTAG_App.utils_doctag import *
"""This .py file includes the methods needed to configure MedTAG and update its configuration"""
# Language/stemmer names recognized by NLTK — note "porter" is a stemmer
# name, not a language; presumably consumed by stemming/stopword code
# elsewhere in this module (TODO confirm against callers).
LANGUAGES_NLTK = [
    "arabic","danish","dutch","english","finnish","french","german","hungarian","italian","norwegian","porter","portuguese","romanian","russian","spanish","swedish"
]
def check_file(reports,pubmedfiles, labels, concepts, jsonDisp, jsonAnn, username, password,topics,runs,tf_idf):
"""This method checks whether the inserted files complies with the requirements"""
json_resp = {}
json_keys = []
usecases_list = []
docs = []
tops = []
topics_ids = []
documents_ids = []
languages = []
json_resp['general_message'] = ''
json_resp['username_message'] = ''
json_resp['report_message'] = ''
json_resp['pubmed_message'] = ''
json_resp['concept_message'] = ''
json_resp['label_message'] = ''
json_resp['topic_message'] = ''
json_resp['tfidf_message'] = ''
json_resp['runs_message'] = ''
json_resp['fields_message'] = ''
json_resp['keys'] = json_keys
#added 2/11
if (len(labels) == 0 and len(concepts) == 0) and len(topics) == 0 and (len(reports) == 0 and len(pubmedfiles) == 0) and len(runs) == 0:
json_resp[
'general_message'] = 'ERROR - You must provide at least four files: the lables (or concepts), the topics, the runs and the reports.'
# if len(jsonAnn) == 0:
# json_resp[
# 'general_message'] = 'ERROR - You must provide at least one field to annotate.'
elif len(reports) == 0 and len(pubmedfiles) == 0:
json_resp['general_message'] = 'ERROR - You must provide a file with one or more reports or one or more pubmed files.'
elif len(pubmedfiles) > 0 and len(concepts) == 0 and len(labels) == 0:
json_resp['general_message'] = 'PUBMED - only mentions allowed.'
try:
try:
cursor = connection.cursor()
cursor.execute('SELECT * FROM public.user WHERE username = %s', (str(username),))
ans = cursor.fetchall()
# Error on username and password: duplicated username or missing
if len(ans) > 0 or username == 'Test':
json_resp['username_message'] = 'USERNAME - The username you selected is already taken. Choose another one.'
if (username == ''):
json_resp['username_message'] = 'USERNAME - Please, provide a username.'
if password == '' and username == '':
json_resp['username_message'] = 'USERNAME - Please, provide a username and a password.'
except (Exception, psycopg2.Error) as e:
print(e)
json_resp[
'username_message'] = 'An error occurred handling the username and the password. Please, insert them again.'
pass
else:
if json_resp['username_message'] == '':
json_resp['username_message'] = 'Ok'
# This is necessary to collect the fields to annotate and display
fields = []
fields_to_ann = []
jsondisp = ''.join(jsonDisp)
jsonann = ''.join(jsonAnn)
jsondisp = jsondisp.split(',')
jsonann = jsonann.split(',')
for el in jsondisp:
if len(el) > 0:
fields.append(el)
for el in jsonann:
if len(el) > 0:
fields_to_ann.append(el)
if not tf_idf.isdigit():
json_resp['tfidf_message'] = 'TF-IDF - the value must include only digits'
if json_resp['tfidf_message'] == '':
json_resp['tfidf_message'] = 'Ok'
# Error if 0 report files are added
if len(reports) == 0 and len(pubmedfiles) == 0:
json_resp['report_message'] = 'REPORTS FILES - You must provide at least one file containing reports or at least one file containing PubMED IDs before checking'
json_resp['pubmed_message'] = 'PUBMED FILES - You must provide at least one file containing reports or at least one file containing PubMED IDs before checking'
if len(topics) == 0:
json_resp['topic_message'] = 'TOPIC FILES - You must provide at least one file containing topics'
if len(runs) == 0:
json_resp['runs_message'] = 'RUNS FILES - You must provide at least one file containing runs'
# docs_tot = []
for j in range(len(reports)):
r = decompress_files([reports[j]])
for i in range(len(r)):
if isinstance(r[i], str):
rep_name = r[i]
workpath = os.path.dirname(os.path.abspath(__file__)) # Returns the Path your .py file is in
r[i] = os.path.join(workpath, 'static/tmp/' + r[i])
else:
rep_name = r[i].name
if not rep_name.endswith('csv') and not rep_name.endswith('json'):
json_resp['report_message'] = 'DOCUMENTS FILE - ' + rep_name + ' - The file must be .csv and .json and .txt and .Z and .zip'
break
if rep_name.endswith('.csv'):
try:
df = pd.read_csv(r[i])
df = df.where(pd.notnull(df), None)
df = df.reset_index(drop=True) # Useful if the csv includes only commas
except Exception as e:
print(e)
json_resp['report_message'] = 'DOCUMENTS FILE - ' + rep_name + ' - An error occurred while parsing the csv. Check if it is well formatted. Check if it contains as many columns as they are declared in the header.'
pass
else:
# check if colunns are allowed and without duplicates
cols = list(df.columns)
list_db_col = ['document_id','language']
list_not_db_col = []
missing = False
for el in list_db_col:
if el not in cols and el == 'document_id':
json_resp['report_message'] = 'DOCUMENTS FILE - ' + rep_name + ' - The column: ' + el + ' is missing, please add it.'
missing = True
break
if missing:
break
for id in list(df.document_id.unique()):
if not str(id) in documents_ids:
# json_resp['report_message'] = 'WARNING DOCUMENTS FILE - ' + rep_name + ' - The id: ' + str(id) + ' is duplicated. The duplicates are ignored.'
# else:
documents_ids.append(str(id))
for el in cols:
if el not in list_db_col:
list_not_db_col.append(el)
for el in df.document_id:
if el.lower().startswith('pubmed_'):
json_resp['report_message'] = 'DOCUMENTS FILE - ' + rep_name + ' - reports\' ids can not start with "PUBMED_", please, change the name'
# if 0 optional columns are added
if len(list_not_db_col) == 0:
json_resp['report_message'] = 'DOCUMENTS FILE - ' + rep_name + ' - You must provide at least one column other than the documents\' ids'
break
# Check if the csv is empty with 0 rows
if df.shape[0] == 0:
json_resp['report_message'] = 'DOCUMENTS FILE - ' + rep_name + ' - You must provide at least a report.'
break
else:
# check if columns id_report and language have no duplicates
df_dup = df[df.duplicated(subset=['document_id'], keep=False)]
if 'language' in df:
df_dup = df[df.duplicated(subset=['document_id','language'], keep=False)]
if df_dup.shape[0] > 0:
json_resp['report_message'] = 'WARNING DOCUMENTS FILE - ' + rep_name + ' - The rows: ' + str(
df_dup.index.to_list()) + ' are duplicated. The duplicates are ignored.'
# Check if the optional rows are empty for one or more reports.
exit = False
# docs_tot.extend(list(df.document_id.unique()))
for ind in range(df.shape[0]):
count_both = 0
not_none_cols = []
isnone = True
for el in list_not_db_col:
if df.loc[ind, el] is not None:
isnone = False
not_none_cols.append(el)
for el in not_none_cols:
if el not in jsonann and el not in jsondisp:
count_both = count_both + 1
if count_both == len(not_none_cols):
json_resp['fields_message'] = 'WARNING REPORT FIELDS TO DISPLAY AND ANNOTATE - ' + rep_name + ' - With this configuration the report at the row: ' + str(
ind) + ' would not be displayed since the columns to display are all empty for that report.'
if isnone:
exit = True
json_resp['report_message'] = 'DOCUMENTS FILE - ' + rep_name + ' - The report at row ' + str(ind) + ' has the columns: ' + ', '.join(
list_not_db_col) + ' empty. Provide a value for at least one of these columns.'
break
if exit:
break
# check if there are None in mandatory columns
el = ''
if None in df['document_id'].tolist():
el = 'document_id'
if el != '':
lista = df[el].tolist()
ind = lista.index(None)
json_resp['report_message'] = 'DOCUMENTS FILE - ' + rep_name + ' - The column ' + el + ' is empty at the row: ' + str(ind) + '.'
break
elif rep_name.endswith('.json'):
if isinstance(r[i], str):
r[i] = open(r[i], 'r')
d = json.load(r[i])
if 'collection' not in d.keys():
json_resp['reports_message'] = 'DOCUMENTS FILE - ' + rep_name + ' - The json is not well formatted.'
break
exit = False
keys_list = []
if len(d['collection']) == 0:
json_resp['report_message'] = 'DOCUMENTS FILE - ' + rep_name + ' - You must provide at least a report.'
break
for document in d['collection']:
ind = d['collection'].index(document)
if 'document_id' not in list(document.keys()) or document['document_id'] is None:
json_resp['reports_message'] = 'DOCUMENTS FILE - ' + rep_name + ' - The ' + str(
ind) + ' document does not contain the "document_id" key which is mandatory.'
exit = True
break
doc_keys = list(document.keys())
# docs_tot.append(str(document['document_id']))
if 'language' in document.keys():
doc_keys.remove('language')
doc_keys.remove('document_id')
is_none = True
for key in doc_keys:
if key != 'document_id' and key != 'language':
if document[key] is not None:
is_none = False
break
if (('document_id' in doc_keys and len(doc_keys) == 1) or ('document_id' in doc_keys and 'language' in doc_keys and len(doc_keys) == 2)) or is_none:
json_resp['reports_message'] = 'DOCUMENTS FILE - ' + rep_name + ' - The ' + str(ind) + ' document does not contain the document\' s text.'
keys_list.extend(doc_keys)
if str(document['document_id']) in documents_ids:
json_resp['report_message'] = 'WARNING DOCUMENTS FILE - ' + rep_name + ' - The id ' + str(document['document_id']) + ' is duplicated.'
else:
documents_ids.append(str(document['document_id']))
count_both = 0
for el in doc_keys:
if el not in jsonann and el not in jsondisp:
count_both += 1
if count_both == len(doc_keys):
json_resp['fields_message'] = 'WARNING REPORT FIELDS TO DISPLAY AND ANNOTATE - ' + rep_name + ' - With this configuration the report at the row: ' + str(
ind) + ' would not be displayed since the columns to display are all empty for that report.'
if exit == True:
break
if isinstance(r[i], str):
r[i].close()
if len(reports) > 0:
if json_resp['report_message'] == '':
json_resp['report_message'] = 'Ok'
for i in range(len(pubmedfiles)):
# Error if the file is not csv
if not pubmedfiles[i].name.endswith('csv') and not pubmedfiles[i].name.endswith('json') and not pubmedfiles[i].name.endswith(
'txt'):
json_resp['pubmed_message'] = 'PUBMED FILE - ' + pubmedfiles[i].name + ' - The file must be .csv or .json or .txt'
break
if pubmedfiles[i].name.endswith('csv'):
try:
df = pd.read_csv(pubmedfiles[i])
df = df.where(pd.notnull(df), None)
df = df.reset_index(drop=True) # Useful if the csv includes only commas
except Exception as e:
print(e)
json_resp['pubmed_message'] = 'PUBMED FILE - ' + pubmedfiles[
i].name + ' - An error occurred while parsing the csv. Check if it is well formatted. Check if it contains as many columns as they are declared in the header.'
pass
else:
# check if colunns are allowed and without duplicates
cols = list(df.columns)
list_db_col = ['document_id']
missing = False
for el in list_db_col:
if el not in cols and el == 'document_id':
json_resp['pubmed_message'] = 'PUBMED FILE - ' + pubmedfiles[i].name + ' - The column: ' + el + ' is missing, please add it.'
missing = True
break
if missing:
break
for column in cols:
null_val = df[df[column].isnull()].index.tolist()
if len(null_val) > 0:
json_resp['pubmed_message'] = 'PUBMED FILE - ' + pubmedfiles[i].name + ' - You did not inserted the '+column +' for rows: '+null_val.split(', ')
# Check if the csv is empty with 0 rows
if df.shape[0] == 0:
json_resp['pubmed_message'] = 'PUBMED FILE - ' + pubmedfiles[i].name + ' - You must provide at least a report.'
break
else:
# check if columns id_report and language have no duplicates
if 'language' in df:
df_dup = df[df.duplicated(subset=['document_id','language'], keep=False)]
else:
df_dup = df[df.duplicated(subset=['document_id'], keep=False)]
if df_dup.shape[0] > 0:
json_resp['pubmed_message'] = 'WARNING PUBMED FILE - ' + pubmedfiles[i].name + ' - The rows: ' + str(df_dup.index.to_list()) + ' are duplicated. The duplicates are ignored.'
ids = ['PUBMED_'+str(id) for id in list(df.document_id.unique())]
documents_ids.extend(ids)
elif pubmedfiles[i].name.endswith('json'):
d = json.load(pubmedfiles[i])
if 'pubmed_ids' not in d.keys():
json_resp['pubmed_message'] = 'PUBMED FILE - ' + pubmedfiles[
i].name + ' - json is not well formatted.'
break
if d['pubmed_ids'] == []:
json_resp['pubmed_message'] = 'PUBMED FILE - ' + pubmedfiles[
i].name + ' - you must provide at least an article id.'
break
if not isinstance(d['pubmed_ids'],list):
json_resp['pubmed_message'] = 'PUBMED FILE - ' + pubmedfiles[
i].name + ' - you must provide at least an article id.'
break
if len(d['pubmed_ids']) != len(list(set(d['pubmed_ids']))):
json_resp['pubmed_message'] = 'WARNING PUBMED FILE - ' + runs[
i].name + ' - some ids seem to be duplicated. They will be ignored.'
break
ids = ['PUBMED_'+str(id) for id in d['pubmed_ids']]
documents_ids.extend(ids)
elif pubmedfiles[i].name.endswith('txt'):
lines = pubmedfiles[i].readlines()
ids = ['PUBMED_'+str(line) for line in lines]
if len(lines) == 0 :
json_resp['pubmed_message'] = 'PUBMED FILE - ' + runs[
i].name + ' - the file is empty.'
break
if len(lines) != len(list(set(lines))):
json_resp['pubmed_message'] = 'WARNING PUBMED FILE - ' + runs[
i].name + ' - the file contain some duplicates: they will be ignored.'
documents_ids.extend(ids)
if len(pubmedfiles)>0:
if json_resp['pubmed_message'] == '':
json_resp['pubmed_message'] = 'Ok'
if len(topics) > 0:
for i in range(len(topics)):
if not topics[i].name.endswith('csv') and not topics[i].name.endswith('json') and not topics[i].name.endswith('txt'):
json_resp['topic_message'] = 'TOPIC FILE - ' + topics[i].name + ' - The file must be .csv or .json or .txt'
break
if topics[i].name.endswith('csv'):
# if not labels[i].name.endswith('csv'):
# json_resp['label_message'] = 'LABELS FILE - ' + labels[i].name + ' - The file must be .csv'
try:
df = pd.read_csv(topics[i])
df = df.where(pd.notnull(df), None)
df = df.reset_index(drop=True)
except Exception as e:
json_resp['topic_message'] = 'TOPIC FILE - ' + topics[
i].name + ' - An error occurred while parsing the csv. Check if is well formatted.'
pass
else:
cols = list(df.columns)
list_db_col = ['topic_id','title','description','narrative']
# if 'usecase' in cols:
# df['usecase'] = df['usecase'].str.lower()
#
esco = False
for el in list_db_col:
if el not in cols and el == 'topic_id':
esco = True
json_resp['topic_message'] = 'TOPIC FILE - ' + topics[
i].name + ' - The column: ' + el + ' is not present and it is mandatory.'
break
for el in cols:
if el not in list_db_col:
json_resp['topic_message'] = 'WARNING TOPIC FILE - ' + topics[
i].name + ' - The column: ' + el + ' is not present.'
if esco == True:
break
for el in cols:
if el not in list_db_col:
json_resp['topic_message'] = 'TOPIC FILE - ' + topics[
i].name + ' - The column ' + el + ' is not allowed.'
break
for id in list(df.topic_id.unique()):
if str(id) in topics_ids:
json_resp['topic_message'] = 'WARNING TOPIC FILE - ' + topics[
i].name + ' - The topic: ' + str(id) + ' is duplicated. The duplicates are ignored.'
else:
topics_ids.append(str(id))
if df.shape[0] == 0:
json_resp['topic_message'] = 'TOPIC FILE - ' + topics[
i].name + ' - You must provide at least a row.'
break
else:
# check if columns annotation_label and name have no duplicates
df_dup = df[df.duplicated(subset=['topic_id'], keep=False)]
if df_dup.shape[0] > 0:
json_resp['topic_message'] = 'WARNING TOPIC FILE - ' + topics[
i].name + ' - The rows: ' + str(
df_dup.index.to_list()) + ' are duplicated. The duplicates will be ignored.'
el = ''
# if None in df['usecase'].tolist():
# el = 'usecase'
if None in df['topic_id'].tolist():
el = 'topic_id'
if el != '':
lista = df[el].tolist()
ind = lista.index(None)
json_resp['topic_message'] = 'TOPIC FILE - ' + topics[
i].name + ' - The column ' + el + ' is empty at the row: ' + str(ind) + ' .'
break
elif topics[i].name.endswith('.json'):
# with open(topics[i], 'r') as f:
d = json.load(topics[i])
doc_top = []
if 'topics' not in d.keys():
json_resp['topic_message'] = 'TOPIC FILE - ' + topics[
i].name + ' - json is not well formatted.'
break
if d['topics'] == []:
json_resp['topic_message'] = 'TOPIC FILE - ' + labels[
i].name + ' - you must provide at least a label.'
break
for topic in d['topics']:
ind = d['topics'].index(topic)
if 'topic_id' not in list(topic.keys()):
json_resp['topic_message'] = 'TOPIC FILE - ' + topics[
i].name + ' - you must provide a topic number in the '+str(ind)+' th topic.'
break
doc_top.append(str(topic['topic_id']))
if str(topic['topic_id']) in topics_ids:
json_resp['topic_message'] = 'WARNING TOPIC FILE - ' + topics[
i].name + ' - the list of topics contains duplicates. They will be ignored.'
else:
topics_ids.append(str(topic['topic_id']))
if len(doc_top) > len(set(doc_top)):
json_resp['topic_message'] = 'WARNING TOPIC FILE - ' + topics[
i].name + ' - the list of topics contains duplicates. They will be ignored.'
elif topics[i].name.endswith('.txt'):
arr_to_ret = elaborate_runs(runs)
topics_ids = elaborate_TREC_topic_files([],topics[i],'check')
topics_ids = [str(i) for i in topics_ids]
if isinstance(topics_ids,list) == False:
json_resp['topic_message'] = 'TOPIC FILE - ' + topics[i].name + ' - topics are not well formatted.'
break
if json_resp['topic_message'] == '':
json_resp['topic_message'] = 'Ok'
if len(runs) > 0:
for i in range(len(runs)):
if not runs[i].name.endswith('csv') and not runs[i].name.endswith('json') and not runs[i].name.endswith('txt'):
json_resp['runs_message'] = 'RUNS FILE - ' + runs[i].name + ' - The file must be .csv or .json or .txt'
break
if runs[i].name.endswith('csv'):
print(runs[i])
# if not labels[i].name.endswith('csv'):
# json_resp['label_message'] = 'LABELS FILE - ' + labels[i].name + ' - The file must be .csv'
try:
df = pd.read_csv(runs[i])
df = df.where(pd.notnull(df), None)
df = df.reset_index(drop=True)
except Exception as e:
print(e)
json_resp['runs_message'] = 'RUNS FILE - ' + runs[
i].name + ' - An error occurred while parsing the csv. Check if is well formatted.'
pass
else:
cols = list(df.columns)
list_db_col = ['topic_id', 'document_id','language']
esco = False
for el in list_db_col:
if el not in cols and el != 'language':
esco = True
json_resp['runs_message'] = 'RUNS FILE - ' + runs[
i].name + ' - The column: ' + el + ' is not present and it is mandatory.'
break
if esco == True:
break
for el in cols:
if el not in list_db_col:
json_resp['runs_message'] = 'RUNS FILE - ' + runs[
i].name + ' - The column ' + el + ' is not allowed.'
break
if df.shape[0] == 0:
json_resp['runs_message'] = 'RUNS FILE - ' + runs[
i].name + ' - You must provide at least a row.'
break
else:
# check if columns annotation_label and name have no duplicates
if 'language' in df:
df_dup = df[df.duplicated(subset=['topic_id','document_id','language'], keep=False)]
else:
df_dup = df[df.duplicated(subset=['topic_id','document_id'], keep=False)]
if df_dup.shape[0] > 0:
json_resp['runs_message'] = 'WARNING RUNS FILE - ' + runs[
i].name + ' - The rows: ' + str(
df_dup.index.to_list()) + ' are duplicated. The duplicates will be ignored.'
el = ''
# if None in df['usecase'].tolist():
# el = 'usecase'
if None in df['topic_id'].tolist():
el = 'topic_id'
if None in df['document_id'].tolist():
el = 'document_id'
if 'language' in df:
el = 'language'
if el != '':
lista = df[el].tolist()
ind = lista.index(None)
json_resp['runs_message'] = 'RUNS FILE - ' + runs[
i].name + ' - The column ' + el + ' is empty at the row: ' + str(ind) + ' .'
break
tops.extend(df.topic_id.unique())
for el in tops:
if str(el) not in topics_ids:
json_resp['runs_message'] = 'RUNS FILE - ' + runs[
i].name + ' - The topic: ' + str(el) + ' is not in the provided list of topics.'
break
docs.extend(df.document_id.unique())
for el in docs:
if str(el) not in documents_ids:
json_resp['runs_message'] = 'RUNS FILE - ' + runs[
i].name + ' - The document: ' + str(el) + ' is not in the provided list of documents.'
break
elif runs[i].name.endswith('.json'):
# with open(runs[i], 'r') as f:
d = json.load(runs[i])
if 'run' not in d.keys():
json_resp['runs_message'] = 'RUNS FILE - ' + runs[
i].name + ' - json is not well formatted.'
break
if d['run'] == []:
json_resp['runs_message'] = 'RUNS FILE - ' + runs[
i].name + ' - you must provide at least a topic and one or more documents associated.'
break
for r in d['run']:
ind = d['run'].index(r)
if 'topic_id' not in r.keys():
json_resp['runs_message'] = 'RUNS FILE - ' + topics[
i].name + ' - you must provide a topic number in the ' + str(ind) + ' th element.'
break
if 'documents' not in r.keys():
json_resp['runs_message'] = 'RUNS FILE - ' + topics[
i].name + ' - you must provide a topic\'s list for the topic: ' + str(r['num']) + '.'
break
if isinstance(r['documents'][0],dict):
doc1 = [el['document_id'] for el in r['documents']]
else:
doc1 = r['documents']
for el in r['documents']:
if isinstance(el,dict):
if 'document_id' not in el.keys() and 'language' not in el.keys():
json_resp['runs_message'] = 'RUNS FILE - ' + topics[
i].name + ' - you must provide a document_id and a language'
break
tops.append(r['topic_id'])
docs.extend(doc1)
for el in tops:
if el not in topics_ids:
json_resp['runs_message'] = 'RUNS FILE - ' + runs[
i].name + ' - The topic: ' + str(el) + ' is not in the provided list of topics.'
break
for el in docs:
if str(el) not in documents_ids:
json_resp['runs_message'] = 'RUNS FILE - ' + runs[
i].name + ' - The document: ' + str(el) + ' is not in the provided list of documents.'
break
elif runs[i].name.endswith('.txt'):
# with open(runs[i], 'r') as f:
lines = runs[i].readlines()
tups = []
for line in lines:
line = line.decode('utf-8')
if len(line.split()) == 2 or len(line.split() == 3):
topic = line.split()[0]
tops.append(topic)
doc = line.split()[1]
docs.append(doc)
tups.append((topic,doc))
elif len(line.split()) > 2: #TREC
topic = line.split()[0]
tops.append(topic)
doc = line.split()[2]
tups.append((topic, doc))
docs.append(doc)
else:
json_resp['run_message'] = 'RUNS FILE - ' + runs[i].name + ' - txt file is not well formatted.'
break
for el in tops:
if str(el) not in topics_ids:
json_resp['runs_message'] = 'RUNS FILE - ' + runs[
i].name + ' - The topic: ' + str(el) + ' is not in the provided list of topics.'
break
for el in docs:
if str(el) not in documents_ids:
json_resp['runs_message'] = 'RUNS FILE - ' + runs[
i].name + ' - The document: ' + str(el) + ' is not in the provided list of documents.'
break
if json_resp['runs_message'] == '':
json_resp['runs_message'] = 'Ok'
if len(concepts) > 0:
for i in range(len(concepts)):
# Check if it is a csv
if concepts[i].name.endswith('csv'):
# json_resp['concept_message'] = 'CONCEPTS FILE - ' + concepts[i].name + ' - The file must be .csv'
try:
df = pd.read_csv(concepts[i])
df = df.where(pd.notnull(df), None)
df = df.reset_index(drop=True)
except Exception as e:
json_resp['concept_message'] = 'CONCEPTS FILE - ' + concepts[i].name + ' - An error occurred while parsing the csv. Check if it is well formatted. '
pass
# print(df)
else:
cols = list(df.columns)
columns_wrong = False
list_db_col = ['concept_url', 'concept_name']
# if 'usecase' in cols:
# df['usecase'] = df['usecase'].str.lower()
# Check if all the mandatory cols are present
for el in list_db_col:
if el not in cols:
columns_wrong = True
json_resp['concept_message'] = 'CONCEPTS FILE - ' + concepts[i].name + ' - The column ' + el + ' is not present. The only columns allowed are: concept_utl, concept_name, usecase, area'
break
if columns_wrong == True:
break
# if load_concepts is not None:
# for el in load_concepts:
# if el in df.usecase.unique():
# json_resp['concept_message'] = 'CONCEPTS FILE - ' + concepts[i].name + ' - You can not insert concepts files for the use case ' + el + ' after having decide to use EXAMODE concepts.'
# break
# header length must be the same, no extra columns
if len(list_db_col) != len(cols):
json_resp['concept_message'] = 'CONCEPTS FILE - ' + concepts[i].name + ' - The columns allowed are: concept_url, concept_name. If you inserted more (less) columns please, remove (add) them.'
break
# Check if the df has no rows
if df.shape[0] == 0:
json_resp['concept_message'] = 'CONCEPTS FILE - ' + concepts[i].name + ' - You must provide at least a concept.'
break
else:
# check if column concept_url has no duplicates
df_dup = df[df.duplicated(subset=['concept_url'], keep=False)]
if df_dup.shape[0] > 0:
json_resp['concept_message'] = 'WARNING CONCEPTS FILE - ' + concepts[i].name + ' - The rows: ' + str(df_dup.index.to_list()) + ' are duplicated. The duplicates will be ignored.'
# Check if there are None in mandatory cols
el = ''
if None in df['concept_url'].tolist():
el = 'concept_url'
elif None in df['concept_name'].tolist():
el = 'concept_name'
if el != '':
lista = df[el].tolist()
ind = lista.index(None)
json_resp['concept_message'] = 'CONCEPTS FILE - ' + concepts[i].name + ' - The column ' + el + ' is empty at the row: ' + str(ind) + ' .'
break
if concepts[i].name.endswith('json'):
# json_resp['concept_message'] = 'CONCEPTS FILE - ' + concepts[i].name + ' - The file must be .csv'
d = json.load(concepts[i])
if 'concepts_list' not in d.keys():
json_resp['concept_message'] = 'CONCEPTS FILE - ' + concepts[
i].name + ' - json is not well formatted'
break
if not isinstance(d['concepts_list'],list):
json_resp['concept_message'] = 'CONCEPTS FILE - ' + concepts[
i].name + ' - json is not well formatted'
break
if len(d['concepts_list']) == 0:
json_resp['concept_message'] = 'CONCEPTS FILE - ' + concepts[
i].name + ' - the list is empty'
break
dup_list = []
for element in d['concepts_list']:
if 'concept_url' not in element.keys() and 'concept_name' not in element.keys():
json_resp['concept_message'] = 'CONCEPTS FILE - ' + concepts[
i].name + ' - each element in the list of concepts must contains concept_url and concept_name'
break
for element in d['concepts_list']:
if element['concept_url'] in dup_list:
json_resp['concept_message'] = 'WARNING CONCEPTS FILE - ' + concepts[
i].name + ' - Some concepts are duplicated, these will be ignored'
if json_resp['concept_message'] == '':
json_resp['concept_message'] = 'Ok'
if len(labels) > 0:
for i in range(len(labels)):
if labels[i].name.endswith('csv'):
# if not labels[i].name.endswith('csv'):
# json_resp['label_message'] = 'LABELS FILE - ' + labels[i].name + ' - The file must be .csv'
try:
df = pd.read_csv(labels[i])
df = df.where(pd.notnull(df), None)
df = df.reset_index(drop=True)
except Exception as e:
json_resp['label_message'] = 'LABELS FILE - ' + labels[i].name + ' - An error occurred while parsing the csv. Check if is well formatted.'
pass
else:
cols = list(df.columns)
list_db_col = ['label']
# if 'usecase' in cols:
# df['usecase'] = df['usecase'].str.lower()
#
esco = False
for el in list_db_col:
if el not in cols:
esco = True
json_resp['label_message'] = 'LABELS FILE - ' + labels[i].name + ' - The columns: ' + el + ' is not present. The columns allowed are: labels, usecase.'
if esco == True:
break
if len(cols) != len(list_db_col):
json_resp['label_message'] = 'LABELS FILE - ' + labels[i].name + ' - The columns allowed are: label, usecase. If you inserted more (less) columns please, remove (add) them.'
break
if df.shape[0] == 0:
json_resp['label_message'] = 'LABELS FILE - ' + labels[i].name + ' - You must provide at least a row.'
break
else:
# check if columns annotation_label and name have no duplicates
df_dup = df[df.duplicated(subset=['label'], keep=False)]
if df_dup.shape[0] > 0:
json_resp['label_message'] = 'WARNING LABELS FILE - ' + labels[i].name + ' - The rows: ' + str(df_dup.index.to_list()) + ' are duplicated. The duplicates will be ignored.'
el = ''
# if None in df['usecase'].tolist():
# el = 'usecase'
if None in df['label'].tolist():
el = 'label'
if el != '':
lista = df[el].tolist()
ind = lista.index(None)
json_resp['label_message'] = 'LABELS FILE - ' + labels[i].name + ' - The column ' + el + ' is empty at the row: ' + str(ind) + ' .'
break
elif labels[i].name.endswith('.json'):
# with open(labels[i],'r') as f:
d = json.load(labels[i])
if 'labels' not in d.keys():
json_resp['label_message'] = 'LABELS FILE - ' + labels[i].name + ' - json is not well formatted.'
break
if d['labels'] == []:
json_resp['label_message'] = 'LABELS FILE - ' + labels[
i].name + ' - you must provide at least a label.'
break
labels = d['labels']
if len(labels) > len(set(labels)):
json_resp['label_message'] = 'WARNING LABELS FILE - ' + labels[
i].name + ' - the list of labels contains duplicates. They will be ignored.'
elif labels[i].name.endswith('.txt'):
# with open(labels[i], 'r') as f:
lines = labels[i].readlines()
labels_list = []
if len(lines) == 0:
json_resp['label_message'] = 'LABELS FILE - ' + labels[i].name + ' - you must provide at least a label.'
break
for line in lines:
line = line.decode('utf-8')
if line not in labels_list:
labels_list.append(line)
else:
json_resp['label_message'] = 'WARNING LABELS FILE - ' + labels[
i].name + ' - the list of labels contains duplicates. They will be ignored.'
if json_resp['label_message'] == '':
json_resp['label_message'] = 'Ok'
if len(jsonAnn) == 0 and len(jsonDisp) == 0 and len(reports)>0:
json_resp['fields_message'] = 'REPORT FIELDS TO DISPLAY AND ANNOTATE - Please provide at least one field to be displayed and/or at least one field to be annotated.'
elif len(jsonAnn) == 0 and len(reports)>0:
if json_resp['fields_message'] == '':
json_resp['fields_message'] = 'WARNING REPORT FIELDS TO ANNOTATE - ok but with this configuration you will not be able to perform mention annotation and linking. Please, select also at least a field to annotate if you want to find some mentions and link them.'
if len(reports) > 0:
if json_resp['fields_message'] == '':
json_resp['fields_message'] = 'Ok'
except Exception as e:
print(e)
json_resp['general_message'] = 'An error occurred. Please check if it is similar to the example we provided.'
return json_resp
else:
if json_resp['general_message'] == '':
json_resp['general_message'] = 'Ok'
return json_resp
import time
from datetime import date
def _insert_concept(cursor, concept_url, concept_name):
    """Insert a concept and its 'default_area' membership if not already present.

    Shared by the csv and json concept-file branches of configure_data.
    """
    cursor.execute("SELECT concept_url,json_concept FROM concept WHERE concept_url = %s;",
                   (str(concept_url),))
    ans = cursor.fetchall()
    if len(ans) == 0:
        cursor.execute("INSERT INTO concept (concept_url,name) VALUES (%s,%s);",
                       (str(concept_url), str(concept_name)))
    cursor.execute("SELECT * FROM belong_to WHERE concept_url = %s AND name=%s;",
                   (str(concept_url), 'default_area'))
    ans = cursor.fetchall()
    if len(ans) == 0:
        cursor.execute("INSERT INTO belong_to (concept_url,name) VALUES (%s,%s);",
                       (str(concept_url), 'default_area'))


def configure_data(pubmedfiles,reports, labels, concepts, jsondisp, jsonann, jsonall, username, password, topics,runs,tfidf):
    """Wipe and repopulate the database from the user-provided configuration files.

    Run only after the files have been validated. Steps (each updates
    ``error_location`` so a failure can be reported precisely):
      1. delete all existing data inside a single transaction;
      2. recreate the admin user and the default semantic area;
      3. load topics, document collections, PubMed articles and runs;
      4. load annotation labels and concepts;
      5. persist the display/annotation field lists to a new versioned
         ``fields<N>.json`` config file;
      6. optionally precompute the per-topic TF-IDF highlight map.

    Returns:
        dict: ``{'message': 'Ok'}`` on success, or
        ``{'error': 'an error occurred in: <step>.'}`` on failure (the
        transaction is rolled back and the new config file is removed).
    """
    filename = ''
    language = 'english'
    error_location = 'database'  # step name used in the error response
    created_file = False  # True once the fields config file has been written
    try:
        with transaction.atomic():
            cursor = connection.cursor()
            # Children first, parents last, to satisfy FK constraints.
            cursor.execute("DELETE FROM annotate;")
            cursor.execute("DELETE FROM linked;")
            cursor.execute("DELETE FROM associate;")
            cursor.execute("DELETE FROM contains;")
            cursor.execute("DELETE FROM mention;")
            cursor.execute("DELETE FROM belong_to;")
            cursor.execute("DELETE FROM annotation_label;")
            cursor.execute("DELETE FROM concept;")
            cursor.execute("DELETE FROM ground_truth_log_file;")
            cursor.execute("DELETE FROM topic_has_document;")
            cursor.execute("DELETE FROM report;")
            cursor.execute("DELETE FROM use_case;")
            cursor.execute("DELETE FROM semantic_area;")
            cursor.execute("DELETE FROM public.user WHERE username = 'Test'")
            cursor.execute("INSERT INTO semantic_area VALUES (%s)", ('default_area',))
            if username is not None and password is not None:
                # NOTE(review): md5 is a weak password hash; kept for
                # compatibility with the rest of the code base.
                cursor.execute("INSERT INTO public.user (username,password,profile,ns_id) VALUES(%s,%s,%s,%s);",
                               (str(username), hashlib.md5(str(password).encode()).hexdigest(), 'Admin', 'Human'))

            # Build the three field lists from the comma-separated inputs:
            # fields to display, fields to annotate, and their union.
            fields = []
            all_fields = []
            fields_to_ann = []
            jsonall = ''.join(jsonall).split(',')
            jsondisp = ''.join(jsondisp).split(',')
            jsonann = ''.join(jsonann).split(',')
            for el in jsonall:
                if len(el) > 0:
                    all_fields.append(el)
            for el in jsondisp:
                if len(el) > 0:
                    fields.append(el)
                    if el not in all_fields:
                        all_fields.append(el)
            for el in jsonann:
                if len(el) > 0:
                    fields_to_ann.append(el)
                    if el not in all_fields:
                        all_fields.append(el)

            language = 'english'
            arr_to_ret = elaborate_runs(runs)

            error_location = 'Topic'
            for topic in topics:
                if topic.name.endswith('txt'):
                    elaborate_TREC_topic_files(arr_to_ret, topic)
                elif topic.name.endswith('json'):
                    process_topic_json_file(arr_to_ret, topic)
                elif topic.name.endswith('csv'):
                    process_topic_csv_file(arr_to_ret, topic)

            error_location = 'Collection'
            for file in reports:
                reps = decompress_files([file])
                for f in reps:
                    if isinstance(f, str):
                        # decompress_files returned a temp-file name, not a handle.
                        file_name = f
                        workpath = os.path.dirname(os.path.abspath(__file__))  # Path this .py file is in
                        # Portable join (was the Windows-only 'static\\tmp\\' + f).
                        f = os.path.join(workpath, 'static', 'tmp', f)
                    else:
                        file_name = f.name
                    if file_name.endswith('json'):
                        find_docs_in_json_collection(arr_to_ret, f)
                    elif file_name.endswith('csv'):
                        find_docs_in_csv_collection(arr_to_ret, f)
            for file in pubmedfiles:
                find_docs_in_json_pubmed_collection(arr_to_ret, file)

            error_location = 'Runs'
            for el in arr_to_ret:
                if len(el) == 3:
                    language = el[2]
                topic = UseCase.objects.get(name=el[0])
                # NOTE(review): the lookup language is hard-coded to 'english'
                # even when the run row carries one -- confirm this is intended.
                doc = Report.objects.get(id_report=str(el[1]), language='english')
                TopicHasDocument.objects.get_or_create(name=topic, language=doc.language, id_report=doc)

            # ---- annotation labels ----
            if len(labels) > 0:
                labs = []
                error_location = 'Labels'
                for label_file in labels:
                    if label_file.name.endswith('csv'):
                        df_labels = pd.read_csv(label_file)
                        df_labels = df_labels.where(pd.notnull(df_labels), None)
                        df_labels = df_labels.reset_index(drop=True)
                        count_lab_rows = df_labels.shape[0]
                        for i in range(count_lab_rows):
                            label = str(df_labels.loc[i, 'label'])
                            labs.append(label.rstrip())
                    elif label_file.name.endswith('json'):
                        d = json.load(label_file)
                        # Do not rebind the 'labels' parameter we are iterating.
                        for label in d['labels']:
                            labs.append(label.rstrip())
                    elif label_file.name.endswith('txt'):
                        lines = label_file.readlines()
                        for line in lines:
                            line = line.decode('utf-8')
                            labs.append(line.replace('\n', ''))
                for label in labs:
                    # seq_number = 1 + current max; recomputed per label because
                    # labels are inserted one at a time.
                    cursor.execute('SELECT * FROM annotation_label')
                    ans = cursor.fetchall()
                    if len(ans) == 0:
                        seq_number = 1
                    else:
                        cursor.execute('SELECT seq_number FROM annotation_label ORDER BY seq_number DESC;')
                        ans = cursor.fetchall()
                        seq_number = int(ans[0][0]) + 1
                    cursor.execute("SELECT * FROM annotation_label WHERE label = %s;",
                                   (str(label),))
                    ans = cursor.fetchall()
                    if len(ans) == 0:
                        cursor.execute("INSERT INTO annotation_label (label,seq_number) VALUES (%s,%s);",
                                       (str(label), int(seq_number)))

            # ---- concepts ----
            error_location = 'Concepts'
            for concept_file in concepts:
                if concept_file.name.endswith('csv'):
                    df_concept = pd.read_csv(concept_file)
                    df_concept = df_concept.where(pd.notnull(df_concept), None)
                    df_concept = df_concept.reset_index(drop=True)
                    count_conc_rows = df_concept.shape[0]
                    for i in range(count_conc_rows):
                        df_concept = df_concept.where(pd.notnull(df_concept), None)
                        concept_url = str(df_concept.loc[i, 'concept_url'])
                        concept_name = str(df_concept.loc[i, 'concept_name'])
                        _insert_concept(cursor, concept_url, concept_name)
                elif concept_file.name.endswith('json'):
                    d = json.load(concept_file)
                    for element in d['concepts_list']:
                        _insert_concept(cursor, str(element['concept_url']), str(element['concept_name']))

            # ---- persist the fields configuration to a new versioned file ----
            data = {}
            data['fields'] = fields
            data['fields_to_ann'] = fields_to_ann
            data['all_fields'] = all_fields
            version = get_version()
            workpath = os.path.dirname(os.path.abspath(__file__))  # Path this .py file is in
            version_new = int(version) + 1
            filename = 'fields' + str(version_new)
            created_file = False
            with open(os.path.join(workpath, './config_files/data/' + filename + '.json'), 'w') as outfile:
                json.dump(data, outfile)
            created_file = True
    except (Exception, psycopg2.Error) as e:
        print(e)
        print('rollback')
        # transaction.atomic() has already rolled the database back; undo the
        # config file we may have created.
        if created_file == True:
            workpath = os.path.dirname(os.path.abspath(__file__))
            if filename != '' and filename != 'fields0':
                path = os.path.join(workpath, './config_files/data/' + filename + '.json')
                os.remove(path)
        json_resp = {'error': 'an error occurred in: ' + error_location + '.'}
        return json_resp
    else:
        if created_file == True:
            # Remove stale versions of the fields config, keeping fields0.json
            # and the file just written. (The 'with' above already closed the
            # file; the old explicit outfile.close() was redundant.)
            for filen in os.listdir(os.path.join(workpath, './config_files/data')):
                if filen.endswith('json'):
                    if filen != '' and filen != 'fields0.json' and filen != filename + '.json':
                        path = os.path.join(workpath, './config_files/data/' + filen)
                        os.remove(path)
        if tfidf is not None or (len(runs) > 0 and len(topics) > 0 and (len(reports) > 0) or len(pubmedfiles) > 0):
            print(str(tfidf))
            cursor = connection.cursor()
            st = time.time()
            # BUGFIX: tfidf may be None when this branch is entered via the
            # runs/topics/reports condition; int(None) used to raise TypeError.
            if tfidf is not None and int(tfidf) > 0:
                # Store the requested top-k in the shared config file.
                workpath = os.path.dirname(os.path.abspath(__file__))
                path1 = os.path.join(workpath, './config_files/config.json')
                with open(path1, 'r') as g:  # BUGFIX: handle was never closed
                    data = json.load(g)
                data['TF-IDF_k'] = tfidf
                with open(path1, 'w') as f:
                    json.dump(data, f)
                t = UseCase.objects.all()
                cursor = connection.cursor()
                json_to_write = {}
                for top in t:
                    print('topic:' + str(top))
                    json_to_write[top.name] = {}
                    topic = {}
                    corpus = []
                    cursor.execute(
                        "SELECT r.id_report,r.language,r.report_json FROM report as r inner join topic_has_document as t on t.id_report = r.id_report and r.language = t.language where t.name = %s",
                        [str(top.name)])
                    ans = cursor.fetchall()
                    # Build the corpus: one {document_id, text} entry per report,
                    # skipping documents whose language NLTK cannot process.
                    for el in ans:
                        e = json.loads(el[2])
                        r_j1 = {}
                        r_j1['document_id'] = str(el[0])
                        r_j1['text'] = ''
                        for k in e.keys():
                            if k != 'document_id' or (str(el[0]).startswith('PUBMED_') and (k == 'abstract' or k == 'title')):
                                r_j1['text'] = r_j1['text'] + ' ' + str(e[k])
                        if el[1].lower() in LANGUAGES_NLTK:
                            corpus.append(r_j1)
                    topic['title'] = top.title
                    topic['description'] = top.description
                    # Compute the top-k matching words for each document of the topic.
                    for el in ans:
                        if el[1].lower() in LANGUAGES_NLTK:
                            language = el[1].lower()
                            start = time.time()
                            print('working on ', str(el[0]))
                            e = json.loads(el[2])
                            r_j1 = {}
                            r_j1['document_id'] = str(el[0])
                            r_j1['text'] = ''
                            for k in e.keys():
                                if isinstance(e[k], list):
                                    e[k] = ', '.join(e[k])
                                if k != 'document_id' and k != 'language' and e[k] is not None:
                                    r_j1['text'] = r_j1['text'] + ' ' + e[k]
                            tfidf_matcher = QueryDocMatcher(topic=topic, doc=r_j1, corpus=corpus, language=language)
                            top_k_matching_words = tfidf_matcher.get_words_to_highlight()
                            json_to_write[top.name][str(el[0])] = top_k_matching_words
                            end = time.time()
                            print('elaborated in ' + str(end - start) + ' seconds')
            else:
                # No TF-IDF requested: write an empty map.
                json_to_write = {}
                end = time.time()
                print('time', end - st)
            path2 = os.path.join(workpath, './config_files/tf_idf_map.json')
            with open(path2, 'w') as f:
                json.dump(json_to_write, f)
        json_resp = {'message': 'Ok'}
        return json_resp
#-------------------UPDATE----------------------------
def check_for_update(type_req, pubmedfiles, reports, labels, concepts, jsonDisp, jsonAnn, jsonDispUp, jsonAnnUp,topics,runs,tf_idf):
"""This method checks the files inserted by the user to update the db"""
keys = get_fields_from_json()
ann = keys['fields_to_ann']
disp = keys['fields']
tops = []
docs = []
if jsonDispUp is not None and jsonAnnUp is not None:
jsonDispUp = ''.join(jsonDispUp)
jsonAnnUp = ''.join(jsonAnnUp)
jsonDispUp = jsonDispUp.split(',')
jsonAnnUp = jsonAnnUp.split(',')
try:
cursor = connection.cursor()
message = ''
if tf_idf is not None:
message = 'TF-IDF - the value must include only digits'
return message
if len(concepts) > 0:
message = ''
for i in range(len(concepts)):
if not concepts[i].name.endswith('csv') and not concepts[i].name.endswith('json'):
message = 'CONCEPTS FILE - ' + labels[i].name + ' - The file must be .csv, .json'
return message
if concepts[i].name.endswith('csv'):
try:
df = pd.read_csv(concepts[i])
df = df.where(pd.notnull(df), None)
df = df.reset_index(drop=True)
except Exception as e:
message = 'CONCEPTS FILE - ' + concepts[
i].name + ' - An error occurred while parsing the csv. Check if it is well formatted.'
return message
else:
list_db_col = ['concept_url', 'concept_name']
cols = list(df.columns)
for el in list_db_col:
if el not in cols:
message = 'CONCEPTS FILE - ' + concepts[i].name + ' - The columns: ' + el + ' is missing. Please, add it.'
return message
if len(list_db_col) != len(cols):
message = 'CONCEPTS FILE - ' + concepts[i].name + ' - The columns allowed are: concept_url, concept_name. If you inserted more (less) columns please, remove (add) them.'
return message
if df.shape[0] == 0:
message = 'CONCEPTS FILE - ' + concepts[i].name + ' - You must provide at least a concept.'
return message
else:
# duplicates in file
df_dup = df[df.duplicated(subset=['concept_url'], keep=False)]
if df_dup.shape[0] > 0:
message = 'WARNING CONCEPTS FILE - ' + concepts[i].name + ' - The rows: ' + str(
df_dup.index.to_list()) + ' are duplicated. The duplicates will be ignored.'
el = ''
if None in df['concept_url'].tolist():
el = 'concept_url'
elif None in df['concept_name'].tolist():
el = 'concept_name'
if el != '':
lista = df[el].tolist()
ind = lista.index(None)
message = 'CONCEPTS FILE - ' + concepts[i].name + ' - The column ' + el + ' is empty at the row: ' + str(ind) + '.'
return message
# Check for duplicates in db
for ind in range(df.shape[0]):
cursor.execute('SELECT COUNT(*) FROM concept WHERE concept_url = %s',
[str(df.loc[ind, 'concept_url'])])
num = cursor.fetchone()
cursor.execute('SELECT COUNT(*) FROM belong_to WHERE concept_url = %s and name = %s',
[str(df.loc[ind, 'concept_url']),str(df.loc[ind, 'area'])])
num_b = cursor.fetchone()
if num[0] > 0 and num_b[0] > 0:
message = 'WARNING CONCEPTS FILE - ' + concepts[i].name + ' - The concept: ' + str(
df.loc[ind, 'concept_url']) + ' is already present in the database. It will be ignored.'
if concepts[i].name.endswith('json'):
# json_resp['concept_message'] = 'CONCEPTS FILE - ' + concepts[i].name + ' - The file must be .csv'
d = json.load(concepts[i])
if 'concepts_list' in d.keys():
message = 'CONCEPTS FILE - ' + concepts[
i].name + ' - json is not well formatted'
return message
if not isinstance(d['concepts_list'], list):
message = 'CONCEPTS FILE - ' + concepts[
i].name + ' - json is not well formatted'
return message
if len(d['concepts_list']) == 0:
message = 'CONCEPTS FILE - ' + concepts[
i].name + ' - the list is empty'
return message
dup_list = []
for element in d['concepts_list']:
if 'concept_url' not in element.keys() and 'concept_name' not in element.keys():
message = 'CONCEPTS FILE - ' + concepts[
i].name + ' - each element in the list of concepts must contains concept_url and concept_name fields'
return message
for element in d['concepts_list']:
if element['concept_url'] in dup_list:
message = 'WARNING CONCEPTS FILE - ' + concepts[
i].name + ' - Some concepts are duplicated, these will be ignored'
return message
elif len(labels) > 0:
message = ''
for i in range(len(labels)):
if not labels[i].name.endswith('csv') and not labels[i].name.endswith('json') and not labels[i].name.endswith('txt'):
message = 'LABELS FILE - ' + labels[i].name + ' - The file must be .csv, .json, .txt'
return message
if labels[i].name.endswith('csv'):
try:
df = pd.read_csv(labels[i])
df = df.where(pd.notnull(df), None)
df = df.reset_index(drop=True)
except Exception as e:
message = 'LABELS FILE - ' + labels[i].name + ' - An error occurred while parsing the csv. Check if is well formatted.'
return message
else:
cols = list(df.columns)
list_db_col = ['label']
# if 'usecase' in cols:
# df['usecase'] = df['usecase'].str.lower()
#
esco = False
for el in list_db_col:
if el not in cols:
esco = True
message = 'LABELS FILE - ' + labels[i].name + ' - The columns: ' + el + ' is not present. The columns allowed are: labels, usecase.'
return message
if esco == True:
break
if len(cols) != len(list_db_col):
message = 'LABELS FILE - ' + labels[i].name + ' - The columns allowed are: label, usecase. If you inserted more (less) columns please, remove (add) them.'
return message
if df.shape[0] == 0:
message = 'LABELS FILE - ' + labels[i].name + ' - You must provide at least a row.'
return message
else:
# check if columns annotation_label and name have no duplicates
df_dup = df[df.duplicated(subset=['label'], keep=False)]
if df_dup.shape[0] > 0:
message = 'WARNING LABELS FILE - ' + labels[i].name + ' - The rows: ' + str(df_dup.index.to_list()) + ' are duplicated. The duplicates will be ignored.'
return message
el = ''
# if None in df['usecase'].tolist():
# el = 'usecase'
if None in df['label'].tolist():
el = 'label'
if el != '':
lista = df[el].tolist()
ind = lista.index(None)
message = 'LABELS FILE - ' + labels[i].name + ' - The column ' + el + ' is empty at the row: ' + str(ind) + ' .'
return message
for ind in range(df.shape[0]):
cursor.execute('SELECT COUNT(*) FROM annotation_label WHERE label = %s',
[str(df.loc[ind, 'label'])])
num = cursor.fetchone()
if num[0] > 0:
message = 'WARNING LABELS FILE - ' + labels[i].name + ' - The label: ' + str(df.loc[ind, 'label']) + ' is already present in the database. It will be ignored.'
elif labels[i].name.endswith('.json'):
# with open(labels[i],'r') as f:
d = json.load(labels[i])
if 'labels' not in d.keys():
message = 'LABELS FILE - ' + labels[i].name + ' - json is not well formatted.'
return message
if d['labels'] == []:
message = 'LABELS FILE - ' + labels[
i].name + ' - you must provide at least a label.'
return message
l = d['labels']
if len(l) > len(set(l)):
message = 'WARNING LABELS FILE - ' + labels[
i].name + ' - the list of labels contains duplicates. They will be ignored.'
for ind in range(len(l)):
cursor.execute('SELECT COUNT(*) FROM annotation_label WHERE label = %s',
[str(l[ind])])
num = cursor.fetchone()
if num[0] > 0:
message = 'WARNING LABELS FILE - ' + labels[i].name + ' - The label: ' + str(l[ind]) + ' is already present in the database. It will be ignored.'
elif labels[i].name.endswith('.txt'):
# with open(labels[i], 'r') as f:
lines = labels[i].readlines()
labels_list = []
if len(lines) == 0:
message = 'LABELS FILE - ' + labels[i].name + ' - you must provide at least a label.'
return message
for line in lines:
line = line.decode('utf-8')
if line not in labels_list:
labels_list.append(line)
else:
message = 'WARNING LABELS FILE - ' + labels[
i].name + ' - the list of labels contains duplicates. They will be ignored.'
for ind in range(len(labels_list)):
cursor.execute('SELECT COUNT(*) FROM annotation_label WHERE label = %s',
[str(labels_list[ind])])
num = cursor.fetchone()
if num[0] > 0:
message = 'WARNING LABELS FILE - ' + labels[i].name + ' - The label: ' + str(labels_list[ind]) + ' is already present in the database. It will be ignored.'
return message
elif (len(pubmedfiles) > 0 or len(reports) > 0) and len(topics) > 0 and len(runs) > 0:
message = ''
documents_ids = []
topics_ids = []
to = UseCase.objects.all().values('name')
for el in to:
topics_ids.append(el['name'])
ids = Report.objects.all().values('id_report')
for el in ids:
documents_ids.append(str(el['id_report']))
for i in range(len(pubmedfiles)):
# Error if the file is not csv
if not pubmedfiles[i].name.endswith('csv') and not pubmedfiles[i].name.endswith('json') and not \
pubmedfiles[i].name.endswith(
'txt'):
message = 'PUBMED FILE - ' + pubmedfiles[
i].name + ' - The file must be .csv or .json or .txt'
return message
if pubmedfiles[i].name.endswith('csv'):
try:
df = pd.read_csv(pubmedfiles[i])
df = df.where(pd.notnull(df), None)
df = df.reset_index(drop=True) # Useful if the csv includes only commas
except Exception as e:
print(e)
message = 'PUBMED FILE - ' + pubmedfiles[
i].name + ' - An error occurred while parsing the csv. Check if it is well formatted. Check if it contains as many columns as they are declared in the header.'
return message
else:
# check if colunns are allowed and without duplicates
cols = list(df.columns)
list_db_col = ['document_id']
for el in list_db_col:
if el not in cols and el == 'document_id':
message = 'PUBMED FILE - ' + pubmedfiles[
i].name + ' - The column: ' + el + ' is missing, please add it.'
return message
for column in cols:
null_val = df[df[column].isnull()].index.tolist()
if len(null_val) > 0:
message = 'PUBMED FILE - ' + pubmedfiles[
i].name + ' - You did not inserted the ' + column + ' for rows: ' + null_val.split(
', ')
# Check if the csv is empty with 0 rows
if df.shape[0] == 0:
message = 'PUBMED FILE - ' + pubmedfiles[
i].name + ' - You must provide at least a report.'
return message
else:
# check if columns id_report and language have no duplicates
# if 'language' in df:
# df_dup = df[df.duplicated(subset=['document_id', 'language'], keep=False)]
# else:
df_dup = df[df.duplicated(subset=['document_id'], keep=False)]
if df_dup.shape[0] > 0:
message = 'WARNING PUBMED FILE - ' + pubmedfiles[
i].name + ' - The rows: ' + str(
df_dup.index.to_list()) + ' are duplicated. The duplicates are ignored.'
for ind in range(df.shape[0]):
found = False
id_report = 'PUBMED_' + str(df.loc[ind, 'document_id'])
cursor.execute('SELECT COUNT(*) FROM report WHERE id_report = %s AND institute = %s',
[str(id_report), 'PUBMED'])
num = cursor.fetchone()
if num[0] > 0:
message = 'WARNING PUBMED FILE - ' + pubmedfiles[i].name + ' - The report: ' + str(
id_report) + ' is already present in the database. It will be ignored.'
for el in list_db_col:
if df.loc[ind, el] is not None:
found = True
break
if found == False:
message = 'PUBMED FILE - ' + pubmedfiles[i].name + ' - The report at row ' + str(
ind) + ' has the columns: ' + ', '.join(
list_db_col) + ' empty. Provide a value for at least one of these columns.'
return message
el = ''
if None in df['document_id'].tolist():
el = 'institute'
if el != '':
lista = df[el].tolist()
ind = lista.index(None)
message = 'PUBMED FILE - ' + pubmedfiles[
i].name + ' - The column ' + el + ' is empty at the row: ' + str(ind) + '.'
return message
elif pubmedfiles[i].name.endswith('json'):
d = json.load(runs[i])
if 'pubmed_ids' not in d.keys():
message = 'PUBMED FILE - ' + runs[
i].name + ' - json is not well formatted.'
return message
if d['pubmed_ids'] == []:
message = 'PUBMED FILE - ' + runs[
i].name + ' - you must provide at least an article id.'
break
if not isinstance(d['pubmed_ids'], list):
message = 'PUBMED FILE - ' + runs[
i].name + ' - you must provide at least an article id.'
return message
if len(d['pubmed_ids']) != len(list(set(d['pubmed_ids']))):
message = 'WARNING PUBMED FILE - ' + runs[
i].name + ' - some ids seem to be duplicated. They will be ignored.'
return message
for el in d['pubmed_ids']:
id_report = 'PUBMED_' + str(str(el))
cursor.execute('SELECT COUNT(*) FROM report WHERE id_report = %s AND institute = %s',
[str(id_report), 'PUBMED'])
num = cursor.fetchone()
if num[0] > 0:
message = 'WARNING PUBMED FILE - ' + pubmedfiles[i].name + ' - The report: ' + str(
id_report) + ' is already present in the database. It will be ignored.'
elif pubmedfiles[i].name.endswith('txt'):
lines = pubmedfiles[i].readlines()
if len(lines) == 0:
message = 'PUBMED FILE - ' + runs[
i].name + ' - the file is empty.'
return message
if len(lines) != len(list(set(lines))):
message = 'WARNING PUBMED FILE - ' + runs[
i].name + ' - the file contain some duplicates: they will be ignored.'
for line in lines:
id_report = 'PUBMED_' + str(line.split()[0])
cursor.execute('SELECT COUNT(*) FROM report WHERE id_report = %s AND institute = %s',
[str(id_report), 'PUBMED'])
num = cursor.fetchone()
if num[0] > 0:
message = 'WARNING PUBMED FILE - ' + pubmedfiles[i].name + ' - The report: ' + str(
id_report) + ' is already present in the database. It will be ignored.'
# elif len(reports) > 0 and len(runs) > 0 and len(topics) > 0:
# message = ''
for i in range(len(reports)):
reps = decompress_files([reports[i]])
for rep in reps:
if isinstance(rep, str):
rep_name = rep
workpath = os.path.dirname(os.path.abspath(__file__)) # Returns the Path your .py file is in
rep = os.path.join(workpath, 'static/tmp/' + rep_name)
else:
rep_name = rep.name
if not rep.name.endswith('csv') and not rep_name.endswith('txt') and not rep.name.endswith('json'):
message = 'DOCUMENTS FILE - ' + rep_name + ' - The file must be .csv, .json, .txt'
return message
if rep_name.endswith('csv'):
try:
df = pd.read_csv(rep)
df = df.where(pd.notnull(df), None)
df = df.reset_index(drop=True)
except Exception as e:
message = 'DOCUMENTS FILE - ' + rep_name + ' - An error occurred while parsing the csv. Check if it is well formatted. '
return message
else:
cols = list(df.columns)
count = 0
list_db_col = ['document_id','language']
list_not_db_col = []
for el in list_db_col:
if el not in cols and el == 'document_id':
message = 'DOCUMENTS FILE - ' + rep_name + ' - The column: ' + str(el) + ' must be present.'
return message
for id in list(df.document_id.unique()):
# if str(id) in documents_ids:
# json_resp['report_message'] = 'WARNING DOCUMENTS FILE - ' + rep_name + ' - The id: ' + str(id) + ' is duplicated. The duplicates are ignored.'
# else:
documents_ids.append(str(id))
for el in cols:
if el not in list_db_col:
list_not_db_col.append(el)
if jsonDispUp is not None and jsonAnnUp is not None:
if len(disp) > 0 or len(ann) > 0:
ann_intersect = list(set(ann) & set(list_not_db_col))
for el in list_not_db_col:
if (el not in disp and el not in ann) and (el not in jsonDispUp and el not in jsonAnnUp):
count = count + 1
if count == len(list_not_db_col):
message = 'DOCUMENTS FIELDS - Please, provide at least one field to display in file: ' + \
rep_name + '. Be careful that if you do not provide one field to annotate you will not be able to perform mention annotation and linking.'
return message
elif len(ann_intersect) == 0 and (jsonAnnUp[0]) == '':
message = 'WARNING DOCUMENTS FIELDS - file: ' + rep_name + ' Please, provide at least one field to annotate if you want to find mentions and perform linking.'
if len(list_not_db_col) == 0:
message = 'DOCUMENTS FILE - ' + rep_name + ' - You must provide at least one column other than document_id'
return message
if df.shape[0] == 0:
message = 'DOCUMENTS FILE - ' + rep_name + ' - You must provide at least a report.'
return message
else:
df_dup = df[df.duplicated(subset=['document_id'], keep=False)]
if 'language' in df:
df_dup = df[df.duplicated(subset=['document_id','language'], keep=False)]
if df_dup.shape[0] > 0:
message = 'WARNING DOCUMENTS FILE - ' + rep_name + ' - The rows: ' + str(
df_dup.index.to_list()) + ' are duplicated. The duplicates are ignored.'
for id in list(df.document_id.unique()):
if str(id) in documents_ids:
message = 'WARNING DOCUMENTS FILE - ' + rep_name + ' - The id: ' + str(
id) + ' is duplicated. The duplicates are ignored.'
else:
documents_ids.append(str(id))
for ind in range(df.shape[0]):
found = False
if 'language' in df:
language = str(df.loc[ind, 'language'])
else:
language = 'english'
cursor.execute('SELECT COUNT(*) FROM report WHERE id_report = %s AND language = %s',
[str(df.loc[ind, 'document_id']),language])
num = cursor.fetchone()
if num[0] > 0:
message = 'WARNING DOCUMENTS FILE - ' + rep_name + ' - The report: ' + str(
df.loc[ind, 'document_id']) + ' is already present in the database. It will be ignored.'
for el in list_db_col:
if df.loc[ind, el] is not None:
found = True
break
if found == False:
message = 'DOCUMENTS FILE - ' + rep_name + ' - The report at row ' + str(
ind) + ' has the column: ' + ', '.join(
list_db_col) + ' empty. '
return message
found = False
count_both = 0
not_none_cols = []
for el in list_not_db_col:
if df.loc[ind, el] is not None:
found = True
not_none_cols.append(el)
if found == False:
message = 'DOCUMENTS FILE - ' + rep_name + ' - The report at row ' + str(
ind) + ' has the columns: ' + ', '.join(
list_not_db_col) + ' empty. Provide a value for at least one of these columns, or delete this report from the csv file.'
return message
for el in not_none_cols:
if jsonAnnUp is not None and jsonDispUp is not None:
if el not in disp and el not in jsonDispUp and el not in ann and el not in jsonAnnUp:
count_both = count_both + 1
else:
if el not in disp and el not in ann:
count_both = count_both + 1
if count_both == len(not_none_cols):
message = 'WARNING DOCUMENTS FIELDS TO DISPLAY AND ANNOTATE - ' + rep_name + ' - With the current configuration the report at the row: ' + str(
ind) + ' would not be displayed since the columns to display are all empty for that report.'
# for el in df.institute.unique():
# if el.lower() == 'pubmed':
# message = 'REPORTS FILE - ' + reports[
# i].name + ' - calling an institute "PUBMED" is forbidden, please, change the name'
#
for el in df.document_id:
if el.lower().startswith('pubmed_'):
message = 'DOCUMENTS FILE - ' + rep_name + ' - reports\' ids can not start with "PUBMED_", please, change the name'
el = ''
if None in df['document_id'].tolist():
el = 'document_id'
if 'language' in df:
el = 'language'
if el != '':
lista = df[el].tolist()
ind = lista.index(None)
message = 'DOCUMENTS FILE - ' + rep_name + ' - The column ' + el + ' is empty at the row: ' + str(ind) + '.'
return message
elif rep_name.endswith('json'):
# with open(rep, 'r') as f:
d = json.load(rep)
if 'collection' not in d.keys():
message = 'DOCUMENTS FILE - ' + rep_name + ' - The json is not well formatted.'
break
exit = False
keys_list = []
if len(d['collection']) == 0:
message = 'DOCUMENTS FILE - ' + rep_name + ' - You must provide at least a report.'
break
for document in d['collection']:
if 'language' in document.keys():
language = document['language']
else:
language = 'english'
cursor.execute('SELECT COUNT(*) FROM report WHERE id_report = %s AND language = %s',
[document['document_id'], language])
num = cursor.fetchone()
if num[0] > 0:
message = 'WARNING REPORT FILE - ' +rep_name + ' - The report: ' + str(
document['document_id']) + ' is already present in the database. It will be ignored.'
ind = d['collection'].index(document)
if 'document_id' not in document.keys() or document['document_id'] is None:
message = 'DOCUMENTS FILE - ' + rep_name + ' - The ' + str(ind) + ' document does not contain the "document_id" key which is mandatory.'
exit = True
break
doc_keys = list(document.keys())
if 'language' in list(document.keys()):
doc_keys.remove('language')
doc_keys.remove('document_id')
is_none = True
for key in list(document.keys()):
if key != 'document_id' and key != 'language':
if document[key] is not None:
is_none = False
break
if ('document_id' in list(document.keys()) and len(list(document.keys())) == 1) or ('language' in list(document.keys()) and len(list(document.keys())) == 2) or is_none:
message = 'DOCUMENTS FILE - ' + rep_name + ' - The ' + str(ind) + ' document does not contain the document\' s text.'
keys_list.extend(list(document.keys()))
if str(document['document_id']) in documents_ids:
message = 'WARNING DOCUMENTS FILE - ' + rep_name + ' - The id ' + str(document['document_id']) + ' is duplicated.'
else:
documents_ids.append(str(document['document_id']))
count_both = 0
for el in doc_keys:
if jsonAnnUp is not None and jsonDispUp is not None:
if el not in disp and el not in jsonDispUp and el not in ann and el not in jsonAnnUp:
count_both = count_both + 1
else:
if el not in disp and el not in ann:
count_both = count_both + 1
if count_both == len(doc_keys):
message = 'WARNING DOCUMENTS FIELDS TO DISPLAY AND ANNOTATE - ' + reports[
i].name + ' - With the current configuration the report at the row: ' + str(
ind) + ' would not be displayed since the columns to display are all empty for that report.'
if exit == True:
break
if len(topics) > 0:
for i in range(len(topics)):
if not topics[i].name.endswith('csv') and not topics[i].name.endswith('json') and not topics[
i].name.endswith('txt'):
message = 'TOPIC FILE - ' + topics[i].name + ' - The file must be .csv or .json or .txt'
return message
if topics[i].name.endswith('csv'):
try:
df = pd.read_csv(labels[i])
df = df.where(pd.notnull(df), None)
df = df.reset_index(drop=True)
except Exception as e:
message = 'TOPIC FILE - ' + topics[
i].name + ' - An error occurred while parsing the csv. Check if is well formatted.'
pass
else:
cols = list(df.columns)
list_db_col = ['topic_id', 'title', 'description', 'narrative']
for el in list_db_col:
if el not in cols and el == 'topic_id':
message = 'TOPIC FILE - ' + topics[
i].name + ' - The column: ' + el + ' is not present and it is mandatory.'
return message
elif el not in cols:
message = 'WARNING TOPIC FILE - ' + topics[
i].name + ' - The column: ' + el + ' is not present.'
for el in cols:
if el not in list_db_col:
message = 'TOPIC FILE - ' + topics[
i].name + ' - The column ' + el + ' is not allowed.'
return message
for id in list(df.topic_id.unique()):
if id in topics_ids:
message = 'WARNING TOPIC FILE - ' + topics[i].name + ' - The topic: ' + str(id) + ' is duplicated. The duplicates are ignored.'
else:
topics_ids.append(id)
if df.shape[0] == 0:
message = 'TOPIC FILE - ' + topics[
i].name + ' - You must provide at least a row.'
return message
else:
# check if columns annotation_label and name have no duplicates
df_dup = df[df.duplicated(subset=['topic_id'], keep=False)]
if df_dup.shape[0] > 0:
message = 'WARNING TOPIC FILE - ' + topics[
i].name + ' - The rows: ' + str(
df_dup.index.to_list()) + ' are duplicated. The duplicates will be ignored.'
el = ''
if None in df['topic_id'].tolist():
el = 'topic_id'
if el != '':
lista = df[el].tolist()
ind = lista.index(None)
message = 'TOPIC FILE - ' + topics[
i].name + ' - The column ' + el + ' is empty at the row: ' + str(ind) + ' .'
return message
elif topics[i].name.endswith('.txt'):
arr_to_ret = elaborate_runs(runs)
topics_ids = elaborate_TREC_topic_files([], topics[i], 'check')
topics_ids = [str(i) for i in topics_ids]
if isinstance(topics_ids, list) == False:
message = 'TOPIC FILE - ' + topics[
i].name + ' - topics are not well formatted.'
elif topics[i].name.endswith('.json'):
# with open(topics[i], 'r') as f:
d = json.load(topics[i])
doc_top = []
if 'topics' not in d.keys():
message = 'TOPIC FILE - ' + topics[
i].name + ' - json is not well formatted.'
return message
if d['topics'] == []:
message = 'TOPIC FILE - ' + labels[
i].name + ' - you must provide at least a label.'
return message
for topic in d['topics']:
ind = d['topics'].index(topic)
if 'topic_id' not in topic.keys():
message = 'TOPIC FILE - ' + topics[
i].name + ' - you must provide a topic number in the ' + str(ind) + ' th topic.'
return message
doc_top.append(topic['topic_id'])
if str(topic['topic_id']) in topics_ids:
message = 'WARNING TOPIC FILE - ' + topics[
i].name + ' - the list of topics contains duplicates. They will be ignored.'
else:
topics_ids.append(str(topic['topic_id']))
if len(doc_top) > len(set(doc_top)):
message = 'WARNING TOPIC FILE - ' + topics[
i].name + ' - the list of topics contains duplicates. They will be ignored.'
if len(runs) > 0:
language = 'english'
for i in range(len(runs)):
if not runs[i].name.endswith('csv') and not runs[i].name.endswith('json') and not runs[i].name.endswith('txt'):
message = 'RUNS FILE - ' + runs[i].name + ' - The file must be .csv or .json or .txt'
break
if runs[i].name.endswith('csv'):
try:
df = pd.read_csv(runs[i])
df = df.where(pd.notnull(df), None)
df = df.reset_index(drop=True)
except Exception as e:
message = 'RUNS FILE - ' + runs[
i].name + ' - An error occurred while parsing the csv. Check if is well formatted.'
return message
else:
cols = list(df.columns)
list_db_col = ['topic_id', 'document_id', 'language']
for el in list_db_col:
if el not in cols and el != 'language':
message = 'RUNS FILE - ' + runs[
i].name + ' - The column: ' + el + ' is not present and it is mandatory.'
return message
for el in cols:
if el not in list_db_col:
message = 'RUNS FILE - ' + runs[
i].name + ' - The column ' + el + ' is not allowed.'
return message
if df.shape[0] == 0:
message = 'RUNS FILE - ' + runs[
i].name + ' - You must provide at least a row.'
return message
else:
# check if columns annotation_label and name have no duplicates
df_dup = df[df.duplicated(subset=['topic_id', 'document_id'], keep=False)]
if 'language' in df:
df_dup = df[df.duplicated(subset=['topic_id', 'document_id','language'], keep=False)]
if df_dup.shape[0] > 0:
message = 'WARNING RUNS FILE - ' + runs[
i].name + ' - The rows: ' + str(
df_dup.index.to_list()) + ' are duplicated. The duplicates will be ignored.'
for i in range(df.shape[0]):
doc = str(df.loc[i,'document_id'])
top = str(df.loc[i,'topic_id'])
if 'language' in df:
language = str(df.loc[i,'language'])
report = Report.objects.get(id_report = str(doc),language = language)
topic = UseCase.objects.get(name = str(top))
v = TopicHasDocument.objects.filter(name = topic,id_report = report,language =language)
if v.count() > 0:
message = 'WARNING RUNS FILE - ' + runs[i].name + ' - The topic: ' + str(topic) +' is already associated to the document: ' + str(doc)+ ' it will be ignored.'
el = ''
# if None in df['usecase'].tolist():
# el = 'usecase'
if None in df['topic_id'].tolist():
el = 'topic_id'
if None in df['document_id'].tolist():
el = 'document_id'
if 'language' in df:
if None in df['language'].tolist():
el = 'language'
if el != '':
lista = df[el].tolist()
ind = lista.index(None)
message = 'RUNS FILE - ' + topics[
i].name + ' - The column ' + el + ' is empty at the row: ' + str(ind) + ' .'
return message
tops.extend(df.topic_id.unique())
for el in df.topic_id.unique():
if el not in topics_ids:
message = 'RUNS FILE - ' + runs[
i].name + ' - The topic: ' + str(el) + ' is not in the provided list of topics.'
return message
docs.extend(list(df.document_id.unique()))
for el in (df.document_id.unique()):
if str(el) not in documents_ids:
message = 'RUNS FILE - ' + runs[
i].name + ' - The document: ' + str(
el) + ' is not in the provided list of documents.'
return message
elif runs[i].name.endswith('.json'):
# with open(runs[i], 'r') as f:
d = json.load(runs[i])
if 'run' not in d.keys():
message = 'RUNS FILE - ' + runs[
i].name + ' - json is not well formatted.'
return message
if d['run'] == []:
message = 'RUNS FILE - ' + runs[
i].name + ' - you must provide at least a topic and one or more documents associated.'
return message
for r in d['run']:
ind = d['run'].index(r)
tops.append(r['topic_id'])
docs.extend(r['documents'])
if 'topic_id' not in r.keys():
message = 'RUNS FILE - ' + topics[
i].name + ' - you must provide a topic number in the ' + str(
ind) + ' th element.'
return message
if 'documents' not in r.keys():
message = 'RUNS FILE - ' + topics[
i].name + ' - you must provide a topic\'s list for the topic: ' + str(
r['topic_id']) + '.'
return message
for el in r['documents']:
if isinstance(el,str) or isinstance(el,int):
if Report.objects.filter(id_report=str(el)).exists():
report = Report.objects.get(id_report=str(el), language='english')
topic = UseCase.objects.get(name=str(r['topic_id']))
v = TopicHasDocument.objects.filter(name=topic, id_report=report,
language='english')
if v.count() > 0:
message = 'WARNING RUNS FILE - ' + runs[i].name + ' - The topic: ' + str(
topic) + ' is already associated to the document: ' + str(
el) + ' it will be ignored.'
elif isinstance(el,dict):
if Report.objects.filter(id_report=str(el['document_id'])).exists():
report = Report.objects.get(id_report=str(el['document_id']), language=el['language'])
topic = UseCase.objects.get(name=str(r['topic_id']))
v = TopicHasDocument.objects.filter(name=topic, id_report=report,
language=el['language'])
if v.count() > 0:
message = 'WARNING RUNS FILE - ' + runs[i].name + ' - The topic: ' + str(
topic) + ' is already associated to the document: ' + str(
el['document_id']) + ' it will be ignored.'
for el in tops:
if str(el) not in topics_ids:
message = 'RUNS FILE - ' + runs[i].name + ' - The topic: ' + str(el) + ' is not in the provided list of topics.'
return message
for el in docs:
if isinstance(el,dict):
el = el['document_id']
if str(el) not in documents_ids:
message = 'RUNS FILE - ' + runs[
i].name + ' - The document: ' + str(
el) + ' is not in the provided list of documents.'
return message
elif runs[i].name.endswith('.txt'):
# with open(runs[i], 'r') as f:
lines = runs[i].readlines()
tups = []
for line in lines:
if len(line.split()) == 2:
topic = line.split()[0]
tops.append(topic)
doc = line.split()[1]
docs.append(doc)
tups.append((topic, doc))
report = Report.objects.get(id_report=str(doc), language='english')
topic = UseCase.objects.get(name=str(topic))
v = TopicHasDocument.objects.filter(name=topic, id_report=report,
language='english')
if v.count() > 0:
message = 'WARNING RUNS FILE - ' + runs[i].name + ' - The topic: ' + str(
topic) + ' is already associated to the document: ' + str(
doc) + ' it will be ignored.'
elif len(line.split()) > 2: # TREC
topic = line.split()[0]
tops.append(topic)
doc = line.split()[2]
tups.append((topic, doc))
docs.append(doc)
report = Report.objects.get(id_report=str(doc), language='english')
topic = UseCase.objects.get(name=str(topic))
v = TopicHasDocument.objects.filter(name=topic, id_report=report,
language='english')
if v.count() > 0:
message = 'WARNING RUNS FILE - ' + runs[i].name + ' - The topic: ' + str(
topic) + ' is already associated to the document: ' + str(
doc) + ' it will be ignored.'
else:
message = 'RUNS FILE - ' + runs[
i].name + ' - txt file is not well formatted.'
return message
for el in tops:
if el not in topics_ids:
message = 'RUNS FILE - ' + runs[
i].name + ' - The topic: ' + str(el) + ' is not in the provided list of topics.'
return message
for el in docs:
if str(el) not in documents_ids:
message = 'RUNS FILE - ' + runs[
i].name + ' - The document: ' + str(
el) + ' is not in the provided list of documents.'
return message
if jsonAnn is not None and jsonDisp is not None:
if type_req == 'json_fields' and len(jsonAnn) == 0 and len(jsonDisp) == 0 and len(ann) == 0:
message = 'REPORT FIELDS TO ANNOTATE - You must provide at least one field to display and/or one field to display and annotate.'
return message
elif type_req == 'json_fields' and len(jsonAnn) == 0:
message = 'WARNING REPORT FIELDS TO ANNOTATE - ok, but with this configuration you will not be able to perform mention annotation and linking. Please, select also at least a field to annotate if you want to find some mentions and to link them'
return message
if type_req == 'labels' and len(labels) == 0:
message = 'LABELS - Please insert a labels file.'
return message
if type_req == 'concepts' and len(concepts) == 0:
message = 'CONCEPTS - Please insert a concepts file.'
return message
if type_req == 'reports' and len(reports) == 0:
message = 'REPORTS - Please insert a reports file.'
return message
if type_req == 'pubmed' and len(pubmedfiles) == 0:
message = 'PUBMED - Please insert a reports file.'
return message
return message
except (Exception, psycopg2.Error) as e:
print(e)
message = 'An error occurred in ' + type_req + ' file(s). Please check if it is similar to the example we provided.'
return message
def update_db_util(reports,pubmedfiles,labels,concepts,jsondisp,jsonann,jsondispup,jsonannup,jsonall,topics,runs,batch,tf_idf):
    """Apply a previously validated batch update to the database.

    This method is run after having checked the files inserted for the update.
    Inside a single transaction it:
      * merges the display/annotation field lists into a new versioned
        ``fields<N>.json`` configuration file;
      * loads topics, document collections, PubMed files and runs, and links
        topics to documents via ``TopicHasDocument``;
      * inserts new annotation labels and concepts (skipping duplicates).
    On success it optionally recomputes the per-topic TF-IDF highlight map and
    writes it to ``config_files/tf_idf_map.json``.

    Returns a dict: ``{'message': 'Ok'}`` on success, or ``{'error': ...}``
    naming the step that failed (the transaction is rolled back and the newly
    created config file, if any, is removed).
    """
    filename = ''
    today = str(date.today())
    # Coarse marker of the step currently executing; reported in the error message.
    error_location = 'database'
    usecases = []
    sem_areas = []
    # Set to True once a new fields<N>.json has been written, so it can be removed on failure.
    created_file = False
    cursor = connection.cursor()
    try:
        with transaction.atomic():
            # Load the currently active field configuration, if any version exists.
            all_fields = []
            fields = []
            fields_to_ann = []
            version = get_version()
            if int(version) != 0:
                json_resp = get_fields_from_json()
                all_fields = json_resp['all_fields']
                fields = json_resp['fields']
                fields_to_ann = json_resp['fields_to_ann']
            if jsonannup != '' or jsondispup != '' or jsonall != '':
                data = {}
                all_fields = []
                fields = []
                fields_to_ann = []
                version = get_version()
                if int(version) != 0:
                    json_resp = get_fields_from_json()
                    all_fields = json_resp['all_fields']
                    fields = json_resp['fields']
                    fields_to_ann = json_resp['fields_to_ann']
                # The *up parameters arrive as iterables of comma-separated strings:
                # flatten to one string, then split on commas.
                jsondispup = ''.join(jsondispup)
                jsonannup = ''.join(jsonannup)
                jsonall = ''.join(jsonall)
                jsondispup = jsondispup.split(',')
                jsonannup = jsonannup.split(',')
                jsonall = jsonall.split(',')
                for el in jsondispup:
                    if len(el) > 0:
                        if el not in all_fields:
                            all_fields.append(el)
                        if el not in fields:
                            fields.append(el)
                for el in jsonannup:
                    if len(el) > 0:
                        if el not in fields_to_ann:
                            fields_to_ann.append(el)
                        if el not in all_fields:
                            all_fields.append(el)
                for el in jsonall:
                    if el not in all_fields and el:
                        all_fields.append(el)
                data['fields'] = fields
                data['fields_to_ann'] = fields_to_ann
                data['all_fields'] = all_fields
                # Persist the merged configuration as the next version file.
                version = get_version()
                version_new = int(version) + 1
                filename = 'fields' + str(version_new)
                workpath = os.path.dirname(os.path.abspath(__file__))  # Returns the Path your .py file is in
                with open(os.path.join(workpath, '.config_files/data/' + filename + '.json'), 'w') as outfile:
                    json.dump(data, outfile)
                created_file = True
            if (len(reports) > 0 or len(pubmedfiles) > 0) or len(runs) > 0 or len(topics) > 0:
                language = 'english'
                arr_to_ret = elaborate_runs(runs)
                error_location = 'Topic'
                for topic in topics:
                    if topic.name.endswith('txt'):
                        elaborate_TREC_topic_files(arr_to_ret, topic)
                    elif topic.name.endswith('json'):
                        process_topic_json_file(arr_to_ret, topic)
                    elif topic.name.endswith('csv'):
                        process_topic_csv_file(arr_to_ret, topic)
                error_location = 'Collection'
                for file in reports:
                    # Decompressed entries may be paths (str) or file-like objects.
                    reps = decompress_files([file])
                    for f in reps:
                        if isinstance(f, str):
                            file_name = f
                            workpath = os.path.dirname(
                                os.path.abspath(__file__))  # Returns the Path your .py file is in
                            f = os.path.join(workpath, 'static\\tmp\\' + f)
                        else:
                            file_name = f.name
                        if file_name.endswith('json'):
                            find_docs_in_json_collection(arr_to_ret, f)
                        elif file_name.endswith('csv'):
                            find_docs_in_csv_collection(arr_to_ret, f)
                for file in pubmedfiles:
                    find_docs_in_json_pubmed_collection(arr_to_ret, file)
                error_location = 'Runs'
                # arr_to_ret rows look like (topic_name, document_id[, language]).
                for el in arr_to_ret:
                    if len(el) == 3:
                        language = el[2]
                    topic = UseCase.objects.get(name=el[0])
                    doc = Report.objects.get(id_report=el[1], language=language)
                    TopicHasDocument.objects.get_or_create(name=topic, language=doc.language, id_report=doc)
            # Populate the labels table
            if len(labels) > 0:
                labs = []
                error_location = 'Labels'
                for label_file in labels:
                    if label_file.name.endswith('csv'):
                        df_labels = pd.read_csv(label_file)
                        df_labels = df_labels.where(pd.notnull(df_labels), None)
                        df_labels = df_labels.reset_index(drop=True)
                        # df_labels['usecase'] = df_labels['usecase'].str.lower()
                        count_lab_rows = df_labels.shape[0]
                        for i in range(count_lab_rows):
                            label = str(df_labels.loc[i, 'label'])
                            labs.append(label.rstrip())
                    elif label_file.name.endswith('json'):
                        d = json.load(label_file)
                        labels = d['labels']
                        for label in labels:
                            labs.append(label.rstrip())
                    elif label_file.name.endswith('txt'):
                        lines = label_file.readlines()
                        for line in lines:
                            line = line.decode('utf-8')
                            labs.append(line.replace('\n', ''))
                for label in labs:
                    # Insert each label with the next free seq_number, skipping labels
                    # that already exist.
                    cursor.execute('SELECT * FROM annotation_label')
                    ans = cursor.fetchall()
                    if len(ans) == 0:
                        seq_number = 1
                    else:
                        cursor.execute('SELECT seq_number FROM annotation_label ORDER BY seq_number DESC;')
                        ans = cursor.fetchall()
                        seq_number = int(ans[0][0]) + 1
                    cursor.execute("SELECT * FROM annotation_label WHERE label = %s;",
                                   (str(label),))
                    ans = cursor.fetchall()
                    if len(ans) == 0:
                        cursor.execute("INSERT INTO annotation_label (label,seq_number) VALUES (%s,%s);",
                                       (str(label), int(seq_number)))
            # Populate the concepts table
            if len(concepts) > 0:
                error_location = 'Concepts'
                for concept_file in concepts:
                    if concept_file.name.endswith('csv'):
                        df_concept = pd.read_csv(concept_file)
                        df_concept = df_concept.where(pd.notnull(df_concept), None)
                        df_concept = df_concept.reset_index(drop=True)
                        # df_concept['usecase'] = df_concept['usecase'].str.lower()
                        # print(df_concept)
                        count_conc_rows = df_concept.shape[0]
                        for i in range(count_conc_rows):
                            df_concept = df_concept.where(pd.notnull(df_concept), None)
                            concept_url = str(df_concept.loc[i, 'concept_url'])
                            concept_name = str(df_concept.loc[i, 'concept_name'])
                            # usecase = str(df_concept.loc[i, 'usecase'])
                            # semantic_area = str(df_concept.loc[i, 'area'])
                            cursor.execute("SELECT concept_url,json_concept FROM concept WHERE concept_url = %s;",
                                           (str(concept_url),))
                            ans = cursor.fetchall()
                            if len(ans) == 0:
                                # json_concept = json.dumps({'provenance': 'admin', 'insertion_author': 'admin'})
                                cursor.execute("INSERT INTO concept (concept_url,name) VALUES (%s,%s);",
                                               (str(concept_url), str(concept_name)))
                            cursor.execute("SELECT * FROM belong_to WHERE concept_url = %s AND name=%s;",
                                           (str(concept_url), 'default_area'))
                            ans = cursor.fetchall()
                            if len(ans) == 0:
                                cursor.execute("INSERT INTO belong_to (concept_url,name) VALUES (%s,%s);",
                                               (str(concept_url), 'default_area'))
                    elif concept_file.name.endswith('json'):
                        d = json.load(concept_file)
                        count_conc_rows = len(d['concepts_list'])
                        for i in range(count_conc_rows):
                            concept_url = str(d['concepts_list'][i]['concept_url'])
                            concept_name = str(d['concepts_list'][i]['concept_name'])
                            # usecase = str(df_concept.loc[i, 'usecase'])
                            # semantic_area = str(df_concept.loc[i, 'area'])
                            cursor.execute("SELECT concept_url,json_concept FROM concept WHERE concept_url = %s;",
                                           (str(concept_url),))
                            ans = cursor.fetchall()
                            if len(ans) == 0:
                                # json_concept = json.dumps({'provenance': 'admin', 'insertion_author': 'admin'})
                                cursor.execute("INSERT INTO concept (concept_url,name) VALUES (%s,%s);",
                                               (str(concept_url), str(concept_name)))
                            cursor.execute("SELECT * FROM belong_to WHERE concept_url = %s AND name=%s;",
                                           (str(concept_url), 'default_area'))
                            ans = cursor.fetchall()
                            if len(ans) == 0:
                                cursor.execute("INSERT INTO belong_to (concept_url,name) VALUES (%s,%s);",
                                               (str(concept_url), 'default_area'))
            # Same merge-and-version logic as above, but for the non-"up" field parameters.
            if ((jsonann is not None) and (jsonann != '')) or ((jsondisp is not None) and jsondisp != ''):
                data = {}
                jsondisp = ''.join(jsondisp)
                jsonann = ''.join(jsonann)
                jsondisp = jsondisp.split(',')
                jsonann = jsonann.split(',')
                for el in jsondisp:
                    if len(el) > 0:
                        if el not in fields:
                            fields.append(el)
                        if el not in all_fields:
                            all_fields.append(el)
                for el in jsonann:
                    if len(el) > 0:
                        if el not in fields_to_ann:
                            fields_to_ann.append(el)
                        if el not in all_fields:
                            all_fields.append(el)
                data['fields'] = fields
                data['all_fields'] = all_fields
                data['fields_to_ann'] = fields_to_ann
                version = get_version()
                workpath = os.path.dirname(os.path.abspath(__file__))  # Returns the Path your .py file is in
                version_new = int(version) + 1
                filename = 'fields' + str(version_new)
                with open(os.path.join(workpath, '.config_files/data/' + filename + '.json'), 'w') as outfile:
                    json.dump(data, outfile)
                created_file = True
    except (Exception,psycopg2.IntegrityError) as e:
        print(e)
        # connection.rollback()
        print('rolledback')
        # Remove the config file created during this (failed) run, if any.
        if created_file == True:
            workpath = os.path.dirname(os.path.abspath(__file__))  # Returns the Path your .py file is in
            if filename != '' and filename != 'fields0':
                path = os.path.join(workpath, '.config_files/data/'+filename+'.json')
                os.remove(path)
        json_resp = {'error': 'an error occurred in: ' + error_location + '. The configuration failed.'}
        return json_resp
    else:
        # connection.commit()
        if created_file == True:
            # NOTE(review): this path is 'config_files/data' while every other reference
            # uses '.config_files/data' (leading dot), and `workpath` is only bound here
            # if one of the branches above assigned it — confirm both before relying on
            # this cleanup step.
            for filen in os.listdir(os.path.join(workpath, 'config_files/data')):
                if filen.endswith('json'):
                    print(filen)
                    if filen != '' and filen != 'fields0.json' and filen != filename + '.json':
                        path = os.path.join(workpath, '.config_files/data/' + filen)
                        os.remove(path)
        if ((jsonann is not None) and (jsonann != '')) or ((jsondisp is not None) and jsondisp != ''):
            # NOTE(review): `outfile` was already closed by its `with` block above;
            # this close is redundant (and `outfile` may be unbound if that branch
            # did not run inside the transaction) — confirm.
            outfile.close()
        if tf_idf is not None :
            print(str(tf_idf))
            data = {}
            cursor = connection.cursor()
            cursor.execute('SELECT DISTINCT language FROM report')
            ans = cursor.fetchall()
            languages = []
            for el in ans:
                languages.append(el[0])
            st = time.time()
            if int(tf_idf) > 0:
                # Persist the chosen k into config.json, then rebuild the per-topic
                # TF-IDF word-highlight map for every document of every topic.
                workpath = os.path.dirname(os.path.abspath(__file__))  # Returns the Path your .py file is in
                path1 = os.path.join(workpath, './config_files/config.json')
                g = open(path1, 'r')
                data = json.load(g)
                data['TF-IDF_k'] = tf_idf
                with open(path1, 'w') as f:
                    json.dump(data, f)
                t = UseCase.objects.all()
                cursor = connection.cursor()
                json_to_write = {}
                for top in t:
                    print('topic:' + str(top))
                    json_to_write[top.name] = {}
                    topic = {}
                    corpus = []
                    cursor.execute(
                        "SELECT r.id_report,r.language,r.report_json FROM report as r inner join topic_has_document as t on t.id_report = r.id_report and r.language = t.language where t.name = %s",
                        [str(top.name)])
                    ans = cursor.fetchall()
                    # Build the corpus: one {'document_id', 'text'} record per report,
                    # keeping only languages supported by NLTK.
                    for el in ans:
                        e = json.loads(el[2])
                        r_j1 = {}
                        r_j1['document_id'] = str(el[0])
                        r_j1['text'] = ''
                        for k in e.keys():
                            if k != 'document_id' or (
                                    str(el[0]).startswith('PUBMED_') and (k == 'abstract' or k == 'title')):
                                r_j1['text'] = r_j1['text'] + ' ' + str(e[k])
                        if el[1].lower() in LANGUAGES_NLTK:
                            corpus.append(r_j1)
                    topic['title'] = top.title
                    topic['description'] = top.description
                    # df_tfidf = gen_tfidf_map(corpus,language)
                    for el in ans:
                        if el[1].lower() in LANGUAGES_NLTK:
                            language = el[1].lower()
                            start = time.time()
                            print('working on ', str(el[0]))
                            e = json.loads(el[2])
                            r_j1 = {}
                            r_j1['document_id'] = str(el[0])
                            r_j1['text'] = ''
                            for k in e.keys():
                                # print(k)
                                # print(e[k])
                                if isinstance(e[k], list):
                                    e[k] = ', '.join(e[k])
                                if k != 'document_id' and k != 'language' and e[k] is not None:
                                    r_j1['text'] = r_j1['text'] + ' ' + e[k]
                            tfidf_matcher = QueryDocMatcher(topic=topic, doc=r_j1, corpus=corpus, language=language)
                            top_k_matching_words = tfidf_matcher.get_words_to_highlight()
                            # print(top_k_matching_words)
                            # json_val = {}
                            # json_val[str(el[0])] = top_k_matching_words
                            # json_val['words'] = top_k_matching_words
                            json_to_write[top.name][str(el[0])] = top_k_matching_words
                            # print(json_to_write)
                            end = time.time()
                            print('elaborated in ' + str(end - start) + ' seconds')
            else:
                json_to_write = {}
            end = time.time()
            print('time', end - st)
            path2 = os.path.join(workpath, './config_files/tf_idf_map.json')
            with open(path2, 'w') as f:
                json.dump(json_to_write, f)
        json_resp = {'message': 'Ok'}
        return json_resp
| 54.459706
| 276
| 0.424124
| 13,450
| 137,184
| 4.204758
| 0.034572
| 0.019008
| 0.013297
| 0.010344
| 0.893111
| 0.873254
| 0.837483
| 0.810977
| 0.793525
| 0.768964
| 0
| 0.004746
| 0.479298
| 137,184
| 2,518
| 277
| 54.481334
| 0.786972
| 0.052105
| 0
| 0.792163
| 0
| 0.010417
| 0.190625
| 0.002582
| 0.000496
| 0
| 0
| 0
| 0
| 1
| 0.002976
| false
| 0.007937
| 0.003968
| 0.000992
| 0.047619
| 0.011409
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
2328fc599b6c9b0929d4c091b19227d81ebee758
| 989
|
py
|
Python
|
python--exercicios/ex053.py
|
Eliezer2000/python
|
12abb54c6536acb2f36b8f34bf51ec765857eb75
|
[
"MIT"
] | null | null | null |
python--exercicios/ex053.py
|
Eliezer2000/python
|
12abb54c6536acb2f36b8f34bf51ec765857eb75
|
[
"MIT"
] | null | null | null |
python--exercicios/ex053.py
|
Eliezer2000/python
|
12abb54c6536acb2f36b8f34bf51ec765857eb75
|
[
"MIT"
] | null | null | null |
# ex053 — palindrome exercise: read a sentence, strip spaces, compare it with
# its reverse. The same exercise appears three times with slightly different
# output messages; each pass keeps its original prompts and messages.
frase = str(input('Digie uma frase : ')).strip().upper()
palavras = frase.split()
junto = ''.join(palavras)
print('Você digitou a frase {}'.format(junto))
# Reverse via slicing instead of a manual index loop.
inverso = junto[::-1]
if inverso == junto:
    print('Temos um Palindromo')
else:
    print('A frase digitada não é um palindro !')
frase = str(input('Digite uma frase : ')).strip().upper()
palavras = frase.split()
junto = ''.join(palavras)
inverso = junto[::-1]
if inverso == junto:
    print('Temos um palindromo')
else:
    print('A frase digitada não é um palindro')
frase = str(input('Digite uma frase : ')).strip().upper()
palavras = frase.split()
junto = ''.join(palavras)
inverso = junto[::-1]
if inverso == junto:
    print('Temos um palindromo')
else:
    print('A frase diditada não é um palindromo ')
| 21.977778
| 57
| 0.625885
| 136
| 989
| 4.551471
| 0.25
| 0.019386
| 0.063005
| 0.087237
| 0.878837
| 0.878837
| 0.878837
| 0.878837
| 0.878837
| 0.878837
| 0
| 0.011421
| 0.203236
| 989
| 44
| 58
| 22.477273
| 0.774112
| 0
| 0
| 0.806452
| 0
| 0
| 0.247959
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.225806
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
23458753b2b57a1cc8485ec44023b46ee10a1ee2
| 9,960
|
py
|
Python
|
test/unit/hsts_support_test.py
|
grauwoelfchen/pyramid_secure_response
|
ed162ec5ed0d18aecbcab946c15a2f0c4104ea22
|
[
"BSD-3-Clause"
] | 2
|
2017-11-29T13:51:55.000Z
|
2018-01-16T05:51:58.000Z
|
test/unit/hsts_support_test.py
|
grauwoelfchen/pyramid_secure_response
|
ed162ec5ed0d18aecbcab946c15a2f0c4104ea22
|
[
"BSD-3-Clause"
] | null | null | null |
test/unit/hsts_support_test.py
|
grauwoelfchen/pyramid_secure_response
|
ed162ec5ed0d18aecbcab946c15a2f0c4104ea22
|
[
"BSD-3-Clause"
] | null | null | null |
import pytest
from pyramid_secure_response.hsts_support import tween
@pytest.fixture(autouse=True)
def setup():
    """Raise the hsts_support logger threshold to ERROR for every test.

    autouse=True applies the fixture to all tests in this module, keeping
    the module's lower-severity log output out of the test run.
    """
    import logging
    from pyramid_secure_response.hsts_support import logger
    logger.setLevel(logging.ERROR)
@pytest.mark.parametrize('max_age,include_subdomains,preload,header', [
    ('3600', True, True, 'max-age=3600; includeSubDomains; preload'),
    ('1800', True, False, 'max-age=1800; includeSubDomains'),
    ('900', False, False, 'max-age=900'),
])
def test_build_hsts_header(max_age, include_subdomains, preload, header):
    """build_hsts_header renders each enabled directive into the header."""
    from collections import namedtuple
    from pyramid_secure_response.hsts_support import build_hsts_header
    config_cls = namedtuple('hsts_support', (
        'max_age',
        'include_subdomains',
        'preload',
    ))
    config = config_cls(max_age=max_age,
                        include_subdomains=include_subdomains,
                        preload=preload)
    assert build_hsts_header(config) == header
def test_hsts_support_tween_with_disabled(mocker, dummy_request):
    """With enabled='False' the tween only invokes the handler.

    Neither the path filter nor the criteria builder may run, and no
    Strict-Transport-Security header is added to the response.
    """
    # Patch before the ``from ... import`` below, so the names imported
    # there are bound to the mock objects whose call counts are asserted.
    mocker.patch('pyramid_secure_response.hsts_support.apply_path_filter',
                 return_value=True)
    mocker.patch('pyramid_secure_response.hsts_support.build_criteria',
                 return_value=[])
    from pyramid.response import Response
    from pyramid_secure_response.hsts_support import (
        apply_path_filter,
        build_criteria,
    )
    dummy_request.registry.settings = {
        'pyramid_secure_response.hsts_support.enabled': 'False'
    }
    handler_stub = mocker.stub(name='handler_stub')
    handler_stub.return_value = Response(status=200)
    hsts_support_tween = tween(handler_stub, dummy_request.registry)
    res = hsts_support_tween(dummy_request)
    # pylint: disable=no-member
    assert 1 == handler_stub.call_count
    assert 0 == apply_path_filter.call_count
    assert 0 == build_criteria.call_count
    assert 'Strict-Transport-Security' not in res.headers
def test_hsts_support_tween_with_ignored_path(mocker, dummy_request):
    """A request path listed in ignore_paths is skipped by the tween.

    apply_path_filter runs (and matches), so build_criteria is never
    consulted and no Strict-Transport-Security header is added.
    """
    # Patch before the import below so the imported names are the mocks.
    mocker.patch('pyramid_secure_response.hsts_support.apply_path_filter',
                 return_value=True)
    mocker.patch('pyramid_secure_response.hsts_support.build_criteria',
                 return_value=[])
    from pyramid.response import Response
    from pyramid_secure_response.hsts_support import (
        apply_path_filter,
        build_criteria,
    )
    dummy_request.path = '/humans.txt'
    dummy_request.registry.settings = {
        'pyramid_secure_response.hsts_support.enabled': 'True',
        'pyramid_secure_response.hsts_support.ignore_paths': '\n/humans.txt\n'
    }
    handler_stub = mocker.stub(name='handler_stub')
    handler_stub.return_value = Response(status=200)
    hsts_support_tween = tween(handler_stub, dummy_request.registry)
    res = hsts_support_tween(dummy_request)
    # pylint: disable=no-member
    assert 1 == handler_stub.call_count
    assert 1 == apply_path_filter.call_count
    # The newline-separated ignore_paths setting is parsed into a tuple.
    apply_path_filter.assert_called_once_with(
        dummy_request, ('/humans.txt',))
    assert 0 == build_criteria.call_count
    assert 'Strict-Transport-Security' not in res.headers
def test_hsts_tween_with_none_ssl_request(mocker, dummy_request):
    """A plain-HTTP request gets no Strict-Transport-Security header.

    The real helpers are spied on (not replaced), so the tween runs its
    genuine logic; the criteria simply do not match an http:// URL.
    """
    from pyramid_secure_response import hsts_support
    mocker.spy(hsts_support, 'apply_path_filter')
    mocker.spy(hsts_support, 'build_criteria')
    from pyramid.response import Response
    from pyramid_secure_response.hsts_support import (
        apply_path_filter,
        build_criteria,
    )
    from pyramid_secure_response.util import get_config
    dummy_request.url = 'http://example.org/'
    dummy_request.registry.settings = {
        'pyramid_secure_response.hsts_support.enabled': 'True',
        'pyramid_secure_response.hsts_support.max_age': '31536000',
        'pyramid_secure_response.hsts_support.include_subdomains': 'True',
        'pyramid_secure_response.hsts_support.preload': 'True',
        'pyramid_secure_response.hsts_support.proto_header': '',
        'pyramid_secure_response.hsts_support.ignore_paths': '\n',
    }
    handler_stub = mocker.stub(name='handler_stub')
    handler_stub.return_value = Response(status=200)
    hsts_support_tween = tween(handler_stub, dummy_request.registry)
    res = hsts_support_tween(dummy_request)
    # pylint: disable=no-member
    assert 1 == handler_stub.call_count
    assert 1 == apply_path_filter.call_count
    apply_path_filter.assert_called_once_with(dummy_request, tuple())
    assert 1 == build_criteria.call_count
    config = get_config(dummy_request.registry)
    build_criteria.assert_called_once_with(
        dummy_request, proto_header=config.hsts_support.proto_header)
    assert 'Strict-Transport-Security' not in res.headers
def test_hsts_tween_with_ssl_request_plus_none_ssl_extra_header(
        mocker, dummy_request):
    """An https request is still rejected when the proto header says http.

    proto_header points at X-Forwarded-Proto, which carries 'http', so the
    criteria fail and no Strict-Transport-Security header is added.
    """
    from pyramid_secure_response import hsts_support
    mocker.spy(hsts_support, 'apply_path_filter')
    mocker.spy(hsts_support, 'build_criteria')
    from pyramid.response import Response
    from pyramid_secure_response.hsts_support import (
        apply_path_filter,
        build_criteria,
    )
    from pyramid_secure_response.util import get_config
    dummy_request.url = 'https://example.org/'
    # The forwarded protocol contradicts the request URL on purpose.
    dummy_request.headers['X-Forwarded-Proto'] = 'http'
    dummy_request.registry.settings = {
        'pyramid_secure_response.hsts_support.enabled': 'True',
        'pyramid_secure_response.hsts_support.max_age': '3600',
        'pyramid_secure_response.hsts_support.include_subdomains': 'True',
        'pyramid_secure_response.hsts_support.preload': 'True',
        'pyramid_secure_response.hsts_support.proto_header':
            'X-Forwarded-Proto',
        'pyramid_secure_response.hsts_support.ignore_paths': '\n',
    }
    handler_stub = mocker.stub(name='handler_stub')
    handler_stub.return_value = Response(status=200)
    hsts_support_tween = tween(handler_stub, dummy_request.registry)
    res = hsts_support_tween(dummy_request)
    # pylint: disable=no-member
    assert 1 == handler_stub.call_count
    assert 1 == apply_path_filter.call_count
    apply_path_filter.assert_called_once_with(dummy_request, tuple())
    assert 1 == build_criteria.call_count
    config = get_config(dummy_request.registry)
    build_criteria.assert_called_once_with(
        dummy_request, proto_header=config.hsts_support.proto_header)
    assert 'Strict-Transport-Security' not in res.headers
def test_hsts_tween_with_ssl_request(mocker, dummy_request):
    """A genuine https request receives the Strict-Transport-Security header.

    With max_age=300 and both include_subdomains and preload enabled, the
    header value is 'max-age=300; includeSubDomains; preload'.
    """
    from pyramid_secure_response import hsts_support
    mocker.spy(hsts_support, 'apply_path_filter')
    mocker.spy(hsts_support, 'build_criteria')
    from pyramid.response import Response
    from pyramid_secure_response.hsts_support import (
        apply_path_filter,
        build_criteria,
    )
    from pyramid_secure_response.util import get_config
    dummy_request.url = 'https://example.org/'
    dummy_request.registry.settings = {
        'pyramid_secure_response.hsts_support.enabled': 'True',
        'pyramid_secure_response.hsts_support.max_age': '300',  # 5 minutes.
        'pyramid_secure_response.hsts_support.include_subdomains': 'True',
        'pyramid_secure_response.hsts_support.preload': 'True',
        'pyramid_secure_response.hsts_support.proto_header': '',
        'pyramid_secure_response.hsts_support.ignore_paths': '\n',
    }
    handler_stub = mocker.stub(name='handler_stub')
    handler_stub.return_value = Response(status=200)
    hsts_support_tween = tween(handler_stub, dummy_request.registry)
    res = hsts_support_tween(dummy_request)
    # pylint: disable=no-member
    assert 1 == handler_stub.call_count
    assert 1 == apply_path_filter.call_count
    apply_path_filter.assert_called_once_with(dummy_request, tuple())
    assert 1 == build_criteria.call_count
    config = get_config(dummy_request.registry)
    build_criteria.assert_called_once_with(
        dummy_request, proto_header=config.hsts_support.proto_header)
    assert 'Strict-Transport-Security' in res.headers
    assert 'max-age=300; includeSubDomains; preload' == \
        res.headers['Strict-Transport-Security']
def test_hsts_tween_with_ssl_request_plus_extra_header_check(
        mocker, dummy_request):
    """https request plus an agreeing X-Forwarded-Proto gets the header.

    Both the URL scheme and the configured proto_header report https, so
    the header is set to 'max-age=604800; includeSubDomains; preload'.
    """
    from pyramid_secure_response import hsts_support
    mocker.spy(hsts_support, 'apply_path_filter')
    mocker.spy(hsts_support, 'build_criteria')
    from pyramid.response import Response
    from pyramid_secure_response.hsts_support import (
        apply_path_filter,
        build_criteria,
    )
    from pyramid_secure_response.util import get_config
    dummy_request.url = 'https://example.org/'
    dummy_request.headers['X-Forwarded-Proto'] = 'https'
    dummy_request.registry.settings = {
        'pyramid_secure_response.hsts_support.enabled': 'True',
        'pyramid_secure_response.hsts_support.max_age': '604800',  # 1 week
        'pyramid_secure_response.hsts_support.include_subdomains': 'True',
        'pyramid_secure_response.hsts_support.preload': 'True',
        'pyramid_secure_response.hsts_support.proto_header':
            'X-Forwarded-Proto',
        'pyramid_secure_response.hsts_support.ignore_paths': '\n',
    }
    handler_stub = mocker.stub(name='handler_stub')
    handler_stub.return_value = Response(status=200)
    hsts_support_tween = tween(handler_stub, dummy_request.registry)
    res = hsts_support_tween(dummy_request)
    # pylint: disable=no-member
    assert 1 == handler_stub.call_count
    assert 1 == apply_path_filter.call_count
    apply_path_filter.assert_called_once_with(dummy_request, tuple())
    assert 1 == build_criteria.call_count
    config = get_config(dummy_request.registry)
    build_criteria.assert_called_once_with(
        dummy_request, proto_header=config.hsts_support.proto_header)
    assert 'Strict-Transport-Security' in res.headers
    assert 'max-age=604800; includeSubDomains; preload' == \
        res.headers['Strict-Transport-Security']
| 37.164179
| 78
| 0.740161
| 1,243
| 9,960
| 5.556718
| 0.083669
| 0.116259
| 0.145939
| 0.144781
| 0.920805
| 0.908643
| 0.891994
| 0.857246
| 0.850007
| 0.850007
| 0
| 0.01086
| 0.167972
| 9,960
| 267
| 79
| 37.303371
| 0.822614
| 0.020281
| 0
| 0.727723
| 0
| 0
| 0.248359
| 0.177267
| 0
| 0
| 0
| 0
| 0.178218
| 1
| 0.039604
| false
| 0
| 0.128713
| 0
| 0.168317
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
237c1b3acee43b916550b7ee38befd21304e4b7e
| 137
|
py
|
Python
|
week11/helloWorld.py
|
kmcooper/BMI8540
|
8becb1b2bdf4199684e470e09f0f4c8b93967e08
|
[
"MIT"
] | null | null | null |
week11/helloWorld.py
|
kmcooper/BMI8540
|
8becb1b2bdf4199684e470e09f0f4c8b93967e08
|
[
"MIT"
] | null | null | null |
week11/helloWorld.py
|
kmcooper/BMI8540
|
8becb1b2bdf4199684e470e09f0f4c8b93967e08
|
[
"MIT"
] | 1
|
2021-04-05T22:42:48.000Z
|
2021-04-05T22:42:48.000Z
|
#!/usr/bin/python3.7
# This program prints out Hello World!
# Prints out Hello World
# Thats it. Thats the code.
# NOTE(review): the original shebang was '#/usr/bin/python3.7' (missing the
# '!') and was not on line 1; an interpreter line is only honoured as the
# very first line of the file and must start with '#!'.
print("Hello world!")
| 17.125
| 38
| 0.708029
| 22
| 137
| 4.409091
| 0.681818
| 0.309278
| 0.28866
| 0.391753
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.017699
| 0.175182
| 137
| 7
| 39
| 19.571429
| 0.840708
| 0.751825
| 0
| 0
| 0
| 0
| 0.413793
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
88d186180e893221bb313edf00982d1a6bb4683e
| 5,101
|
py
|
Python
|
rvpvp/isa/rvv/vmfxx_vf.py
|
ultrafive/riscv-pvp
|
843e38422c3d545352b955764927d5e7847e5453
|
[
"Unlicense"
] | 5
|
2021-05-10T09:57:00.000Z
|
2021-10-05T14:39:20.000Z
|
rvpvp/isa/rvv/vmfxx_vf.py
|
ultrafive/riscv-pvp
|
843e38422c3d545352b955764927d5e7847e5453
|
[
"Unlicense"
] | null | null | null |
rvpvp/isa/rvv/vmfxx_vf.py
|
ultrafive/riscv-pvp
|
843e38422c3d545352b955764927d5e7847e5453
|
[
"Unlicense"
] | 1
|
2021-05-14T20:24:11.000Z
|
2021-05-14T20:24:11.000Z
|
from ...isa.inst import *
import numpy as np
class Vmfeq_vf(Inst):
    """Golden model for ``vmfeq.vf``: element i becomes (rs1 == vs2[i])."""
    name = 'vmfeq.vf'

    def golden(self):
        """Return the packed result bits, or 0 when no ``vs2`` operand is set.

        Starts from the packed bits in ``orig`` and overwrites each active
        element in ``[vstart, vl)`` with the comparison outcome.
        """
        if 'vs2' not in self:
            return 0
        bits = np.unpackbits(self['orig'], bitorder='little')
        start = self['vstart'] if 'vstart' in self else 0
        vl = self['vl']
        if 'mask' in self:
            active = np.unpackbits(self['mask'], bitorder='little')[start:vl]
        elif vl >= start:
            # No mask operand: every body element is active.
            active = np.ones(vl - start, dtype=np.uint8)
        for idx in range(start, vl):
            if active[idx - start] == 1:
                bits[idx] = self['rs1'] == self['vs2'][idx]
        return np.packbits(bits, bitorder='little')
class Vmfne_vf(Inst):
    """Golden model for ``vmfne.vf``: element i becomes (rs1 != vs2[i])."""
    name = 'vmfne.vf'

    def golden(self):
        """Return the packed result bits, or 0 when no ``vs2`` operand is set.

        Starts from the packed bits in ``orig`` and overwrites each active
        element in ``[vstart, vl)`` with the comparison outcome.
        """
        if 'vs2' not in self:
            return 0
        bits = np.unpackbits(self['orig'], bitorder='little')
        start = self['vstart'] if 'vstart' in self else 0
        vl = self['vl']
        if 'mask' in self:
            active = np.unpackbits(self['mask'], bitorder='little')[start:vl]
        elif vl >= start:
            # No mask operand: every body element is active.
            active = np.ones(vl - start, dtype=np.uint8)
        for idx in range(start, vl):
            if active[idx - start] == 1:
                bits[idx] = self['rs1'] != self['vs2'][idx]
        return np.packbits(bits, bitorder='little')
class Vmflt_vf(Inst):
    """Golden model for ``vmflt.vf``: element i becomes (vs2[i] < rs1)."""
    name = 'vmflt.vf'

    def golden(self):
        """Return the packed result bits, or 0 when no ``vs2`` operand is set.

        Starts from the packed bits in ``orig`` and overwrites each active
        element in ``[vstart, vl)`` with the comparison outcome.
        """
        if 'vs2' not in self:
            return 0
        bits = np.unpackbits(self['orig'], bitorder='little')
        start = self['vstart'] if 'vstart' in self else 0
        vl = self['vl']
        if 'mask' in self:
            active = np.unpackbits(self['mask'], bitorder='little')[start:vl]
        elif vl >= start:
            # No mask operand: every body element is active.
            active = np.ones(vl - start, dtype=np.uint8)
        for idx in range(start, vl):
            if active[idx - start] == 1:
                bits[idx] = self['vs2'][idx] < self['rs1']
        return np.packbits(bits, bitorder='little')
class Vmfle_vf(Inst):
    """Golden model for ``vmfle.vf``: element i becomes (vs2[i] <= rs1)."""
    name = 'vmfle.vf'

    def golden(self):
        """Return the packed result bits, or 0 when no ``vs2`` operand is set.

        Starts from the packed bits in ``orig`` and overwrites each active
        element in ``[vstart, vl)`` with the comparison outcome.
        """
        if 'vs2' not in self:
            return 0
        bits = np.unpackbits(self['orig'], bitorder='little')
        start = self['vstart'] if 'vstart' in self else 0
        vl = self['vl']
        if 'mask' in self:
            active = np.unpackbits(self['mask'], bitorder='little')[start:vl]
        elif vl >= start:
            # No mask operand: every body element is active.
            active = np.ones(vl - start, dtype=np.uint8)
        for idx in range(start, vl):
            if active[idx - start] == 1:
                bits[idx] = self['vs2'][idx] <= self['rs1']
        return np.packbits(bits, bitorder='little')
class Vmfgt_vf(Inst):
    """Golden model for ``vmfgt.vf``: element i becomes (vs2[i] > rs1)."""
    name = 'vmfgt.vf'

    def golden(self):
        """Return the packed result bits, or 0 when no ``vs2`` operand is set.

        Starts from the packed bits in ``orig`` and overwrites each active
        element in ``[vstart, vl)`` with the comparison outcome.
        """
        if 'vs2' not in self:
            return 0
        bits = np.unpackbits(self['orig'], bitorder='little')
        start = self['vstart'] if 'vstart' in self else 0
        vl = self['vl']
        if 'mask' in self:
            active = np.unpackbits(self['mask'], bitorder='little')[start:vl]
        elif vl >= start:
            # No mask operand: every body element is active.
            active = np.ones(vl - start, dtype=np.uint8)
        for idx in range(start, vl):
            if active[idx - start] == 1:
                bits[idx] = self['vs2'][idx] > self['rs1']
        return np.packbits(bits, bitorder='little')
class Vmfge_vf(Inst):
    """Golden model for ``vmfge.vf``: element i becomes (vs2[i] >= rs1)."""
    name = 'vmfge.vf'

    def golden(self):
        """Return the packed result bits, or 0 when no ``vs2`` operand is set.

        Starts from the packed bits in ``orig`` and overwrites each active
        element in ``[vstart, vl)`` with the comparison outcome.
        """
        if 'vs2' not in self:
            return 0
        bits = np.unpackbits(self['orig'], bitorder='little')
        start = self['vstart'] if 'vstart' in self else 0
        vl = self['vl']
        if 'mask' in self:
            active = np.unpackbits(self['mask'], bitorder='little')[start:vl]
        elif vl >= start:
            # No mask operand: every body element is active.
            active = np.ones(vl - start, dtype=np.uint8)
        for idx in range(start, vl):
            if active[idx - start] == 1:
                bits[idx] = self['vs2'][idx] >= self['rs1']
        return np.packbits(bits, bitorder='little')
| 26.02551
| 89
| 0.440306
| 542
| 5,101
| 4.132841
| 0.077491
| 0.064286
| 0.085714
| 0.040179
| 0.929911
| 0.929911
| 0.929911
| 0.929911
| 0.929911
| 0.929911
| 0
| 0.014291
| 0.423838
| 5,101
| 195
| 90
| 26.158974
| 0.747873
| 0
| 0
| 0.84375
| 0
| 0
| 0.078839
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046875
| false
| 0
| 0.015625
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
88e9839f321bc9cffd29102df066220f61069f15
| 145
|
py
|
Python
|
choir/modeling/meta_arch/__init__.py
|
scwangdyd/large_vocabulary_hoi_detection
|
db7a4397c3050b1bf9a3f7473edf125e2b1046c4
|
[
"MIT"
] | 9
|
2021-11-13T17:14:07.000Z
|
2022-03-29T00:27:54.000Z
|
choir/modeling/meta_arch/__init__.py
|
scwangdyd/large_vocabulary_hoi_detection
|
db7a4397c3050b1bf9a3f7473edf125e2b1046c4
|
[
"MIT"
] | 1
|
2022-02-04T16:28:01.000Z
|
2022-02-04T16:28:01.000Z
|
choir/modeling/meta_arch/__init__.py
|
scwangdyd/large_vocabulary_hoi_detection
|
db7a4397c3050b1bf9a3f7473edf125e2b1046c4
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from .build import META_ARCH_REGISTRY, build_model
from .hoi_detector import HOIR
from .cascade_hoi_detector import CHOIR
| 36.25
| 50
| 0.793103
| 22
| 145
| 4.954545
| 0.681818
| 0.201835
| 0.311927
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007813
| 0.117241
| 145
| 4
| 51
| 36.25
| 0.84375
| 0.144828
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0019f7968180b909204c7c13b0c213a8f952cd40
| 29,772
|
py
|
Python
|
writer/newyork30.py
|
rayidghani/esp32
|
3b83c01e18bc952d4808ab6964625d00aab8366b
|
[
"MIT"
] | null | null | null |
writer/newyork30.py
|
rayidghani/esp32
|
3b83c01e18bc952d4808ab6964625d00aab8366b
|
[
"MIT"
] | null | null | null |
writer/newyork30.py
|
rayidghani/esp32
|
3b83c01e18bc952d4808ab6964625d00aab8366b
|
[
"MIT"
] | null | null | null |
# Code generated by font_to_py.py.
# Font: NewYork.ttf
# Cmd: ../../../micropython-font-to-py/font_to_py.py -x /System/Library/Fonts/NewYork.ttf 30 newyork30.py
# Metric accessors emitted by font_to_py.py for NewYork.ttf at 30 px.
version = '0.33'


def height():
    """Overall glyph height in pixels."""
    return 30


def baseline():
    """Pixels from the glyph top to the text baseline."""
    return 23


def max_width():
    """Width in pixels of the widest glyph."""
    return 29


def hmap():
    """True — bitmaps are horizontally mapped (per font_to_py's -x option)."""
    return True


def reverse():
    """False — bit order within bytes is not reversed."""
    return False


def monospaced():
    """False — this is a proportional font."""
    return False


def min_ch():
    """Ordinal of the first character covered (32, the space)."""
    return 32


def max_ch():
    """Ordinal of the last character covered (126, '~')."""
    return 126
_font =\
b'\x09\x00\x00\x00\x00\x00\x38\x00\x7c\x00\x7e\x00\x07\x00\x03\x00'\
b'\x01\x00\x01\x00\x01\x00\x03\x00\x06\x00\x1e\x00\x1c\x00\x18\x00'\
b'\x20\x00\x10\x00\x00\x00\x00\x00\x00\x00\x38\x00\x38\x00\x38\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x06\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\x00'\
b'\x00\x00\x38\x38\x38\x38\x38\x38\x38\x38\x10\x10\x10\x10\x10\x10'\
b'\x10\x00\x00\x00\x38\x38\x38\x00\x00\x00\x00\x00\x00\x00\x0c\x00'\
b'\x00\x00\x00\x00\x39\xc0\x39\xc0\x39\xc0\x39\xc0\x39\xc0\x10\x80'\
b'\x10\x80\x10\x80\x10\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00'\
b'\x00\x00\x04\x10\x04\x10\x04\x10\x04\x10\x04\x10\x04\x10\x04\x10'\
b'\x7f\xfe\x04\x10\x08\x10\x08\x10\x08\x20\x08\x20\x08\x20\x7f\xfe'\
b'\x08\x20\x08\x20\x08\x20\x08\x20\x08\x20\x08\x20\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0e\x00\x02\x00\x02\x00'\
b'\x02\x00\x0f\xc0\x3a\x70\x32\x30\x72\x10\x72\x10\x72\x00\x7a\x00'\
b'\x3e\x00\x3f\x00\x1f\xc0\x07\xe0\x03\xf0\x02\x78\x02\x38\x02\x38'\
b'\x42\x38\x42\x38\x62\x70\x72\xe0\x1f\x80\x02\x00\x02\x00\x02\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x17\x00\x00\x00\x00\x00\x00\x00'\
b'\x1e\x00\x80\x33\x00\x80\x21\x01\x00\x61\x81\x00\x61\x82\x00\x61'\
b'\x82\x00\x61\x84\x00\x61\x84\x00\x21\x08\x00\x33\x10\x00\x1e\x10'\
b'\xf0\x00\x21\x98\x00\x21\x08\x00\x43\x0c\x00\x43\x0c\x00\x83\x0c'\
b'\x00\x83\x0c\x01\x03\x0c\x01\x01\x08\x02\x01\x98\x02\x00\xf0\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x16\x00\x00\x00\x00\x00\x00\x00\x03\xe0\x00\x0e'\
b'\x38\x00\x0c\x1c\x00\x1c\x1c\x00\x1c\x1c\x00\x1c\x1c\x00\x1e\x18'\
b'\x00\x0f\x30\x00\x0f\xe0\x00\x07\xc0\xf0\x07\xe0\x40\x19\xf0\x40'\
b'\x38\xf8\x80\x30\x7d\x00\x70\x3e\x00\x70\x1f\x00\x70\x0f\x80\x78'\
b'\x07\xc0\x38\x1b\xe0\x1e\x20\xf0\x0f\xc0\x78\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x07\x00\x00\x00\x38\x38\x38\x38\x38\x10\x10\x10\x10\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x09\x00\x00\x80\x01\x00\x06\x00\x04\x00\x0c\x00\x18\x00\x18\x00'\
b'\x38\x00\x38\x00\x30\x00\x70\x00\x70\x00\x70\x00\x70\x00\x70\x00'\
b'\x70\x00\x70\x00\x70\x00\x30\x00\x38\x00\x38\x00\x18\x00\x0c\x00'\
b'\x0c\x00\x06\x00\x02\x00\x01\x00\x00\x00\x00\x00\x00\x00\x09\x00'\
b'\x80\x00\x40\x00\x20\x00\x10\x00\x18\x00\x1c\x00\x0c\x00\x0e\x00'\
b'\x0e\x00\x06\x00\x07\x00\x07\x00\x07\x00\x07\x00\x07\x00\x07\x00'\
b'\x07\x00\x07\x00\x06\x00\x0e\x00\x0e\x00\x0c\x00\x18\x00\x18\x00'\
b'\x30\x00\x20\x00\x40\x00\x00\x00\x00\x00\x00\x00\x0a\x00\x00\x00'\
b'\x00\x00\x0c\x00\x0c\x00\x69\x80\x7b\x80\x0c\x00\x37\x00\x6b\x80'\
b'\x4d\x00\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x01\x00'\
b'\x01\x00\x01\x00\x01\x00\x01\x00\x7f\xfc\x01\x00\x01\x00\x01\x00'\
b'\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x20\x70'\
b'\x30\x30\x10\x20\x40\x00\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x7f\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x70\x70\x70\x00'\
b'\x00\x00\x00\x00\x00\x00\x09\x00\x01\x00\x01\x00\x01\x00\x01\x00'\
b'\x02\x00\x02\x00\x02\x00\x02\x00\x04\x00\x04\x00\x04\x00\x04\x00'\
b'\x08\x00\x08\x00\x08\x00\x18\x00\x10\x00\x10\x00\x10\x00\x20\x00'\
b'\x20\x00\x20\x00\x20\x00\x40\x00\x40\x00\x40\x00\x40\x00\x00\x00'\
b'\x00\x00\x00\x00\x10\x00\x00\x00\x00\x00\x03\xc0\x0c\x70\x18\x38'\
b'\x38\x18\x38\x1c\x30\x1c\x70\x0e\x70\x0e\x70\x0e\x70\x0e\x70\x0e'\
b'\x70\x0e\x70\x0e\x70\x0e\x70\x0e\x30\x0c\x38\x1c\x18\x1c\x1c\x18'\
b'\x0e\x30\x03\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x0b\x00\x00\x00\x00\x00\x06\x00\x1e\x00\x2e\x00\x0e\x00'\
b'\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00'\
b'\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00'\
b'\x7f\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0d\x00\x00\x00\x00\x00\x0f\x80\x1f\xe0\x21\xe0\x40\xf0\x00\x70'\
b'\x00\x70\x00\x70\x00\x70\x00\x60\x00\xe0\x00\xc0\x01\x80\x01\x80'\
b'\x03\x00\x06\x00\x0c\x00\x18\x00\x10\x00\x20\x00\x7f\xf0\x7f\xf0'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0e\x00'\
b'\x00\x00\x00\x00\x07\xc0\x1f\xe0\x30\xf0\x40\x70\x00\x70\x00\x70'\
b'\x00\x70\x00\xe0\x01\x80\x0f\x80\x01\xe0\x00\xf0\x00\x70\x00\x38'\
b'\x00\x38\x70\x38\x70\x38\x70\x30\x20\x70\x30\xe0\x0f\x80\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00'\
b'\x00\x00\x00\x18\x00\x38\x00\x78\x00\xb8\x00\xb8\x01\x38\x02\x38'\
b'\x02\x38\x04\x38\x04\x38\x08\x38\x10\x38\x10\x38\x20\x38\x7f\xff'\
b'\x7f\xff\x00\x38\x00\x38\x00\x38\x00\x38\x00\x38\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0e\x00\x00\x00\x00\x00'\
b'\x0f\xf0\x0f\xf0\x10\x00\x10\x00\x10\x00\x10\x00\x20\x00\x20\x00'\
b'\x27\xc0\x38\xe0\x20\x70\x00\x78\x00\x38\x00\x38\x00\x38\x70\x38'\
b'\x70\x38\x70\x70\x60\x70\x30\xe0\x0f\x80\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00\x00\x00\x00\x78'\
b'\x01\xc0\x03\x00\x0e\x00\x0c\x00\x1c\x00\x38\x00\x38\x00\x38\x00'\
b'\x73\xe0\x74\x70\x78\x38\x70\x1c\x70\x1c\x70\x1c\x70\x1c\x30\x1c'\
b'\x38\x18\x18\x38\x0c\x70\x07\xc0\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x0d\x00\x00\x00\x00\x00\x7f\xf0\x7f\xf0'\
b'\x00\x10\x00\x20\x00\x20\x00\x60\x00\x40\x00\xc0\x00\x80\x00\x80'\
b'\x01\x80\x01\x00\x03\x00\x03\x00\x06\x00\x06\x00\x0e\x00\x0e\x00'\
b'\x0c\x00\x1c\x00\x1c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x10\x00\x00\x00\x00\x00\x0f\xe0\x18\x78\x30\x38'\
b'\x70\x1c\x70\x1c\x70\x1c\x78\x18\x3c\x38\x1f\x60\x0f\xc0\x03\xf0'\
b'\x1c\xf8\x38\x3c\x30\x1e\x70\x0e\x70\x0e\x70\x0e\x70\x0e\x38\x1c'\
b'\x1c\x38\x07\xe0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x0f\x00\x00\x00\x00\x00\x07\xc0\x1c\x60\x38\x30\x30\x38'\
b'\x70\x18\x70\x1c\x70\x1c\x70\x1c\x70\x1c\x38\x3c\x3c\x5c\x0f\x9c'\
b'\x00\x38\x00\x38\x00\x38\x00\x70\x00\x60\x00\xe0\x01\x80\x07\x00'\
b'\x3c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x07\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x38\x38\x38\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x38\x38\x38\x00\x00\x00\x00\x00\x00\x00'\
b'\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x70\x70\x70\x00\x00'\
b'\x00\x00\x00\x00\x00\x70\x70\x30\x10\x20\x20\x40\x00\x00\x00\x00'\
b'\x0f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x04\x00\x18\x00\x60\x01\x80\x06\x00\x18\x00\x60\x00'\
b'\x30\x00\x0c\x00\x03\x00\x00\x80\x00\x60\x00\x18\x00\x04\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x3f\xfc\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x3f\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x40\x00'\
b'\x30\x00\x0c\x00\x03\x00\x00\xc0\x00\x30\x00\x0c\x00\x18\x00\x20'\
b'\x00\xc0\x03\x00\x0c\x00\x30\x00\x40\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x00\x00'\
b'\x38\x00\x7c\x00\x7e\x00\x07\x00\x03\x00\x01\x00\x01\x00\x01\x00'\
b'\x03\x00\x06\x00\x1e\x00\x1c\x00\x18\x00\x20\x00\x10\x00\x00\x00'\
b'\x00\x00\x00\x00\x38\x00\x38\x00\x38\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x1a\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x1f\xc0\x00\x00\xe0\x70\x00\x01\x00'\
b'\x08\x00\x06\x00\x04\x00\x0c\x00\x02\x00\x08\x00\x01\x00\x10\x1e'\
b'\x21\x00\x30\x39\x60\x80\x20\x70\xc0\x80\x20\xe0\xc0\x80\x40\xe0'\
b'\xc0\x80\x40\xe0\xc0\x80\x41\xc1\xc0\x80\x41\xc1\x80\x80\x41\xc1'\
b'\x81\x00\x41\xc1\x81\x00\x41\xc3\x82\x00\x41\xc3\x86\x00\x20\xed'\
b'\x8c\x00\x20\x70\xf0\x00\x30\x00\x00\x00\x10\x00\x00\x00\x08\x00'\
b'\x00\x00\x06\x00\x00\x00\x03\x81\x80\x00\x00\xfe\x00\x00\x00\x00'\
b'\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x00\xc0\x00\x00\xc0\x00'\
b'\x00\xe0\x00\x01\xe0\x00\x01\xe0\x00\x02\x70\x00\x02\x70\x00\x02'\
b'\x70\x00\x04\x38\x00\x04\x38\x00\x04\x38\x00\x08\x1c\x00\x08\x1c'\
b'\x00\x0f\xfc\x00\x10\x0e\x00\x10\x0e\x00\x10\x0e\x00\x20\x07\x00'\
b'\x20\x07\x00\x60\x07\x80\xf8\x1f\xc0\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x12\x00'\
b'\x00\x00\x00\x00\x00\x00\x7f\xf0\x00\x1c\x1c\x00\x1c\x0e\x00\x1c'\
b'\x07\x00\x1c\x07\x00\x1c\x07\x00\x1c\x07\x00\x1c\x06\x00\x1c\x0e'\
b'\x00\x1c\x18\x00\x1f\xf8\x00\x1c\x1e\x00\x1c\x07\x00\x1c\x03\x80'\
b'\x1c\x03\x80\x1c\x03\x80\x1c\x03\x80\x1c\x03\x00\x1c\x07\x00\x1c'\
b'\x1c\x00\x7f\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x13\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\xfe\x00\x07\x03\x80\x0e\x01\xc0\x1c\x00\xc0\x38\x00'\
b'\x40\x38\x00\x40\x30\x00\x00\x70\x00\x00\x70\x00\x00\x70\x00\x00'\
b'\x70\x00\x00\x70\x00\x00\x70\x00\x00\x70\x00\x00\x38\x00\x00\x38'\
b'\x00\x40\x1c\x00\x40\x1c\x00\xc0\x0e\x01\xc0\x03\x83\x80\x00\xfe'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x15\x00\x00\x00\x00\x00\x00\x00\x7f\xf0'\
b'\x00\x1c\x1e\x00\x1c\x07\x00\x1c\x03\xc0\x1c\x01\xc0\x1c\x00\xe0'\
b'\x1c\x00\xe0\x1c\x00\x70\x1c\x00\x70\x1c\x00\x70\x1c\x00\x70\x1c'\
b'\x00\x70\x1c\x00\x70\x1c\x00\x70\x1c\x00\xe0\x1c\x00\xe0\x1c\x01'\
b'\xc0\x1c\x03\x80\x1c\x07\x00\x1c\x1c\x00\x7f\xf0\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x11\x00\x00\x00\x00\x00\x00\x00\x7f\xfe\x00\x1c\x0e\x00'\
b'\x1c\x02\x00\x1c\x02\x00\x1c\x02\x00\x1c\x00\x00\x1c\x00\x00\x1c'\
b'\x08\x00\x1c\x08\x00\x1c\x18\x00\x1f\xf8\x00\x1c\x18\x00\x1c\x08'\
b'\x00\x1c\x00\x00\x1c\x00\x00\x1c\x01\x00\x1c\x01\x00\x1c\x01\x00'\
b'\x1c\x03\x00\x1c\x07\x00\x7f\xff\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\x00'\
b'\x00\x00\x00\x00\x7f\xfe\x1c\x0e\x1c\x06\x1c\x02\x1c\x02\x1c\x00'\
b'\x1c\x00\x1c\x10\x1c\x10\x1c\x10\x1f\xf0\x1c\x10\x1c\x10\x1c\x00'\
b'\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x7f\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x14\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\xfe\x00\x07\x07\x80\x0e\x01\x80\x1c\x00\x80'\
b'\x38\x00\x80\x38\x00\x80\x30\x00\x00\x70\x00\x00\x70\x00\x00\x70'\
b'\x00\x00\x70\x00\x00\x70\x07\xf0\x70\x01\xc0\x70\x01\xc0\x38\x01'\
b'\xc0\x38\x01\xc0\x18\x01\xc0\x1c\x01\xc0\x0e\x01\xc0\x03\x83\x80'\
b'\x00\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x16\x00\x00\x00\x00\x00\x00\x00'\
b'\x7f\x03\xf8\x1c\x00\xe0\x1c\x00\xe0\x1c\x00\xe0\x1c\x00\xe0\x1c'\
b'\x00\xe0\x1c\x00\xe0\x1c\x00\xe0\x1c\x00\xe0\x1c\x00\xe0\x1f\xff'\
b'\xe0\x1c\x00\xe0\x1c\x00\xe0\x1c\x00\xe0\x1c\x00\xe0\x1c\x00\xe0'\
b'\x1c\x00\xe0\x1c\x00\xe0\x1c\x00\xe0\x1c\x00\xe0\x7f\x03\xf8\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x09\x00\x00\x00\x00\x00\x7f\x00\x1c\x00\x1c\x00'\
b'\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00'\
b'\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00'\
b'\x1c\x00\x7f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x0a\x00\x00\x00\x00\x00\x3f\x80\x0e\x00\x0e\x00\x0e\x00'\
b'\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00'\
b'\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00'\
b'\x0c\x00\x1c\x00\x18\x00\x18\x00\x20\x00\x40\x00\x00\x00\x00\x00'\
b'\x12\x00\x00\x00\x00\x00\x00\x00\x7f\x07\xc0\x1c\x03\x00\x1c\x02'\
b'\x00\x1c\x04\x00\x1c\x08\x00\x1c\x10\x00\x1c\x10\x00\x1c\x20\x00'\
b'\x1c\x40\x00\x1c\xe0\x00\x1f\xe0\x00\x1c\xf0\x00\x1c\x78\x00\x1c'\
b'\x38\x00\x1c\x3c\x00\x1c\x1c\x00\x1c\x1e\x00\x1c\x0f\x00\x1c\x07'\
b'\x00\x1c\x07\x80\x7f\x03\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00'\
b'\x00\x00\x7f\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00'\
b'\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00'\
b'\x1c\x02\x1c\x02\x1c\x06\x1c\x06\x1c\x0e\x7f\xfe\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x19\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x7e\x00\x3f\x00\x1e\x00\x3c\x00\x1e\x00\x3c\x00'\
b'\x17\x00\x5c\x00\x17\x00\x5c\x00\x17\x00\x5c\x00\x13\x80\x9c\x00'\
b'\x13\x80\x9c\x00\x13\x80\x9c\x00\x11\xc1\x1c\x00\x11\xc1\x1c\x00'\
b'\x10\xe2\x1c\x00\x10\xe2\x1c\x00\x10\xe2\x1c\x00\x10\x74\x1c\x00'\
b'\x10\x74\x1c\x00\x10\x74\x1c\x00\x10\x38\x1c\x00\x10\x38\x1c\x00'\
b'\x10\x38\x1c\x00\x7c\x10\x7f\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x13\x00\x00\x00\x00\x00\x00\x00\xf8\x03\xe0\x3c'\
b'\x00\x80\x3e\x00\x80\x2e\x00\x80\x2f\x00\x80\x27\x80\x80\x23\x80'\
b'\x80\x23\xc0\x80\x21\xe0\x80\x20\xe0\x80\x20\xf0\x80\x20\x70\x80'\
b'\x20\x78\x80\x20\x3c\x80\x20\x1c\x80\x20\x1e\x80\x20\x0f\x80\x20'\
b'\x07\x80\x20\x07\x80\x20\x03\x80\xf8\x01\x80\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x16\x00\x00\x00\x00\x00\x00\x00\x00\xfc\x00\x03\x07\x00\x0e\x01'\
b'\xc0\x1c\x00\xe0\x18\x00\x60\x38\x00\x70\x30\x00\x70\x70\x00\x38'\
b'\x70\x00\x38\x70\x00\x38\x70\x00\x38\x70\x00\x38\x70\x00\x38\x70'\
b'\x00\x38\x38\x00\x30\x38\x00\x70\x18\x00\x60\x1c\x00\xe0\x0e\x01'\
b'\xc0\x03\x83\x00\x00\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11\x00\x00\x00'\
b'\x00\x00\x00\x00\x7f\xf0\x00\x1c\x3c\x00\x1c\x0e\x00\x1c\x0e\x00'\
b'\x1c\x07\x00\x1c\x07\x00\x1c\x07\x00\x1c\x07\x00\x1c\x07\x00\x1c'\
b'\x0e\x00\x1c\x0e\x00\x1c\x38\x00\x1f\xe0\x00\x1c\x00\x00\x1c\x00'\
b'\x00\x1c\x00\x00\x1c\x00\x00\x1c\x00\x00\x1c\x00\x00\x1c\x00\x00'\
b'\x7f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x16\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\xfc\x00\x03\x07\x00\x0e\x01\xc0\x1c\x00\xe0\x18\x00\x60\x38'\
b'\x00\x70\x30\x00\x70\x70\x00\x38\x70\x00\x38\x70\x00\x38\x70\x00'\
b'\x38\x70\x00\x38\x70\x00\x38\x70\x00\x38\x38\x00\x30\x38\x00\x70'\
b'\x18\x00\x60\x1c\x00\xe0\x0e\x01\xc0\x03\x83\x00\x00\xfc\x00\x00'\
b'\x1e\x00\x00\x0f\x00\x00\x07\xc0\x00\x01\xf8\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\x7f\xf0\x00\x1c'\
b'\x3c\x00\x1c\x0e\x00\x1c\x0f\x00\x1c\x07\x00\x1c\x07\x00\x1c\x07'\
b'\x00\x1c\x07\x00\x1c\x06\x00\x1c\x0e\x00\x1c\x38\x00\x1f\xf0\x00'\
b'\x1c\x78\x00\x1c\x38\x00\x1c\x3c\x00\x1c\x1c\x00\x1c\x0e\x00\x1c'\
b'\x0e\x00\x1c\x07\x00\x1c\x07\x00\x7f\x03\xc0\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0f\x00\x00\x00\x00\x00\x07\xe0\x1c\x38\x38\x18\x70\x08\x70\x08'\
b'\x70\x08\x78\x00\x7c\x00\x3e\x00\x1f\xc0\x0f\xf0\x01\xf8\x00\x78'\
b'\x00\x3c\x00\x1c\x00\x1c\x40\x1c\x40\x18\x60\x38\x78\x70\x0f\xc0'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x11\x00'\
b'\x00\x00\x00\x00\x00\x00\xff\xff\x80\xe1\xc3\x80\xc1\xc1\x80\x81'\
b'\xc0\x80\x81\xc0\x80\x01\xc0\x00\x01\xc0\x00\x01\xc0\x00\x01\xc0'\
b'\x00\x01\xc0\x00\x01\xc0\x00\x01\xc0\x00\x01\xc0\x00\x01\xc0\x00'\
b'\x01\xc0\x00\x01\xc0\x00\x01\xc0\x00\x01\xc0\x00\x01\xc0\x00\x01'\
b'\xc0\x00\x07\xf0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x13\x00\x00\x00\x00\x00'\
b'\x00\x00\xfe\x03\xe0\x38\x00\x80\x38\x00\x80\x38\x00\x80\x38\x00'\
b'\x80\x38\x00\x80\x38\x00\x80\x38\x00\x80\x38\x00\x80\x38\x00\x80'\
b'\x38\x00\x80\x38\x00\x80\x38\x00\x80\x38\x00\x80\x38\x00\x80\x38'\
b'\x00\x80\x1c\x01\x00\x1c\x01\x00\x0e\x02\x00\x07\x04\x00\x01\xf8'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x12\x00\x00\x00\x00\x00\x00\x00\xfe\x07'\
b'\xc0\x78\x01\x80\x38\x01\x00\x38\x01\x00\x1c\x02\x00\x1c\x02\x00'\
b'\x1c\x02\x00\x0e\x04\x00\x0e\x04\x00\x0e\x04\x00\x07\x08\x00\x07'\
b'\x08\x00\x07\x08\x00\x03\x90\x00\x03\x90\x00\x03\x90\x00\x01\xe0'\
b'\x00\x01\xe0\x00\x01\xc0\x00\x00\xc0\x00\x00\xc0\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x1d\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\x0f\xe0\xf8'\
b'\x3c\x03\x80\x60\x3c\x03\x80\x60\x1c\x03\x80\x40\x1c\x05\xc0\x40'\
b'\x1c\x05\xc0\x40\x0e\x05\xc0\x80\x0e\x08\xc0\x80\x0e\x08\xe0\x80'\
b'\x07\x08\xe1\x00\x07\x08\x61\x00\x07\x10\x71\x00\x07\x10\x72\x00'\
b'\x03\x90\x72\x00\x03\xa0\x32\x00\x03\xa0\x3a\x00\x01\xe0\x3c\x00'\
b'\x01\xc0\x3c\x00\x01\xc0\x1c\x00\x00\xc0\x18\x00\x00\xc0\x18\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x12\x00\x00\x00'\
b'\x00\x00\x00\x00\xfe\x07\xc0\x3c\x03\x00\x1c\x02\x00\x1e\x02\x00'\
b'\x0e\x04\x00\x0f\x08\x00\x07\x08\x00\x03\x90\x00\x03\xe0\x00\x01'\
b'\xe0\x00\x00\xe0\x00\x01\xe0\x00\x02\x70\x00\x02\x78\x00\x04\x38'\
b'\x00\x04\x1c\x00\x08\x1c\x00\x10\x0e\x00\x10\x0f\x00\x30\x07\x00'\
b'\xf8\x1f\xc0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x13\x00\x00\x00\x00\x00\x00\x00'\
b'\xff\x07\xe0\x3c\x01\x80\x1c\x01\x00\x0e\x02\x00\x0e\x02\x00\x07'\
b'\x04\x00\x07\x04\x00\x03\x88\x00\x03\xd0\x00\x01\xd0\x00\x00\xe0'\
b'\x00\x00\xe0\x00\x00\xe0\x00\x00\xe0\x00\x00\xe0\x00\x00\xe0\x00'\
b'\x00\xe0\x00\x00\xe0\x00\x00\xe0\x00\x00\xe0\x00\x03\xf8\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x11\x00\x00\x00\x00\x00\x00\x00\x3f\xff\x00\x38'\
b'\x0f\x00\x20\x0e\x00\x20\x1c\x00\x00\x3c\x00\x00\x38\x00\x00\x78'\
b'\x00\x00\x70\x00\x00\xe0\x00\x01\xe0\x00\x01\xc0\x00\x03\xc0\x00'\
b'\x03\x80\x00\x07\x80\x00\x0f\x00\x00\x0e\x00\x00\x1e\x01\x00\x1c'\
b'\x01\x00\x3c\x03\x00\x78\x07\x00\x7f\xff\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0a\x00\x1f\x80\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00'\
b'\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00'\
b'\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00'\
b'\x1c\x00\x1c\x00\x1c\x00\x1f\x80\x00\x00\x00\x00\x00\x00\x09\x00'\
b'\x40\x00\x40\x00\x40\x00\x40\x00\x20\x00\x20\x00\x20\x00\x20\x00'\
b'\x10\x00\x10\x00\x10\x00\x10\x00\x08\x00\x08\x00\x08\x00\x08\x00'\
b'\x04\x00\x04\x00\x04\x00\x04\x00\x02\x00\x02\x00\x02\x00\x02\x00'\
b'\x01\x00\x01\x00\x01\x00\x00\x00\x00\x00\x00\x00\x0a\x00\x7e\x00'\
b'\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00'\
b'\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00'\
b'\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00\x0e\x00'\
b'\x0e\x00\x7e\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00\x00\x00'\
b'\x01\x00\x02\x80\x02\x40\x04\x40\x08\x20\x08\x10\x10\x10\x20\x08'\
b'\x20\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0e\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\xff\xfc\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x0e\x00\x00\x00\x00\x00\x00\x00\x0e\x00'\
b'\x06\x00\x06\x00\x03\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0e\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x1f\x80\x30\xe0\x70\x70\x70\x70'\
b'\x00\x70\x00\x70\x07\xf0\x18\x70\x30\x70\x70\x70\x70\x70\x70\x70'\
b'\x38\xf0\x1f\x3c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x10\x00\x00\x00\x00\x00\xf8\x00\x38\x00\x38\x00\x38\x00'\
b'\x38\x00\x38\x00\x38\x00\x39\xf0\x3e\x38\x38\x1c\x38\x1c\x38\x0e'\
b'\x38\x0e\x38\x0e\x38\x0e\x38\x0e\x38\x0e\x38\x1c\x38\x1c\x34\x38'\
b'\x23\xe0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x0d\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x07\xc0\x1c\x70\x38\x70\x38\x70\x70\x00\x70\x00'\
b'\x70\x00\x70\x00\x70\x00\x70\x00\x38\x00\x3c\x00\x1e\x10\x07\xe0'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00'\
b'\x00\x00\x00\x00\x00\x7c\x00\x1c\x00\x1c\x00\x1c\x00\x1c\x00\x1c'\
b'\x00\x1c\x07\xdc\x1c\x3c\x38\x1c\x38\x1c\x70\x1c\x70\x1c\x70\x1c'\
b'\x70\x1c\x70\x1c\x70\x1c\x38\x1c\x38\x1c\x1c\x7c\x0f\x9f\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0e\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x07\xc0\x1c\x70\x38\x30\x30\x38\x70\x38\x7f\xf8\x70\x00\x70\x00'\
b'\x70\x00\x78\x00\x38\x00\x3c\x00\x1e\x10\x07\xe0\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x09\x00\x00\x00\x00\x00'\
b'\x0f\x00\x1f\x80\x3b\x80\x38\x00\x38\x00\x38\x00\x38\x00\x7f\x00'\
b'\x38\x00\x38\x00\x38\x00\x38\x00\x38\x00\x38\x00\x38\x00\x38\x00'\
b'\x38\x00\x38\x00\x38\x00\x38\x00\xfc\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xfe\x1c\x60'\
b'\x38\x30\x38\x38\x38\x38\x38\x38\x18\x30\x0c\x70\x03\xc0\x1c\x00'\
b'\x30\x00\x30\x00\x3f\xf0\x1f\xf8\x18\x1c\x30\x0c\x70\x0c\x70\x0c'\
b'\x70\x18\x3c\x30\x0f\xc0\x10\x00\x00\x00\x00\x00\xf8\x00\x38\x00'\
b'\x38\x00\x38\x00\x38\x00\x38\x00\x38\x00\x39\xf0\x3e\x38\x38\x1c'\
b'\x38\x1c\x38\x1c\x38\x1c\x38\x1c\x38\x1c\x38\x1c\x38\x1c\x38\x1c'\
b'\x38\x1c\x38\x1c\xfe\x7f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x07\x00\x00\x00\x00\x38\x38\x38\x00\x00\x00\x78'\
b'\x38\x38\x38\x38\x38\x38\x38\x38\x38\x38\x38\x38\x7e\x00\x00\x00'\
b'\x00\x00\x00\x00\x07\x00\x00\x00\x00\x00\x38\x38\x38\x00\x00\x00'\
b'\x78\x38\x38\x38\x38\x38\x38\x38\x38\x38\x38\x38\x38\x38\x38\x30'\
b'\x30\x60\x40\x80\x0f\x00\x00\x00\x00\x00\xf8\x00\x38\x00\x38\x00'\
b'\x38\x00\x38\x00\x38\x00\x38\x00\x38\x7c\x38\x30\x38\x20\x38\x40'\
b'\x38\x80\x39\x80\x3f\x80\x39\xc0\x38\xe0\x38\xe0\x38\x70\x38\x70'\
b'\x38\x38\xfc\x1c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x07\x00\x00\x00\xf8\x38\x38\x38\x38\x38\x38\x38\x38\x38'\
b'\x38\x38\x38\x38\x38\x38\x38\x38\x38\x38\xfe\x00\x00\x00\x00\x00'\
b'\x00\x00\x17\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf9'\
b'\xe1\xe0\x3e\x76\x70\x38\x38\x38\x38\x38\x38\x38\x38\x38\x38\x38'\
b'\x38\x38\x38\x38\x38\x38\x38\x38\x38\x38\x38\x38\x38\x38\x38\x38'\
b'\x38\x38\x38\x38\x38\x38\xfc\x7c\x7e\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\xf9\xf0\x3e\x38\x38\x1c\x38\x1c\x38\x1c\x38\x1c\x38\x1c'\
b'\x38\x1c\x38\x1c\x38\x1c\x38\x1c\x38\x1c\x38\x1c\xfe\x7f\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x07\xe0\x0c\x30\x18\x1c\x38\x1c\x70\x0e\x70\x0e\x70\x0e\x70\x0e'\
b'\x70\x0e\x70\x0e\x38\x1c\x38\x18\x1c\x30\x07\xe0\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x10\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf9\xf0'\
b'\x3e\x38\x38\x1c\x38\x1c\x38\x0e\x38\x0e\x38\x0e\x38\x0e\x38\x0e'\
b'\x38\x0e\x38\x1c\x38\x1c\x3c\x38\x3b\xe0\x38\x00\x38\x00\x38\x00'\
b'\x38\x00\x38\x00\x38\x00\xfe\x00\x10\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x07\xc4\x1c\x2c'\
b'\x38\x1c\x38\x1c\x70\x1c\x70\x1c\x70\x1c\x70\x1c\x70\x1c\x70\x1c'\
b'\x38\x1c\x38\x1c\x1c\x7c\x0f\x9c\x00\x1c\x00\x1c\x00\x1c\x00\x1c'\
b'\x00\x1c\x00\x1c\x00\x7f\x0b\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf9\xc0\x3b\xc0\x3c\x00'\
b'\x38\x00\x38\x00\x38\x00\x38\x00\x38\x00\x38\x00\x38\x00\x38\x00'\
b'\x38\x00\x38\x00\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x1f\x80\x30\xc0\x60\x40\x60\x40'\
b'\x70\x00\x78\x00\x3f\x00\x07\xc0\x01\xe0\x00\x60\x40\x60\x60\x60'\
b'\x70\xc0\x1f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x09\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x08\x00\x18\x00\x38\x00\x7f\x80\x38\x00\x38\x00\x38\x00\x38\x00'\
b'\x38\x00\x38\x00\x38\x00\x38\x00\x38\x00\x38\x00\x38\x00\x1c\x00'\
b'\x0f\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x10\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x38\x1c\x78\x3c\x38\x1c\x38\x1c\x38\x1c\x38\x1c'\
b'\x38\x1c\x38\x1c\x38\x1c\x38\x1c\x38\x1c\x38\x1c\x1c\x7c\x0f\x9f'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x0e\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\xfe\x7c\x38\x10\x38\x10\x18\x20\x1c\x20\x1c\x20\x0e\x40'\
b'\x0e\x40\x0e\x40\x07\x80\x07\x80\x03\x00\x03\x00\x03\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x15\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfc\x7e\x78\x38\x38\x10\x38'\
b'\x38\x20\x38\x5c\x20\x18\x5c\x20\x1c\x4c\x40\x1c\x8e\x40\x0c\x8e'\
b'\x40\x0e\x86\x80\x0f\x07\x80\x07\x07\x80\x07\x03\x00\x06\x03\x00'\
b'\x02\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x0e\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfc\x78\x38\x30'\
b'\x38\x20\x1c\x40\x0e\x80\x0e\x80\x07\x00\x07\x80\x0b\x80\x09\xc0'\
b'\x10\xe0\x10\xe0\x20\x70\xf1\xfc\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x0e\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfc\x7c\x78\x10\x38\x10'\
b'\x38\x20\x1c\x20\x1c\x40\x1c\x40\x0e\x40\x0e\x80\x06\x80\x07\x80'\
b'\x07\x00\x03\x00\x02\x00\x02\x00\x02\x00\x04\x00\x04\x00\x0c\x00'\
b'\x08\x00\x18\x00\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x7f\xe0\x60\xe0\x41\xc0\x41\x80'\
b'\x03\x80\x07\x00\x07\x00\x0e\x00\x0c\x00\x1c\x00\x18\x20\x38\x20'\
b'\x70\x60\x7f\xe0\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x09\x00\x07\x00\x0c\x00\x18\x00\x18\x00\x18\x00\x18\x00'\
b'\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00\x30\x00\xc0\x00'\
b'\x20\x00\x10\x00\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00\x18\x00'\
b'\x18\x00\x18\x00\x18\x00\x0c\x00\x07\x00\x00\x00\x00\x00\x00\x00'\
b'\x09\x00\x08\x00\x08\x00\x08\x00\x08\x00\x08\x00\x08\x00\x08\x00'\
b'\x08\x00\x08\x00\x08\x00\x08\x00\x08\x00\x08\x00\x08\x00\x08\x00'\
b'\x08\x00\x08\x00\x08\x00\x08\x00\x08\x00\x08\x00\x08\x00\x08\x00'\
b'\x08\x00\x08\x00\x08\x00\x08\x00\x00\x00\x00\x00\x00\x00\x09\x00'\
b'\x70\x00\x18\x00\x0c\x00\x0c\x00\x0c\x00\x0c\x00\x0c\x00\x0c\x00'\
b'\x0c\x00\x0c\x00\x0c\x00\x0c\x00\x06\x00\x01\x80\x02\x00\x04\x00'\
b'\x0c\x00\x0c\x00\x0c\x00\x0c\x00\x0c\x00\x0c\x00\x0c\x00\x0c\x00'\
b'\x0c\x00\x18\x00\x70\x00\x00\x00\x00\x00\x00\x00\x0f\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x1c\x04\x22\x04\x41\x04\x40\x88\x40\x70'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'\
b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00'
# Glyph lookup table: two-byte little-endian offsets into _font, read via
# ifb() in get_ch(). Slot 0 is the fallback entry used for characters
# outside ASCII 32..126; slots 1..95 map to characters 32..126.
# Presumably emitted by a font-to-py style generator — TODO confirm.
_index =\
b'\x00\x00\x3e\x00\x5e\x00\x7e\x00\xbc\x00\xfa\x00\x38\x01\x94\x01'\
b'\xf0\x01\x10\x02\x4e\x02\x8c\x02\xca\x02\x08\x03\x28\x03\x66\x03'\
b'\x86\x03\xc4\x03\x02\x04\x40\x04\x7e\x04\xbc\x04\xfa\x04\x38\x05'\
b'\x76\x05\xb4\x05\xf2\x05\x30\x06\x50\x06\x70\x06\xae\x06\xec\x06'\
b'\x2a\x07\x68\x07\xe2\x07\x3e\x08\x9a\x08\xf6\x08\x52\x09\xae\x09'\
b'\xec\x09\x48\x0a\xa4\x0a\xe2\x0a\x20\x0b\x7c\x0b\xba\x0b\x34\x0c'\
b'\x90\x0c\xec\x0c\x48\x0d\xa4\x0d\x00\x0e\x3e\x0e\x9a\x0e\xf6\x0e'\
b'\x52\x0f\xcc\x0f\x28\x10\x84\x10\xe0\x10\x1e\x11\x5c\x11\x9a\x11'\
b'\xd8\x11\x16\x12\x54\x12\x92\x12\xd0\x12\x0e\x13\x4c\x13\x8a\x13'\
b'\xc8\x13\x06\x14\x44\x14\x64\x14\x84\x14\xc2\x14\xe2\x14\x3e\x15'\
b'\x7c\x15\xba\x15\xf8\x15\x36\x16\x74\x16\xb2\x16\xf0\x16\x2e\x17'\
b'\x6c\x17\xc8\x17\x06\x18\x44\x18\x82\x18\xc0\x18\xfe\x18\x3c\x19'\
b'\x7a\x19'
# Memoryviews over the font and index data let get_ch() take slices without
# copying bytes on every lookup.
_mvfont = memoryview(_font)
_mvi = memoryview(_index)
def ifb(l):
    """Return the unsigned 16-bit little-endian integer stored in l[0:2].

    Accepts any indexable byte sequence (bytes, bytearray, memoryview).
    Replaces the original `ifb = lambda ...` — assigning a lambda to a
    name is a PEP 8 anti-pattern; a `def` is equivalent and debuggable.
    """
    return l[0] | (l[1] << 8)
def get_ch(ch):
    """Return (glyph_data, height, width) for character *ch*.

    glyph_data is a memoryview of the row-packed bitmap inside _font,
    height is the fixed font height (30 rows) and width is the glyph
    width in pixels. Characters outside printable ASCII (32..126) fall
    back to the glyph referenced by index slot 0.
    """
    oc = ord(ch)
    # Slot 0 is the fallback; printable ASCII maps to slots 1..95.
    # (Chained comparison replaces the original `oc >= 32 and oc <= 126`.)
    ioff = 2 * (oc - 32 + 1) if 32 <= oc <= 126 else 0
    doff = ifb(_mvi[ioff:])
    # First two bytes of each glyph record hold its pixel width.
    width = ifb(_mvfont[doff:])
    # Each of the 30 rows occupies ceil(width / 8) bytes.
    next_offs = doff + 2 + ((width - 1) // 8 + 1) * 30
    return _mvfont[doff + 2:next_offs], 30, width
| 63.615385
| 105
| 0.707242
| 7,264
| 29,772
| 2.896063
| 0.027395
| 0.636307
| 0.797024
| 0.900699
| 0.760897
| 0.730332
| 0.694443
| 0.653943
| 0.598469
| 0.550269
| 0
| 0.416538
| 0.020657
| 29,772
| 467
| 106
| 63.751606
| 0.30497
| 0.005173
| 0
| 0.157778
| 1
| 0.931111
| 0.907274
| 0.906868
| 0
| 1
| 0
| 0
| 0
| 1
| 0.02
| false
| 0
| 0
| 0.017778
| 0.04
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
003a9685bec17a9415b100702335fa9e00d45ca2
| 148
|
py
|
Python
|
activitysimulations/watchingsimulation.py
|
TheImaginaryOne/movies-py
|
a3b601228cde6850919eaf84c0175f5ce3822e2b
|
[
"MIT"
] | null | null | null |
activitysimulations/watchingsimulation.py
|
TheImaginaryOne/movies-py
|
a3b601228cde6850919eaf84c0175f5ce3822e2b
|
[
"MIT"
] | null | null | null |
activitysimulations/watchingsimulation.py
|
TheImaginaryOne/movies-py
|
a3b601228cde6850919eaf84c0175f5ce3822e2b
|
[
"MIT"
] | null | null | null |
from domainmodel.movie import Movie
from domainmodel.user import User
from domainmodel.review import Review
class MovieWatchingSimulation:
    """Stub for a movie-watching activity simulation (no behavior yet)."""
    pass
| 24.666667
| 37
| 0.844595
| 18
| 148
| 6.944444
| 0.5
| 0.36
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.128378
| 148
| 6
| 38
| 24.666667
| 0.968992
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.2
| 0.6
| 0
| 0.8
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
cc3bee54a2f2cfe376b47e81d9587a3619a3b474
| 159
|
py
|
Python
|
HackerRank/10 Days of Statistics/Day4B.py
|
ShubhamJagtap2000/competitive-programming-1
|
3a9a2e3dd08f8fa8ab823f295cd020d08d3bff84
|
[
"MIT"
] | 1
|
2021-07-22T07:37:31.000Z
|
2021-07-22T07:37:31.000Z
|
HackerRank/10 Days of Statistics/Day4B.py
|
ShubhamJagtap2000/competitive-programming-1
|
3a9a2e3dd08f8fa8ab823f295cd020d08d3bff84
|
[
"MIT"
] | null | null | null |
HackerRank/10 Days of Statistics/Day4B.py
|
ShubhamJagtap2000/competitive-programming-1
|
3a9a2e3dd08f8fa8ab823f295cd020d08d3bff84
|
[
"MIT"
] | null | null | null |
import math


def binomial_pmf(n, k, p):
    """Return P(X == k) for X ~ Binomial(n, p)."""
    return math.comb(n, k) * p ** k * (1 - p) ** (n - k)


def binomial_cdf(n, k, p):
    """Return P(X <= k) for X ~ Binomial(n, p)."""
    return sum(binomial_pmf(n, i, p) for i in range(k + 1))


# P(X <= 2) for 10 trials with "success" probability 0.12. Identical (after
# rounding to 3 places) to the original hard-coded expansion
# 0.88**10 + 1.2*0.88**9 + 45*0.88**8*0.12**2, but the intent is explicit.
print(round(binomial_cdf(10, 2, 0.12), 3))
# P(X >= 2) = 1 - P(X <= 1).
print(round(1 - binomial_cdf(10, 1, 0.12), 3))
| 39.75
| 86
| 0.522013
| 38
| 159
| 2.184211
| 0.394737
| 0.289157
| 0.361446
| 0.192771
| 0.409639
| 0.409639
| 0.409639
| 0.409639
| 0.409639
| 0.409639
| 0
| 0.251799
| 0.125786
| 159
| 3
| 87
| 53
| 0.345324
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.666667
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 9
|
ccafc8dd350a7b473fa57c6480d7480ca3cc0213
| 187
|
py
|
Python
|
Week 1 Exercises/vara_varb.py
|
parkerbxyz/MITx-6.00.1x-2T2019a
|
58709fcf1acc380f4a5884d03739fdd621587540
|
[
"MIT"
] | 1
|
2019-09-25T06:59:55.000Z
|
2019-09-25T06:59:55.000Z
|
Week 1 Exercises/vara_varb.py
|
parkerbxyz/MITx-6.00.1x-2T2019a
|
58709fcf1acc380f4a5884d03739fdd621587540
|
[
"MIT"
] | null | null | null |
Week 1 Exercises/vara_varb.py
|
parkerbxyz/MITx-6.00.1x-2T2019a
|
58709fcf1acc380f4a5884d03739fdd621587540
|
[
"MIT"
] | null | null | null |
# Compare varA and varB, reporting string operands first: ordering
# comparisons between str and numbers raise TypeError in Python 3.
# isinstance() replaces the `type(x) is str` anti-pattern (it also
# accepts str subclasses).
if isinstance(varA, str) or isinstance(varB, str):
    print('string involved')
elif varA == varB:
    print('equal')
elif varA > varB:
    print('bigger')
elif varA < varB:
    print('smaller')
| 20.777778
| 42
| 0.631016
| 28
| 187
| 4.214286
| 0.5
| 0.20339
| 0.305085
| 0.432203
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.219251
| 187
| 8
| 43
| 23.375
| 0.808219
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
ccbbe3f7f8abd59eb83d99bd5e50d90f00755f5b
| 13,267
|
py
|
Python
|
geokey/applications/tests/test_views.py
|
universityofsussex/geokey
|
25e161dbc81841c57c148053dbe99facc81e84b8
|
[
"Apache-2.0"
] | null | null | null |
geokey/applications/tests/test_views.py
|
universityofsussex/geokey
|
25e161dbc81841c57c148053dbe99facc81e84b8
|
[
"Apache-2.0"
] | null | null | null |
geokey/applications/tests/test_views.py
|
universityofsussex/geokey
|
25e161dbc81841c57c148053dbe99facc81e84b8
|
[
"Apache-2.0"
] | null | null | null |
"""Tests for views of applications."""
from django.test import TestCase
from django.core.urlresolvers import reverse
from django.contrib.auth.models import AnonymousUser
from django.http import HttpResponseRedirect
from nose.tools import raises
from oauth2_provider.models import AccessToken
from rest_framework.test import APIRequestFactory
from geokey.projects.tests.model_factories import UserFactory
from ..views import (
ApplicationOverview, ApplicationCreate, ApplicationSettings,
ApplicationDelete, ApplicationConnected, ApplicationDisconnect
)
from ..models import Application
from .model_factories import ApplicationFactory
class ApplicationOverviewTest(TestCase):
    """Access-control tests for the ApplicationOverview view."""
    def test_get_with_user(self):
        """A signed-in user receives the overview page with status 200."""
        overview_url = reverse('admin:app_overview')
        http_request = APIRequestFactory().get(overview_url)
        http_request.user = UserFactory.create()
        rendered = ApplicationOverview.as_view()(http_request).render()
        self.assertEqual(200, rendered.status_code)
    def test_get_with_anonymous(self):
        """An anonymous visitor is redirected away from the overview."""
        overview_url = reverse('admin:app_overview')
        http_request = APIRequestFactory().get(overview_url)
        http_request.user = AnonymousUser()
        result = ApplicationOverview.as_view()(http_request)
        self.assertTrue(isinstance(result, HttpResponseRedirect))
class ApplicationConnectedTest(TestCase):
    """Access-control tests for the ApplicationConnected view."""
    def test_get_with_user(self):
        """A signed-in user receives the connected-apps page with status 200."""
        connected_url = reverse('admin:app_connected')
        http_request = APIRequestFactory().get(connected_url)
        http_request.user = UserFactory.create()
        rendered = ApplicationConnected.as_view()(http_request).render()
        self.assertEqual(200, rendered.status_code)
    def test_get_with_anonymous(self):
        """An anonymous visitor is redirected away from the page."""
        connected_url = reverse('admin:app_connected')
        http_request = APIRequestFactory().get(connected_url)
        http_request.user = AnonymousUser()
        result = ApplicationConnected.as_view()(http_request)
        self.assertTrue(isinstance(result, HttpResponseRedirect))
class ApplicationDisconnectTest(TestCase):
    """Tests for the ApplicationDisconnect view."""
    def setUp(self):
        """Create a user, an application and an access token linking them."""
        self.user = UserFactory.create()
        self.app = ApplicationFactory.create()
        self.token = AccessToken.objects.create(
            user=self.user,
            application=self.app,
            token='df0af6a395b4cd072445b3832e9379bfee257da0',
            scope=1,
            expires='2030-12-31T23:59:01+00:00'
        )
    @raises(AccessToken.DoesNotExist)
    def test_get_with_user(self):
        """The token owner is redirected and the token is deleted.

        Deletion is asserted indirectly: the final lookup must raise
        AccessToken.DoesNotExist (declared via @raises).
        """
        view = ApplicationDisconnect.as_view()
        url = reverse('admin:app_disconnect', kwargs={'app_id': self.app.id})
        request = APIRequestFactory().get(url)
        # Manually attach a session and message storage, which
        # APIRequestFactory does not provide.
        from django.contrib.messages.storage.fallback import FallbackStorage
        setattr(request, 'session', 'session')
        messages = FallbackStorage(request)
        setattr(request, '_messages', messages)
        request.user = self.user
        response = view(request, app_id=self.app.id)
        self.assertTrue(isinstance(response, HttpResponseRedirect))
        AccessToken.objects.get(pk=self.token.id)
    def test_get_with_anonymous(self):
        """An anonymous visitor is redirected and the token survives."""
        view = ApplicationDisconnect.as_view()
        url = reverse('admin:app_disconnect', kwargs={'app_id': self.app.id})
        request = APIRequestFactory().get(url)
        request.user = AnonymousUser()
        response = view(request, app_id=self.app.id)
        self.assertTrue(isinstance(response, HttpResponseRedirect))
        self.assertIsNotNone(AccessToken.objects.get(pk=self.token.id))
    def test_get_with_unconnected_user(self):
        """A user without a token for this app is redirected."""
        view = ApplicationDisconnect.as_view()
        url = reverse('admin:app_disconnect', kwargs={'app_id': self.app.id})
        request = APIRequestFactory().get(url)
        request.user = UserFactory.create()
        response = view(request, app_id=self.app.id)
        self.assertTrue(isinstance(response, HttpResponseRedirect))
class ApplicationCreateTest(TestCase):
    """Tests for the ApplicationCreate view."""
    def test_get_with_user(self):
        """An authenticated user receives the registration form (200)."""
        view = ApplicationCreate.as_view()
        url = reverse('admin:app_register')
        request = APIRequestFactory().get(url)
        request.user = UserFactory.create()
        response = view(request).render()
        self.assertEqual(response.status_code, 200)
    def test_get_with_anonymous(self):
        """An anonymous visitor is redirected."""
        view = ApplicationCreate.as_view()
        url = reverse('admin:app_register')
        request = APIRequestFactory().get(url)
        request.user = AnonymousUser()
        response = view(request)
        self.assertTrue(isinstance(response, HttpResponseRedirect))
    def test_post_with_user(self):
        """Posting valid data as an authenticated user creates one app."""
        data = {
            'name': 'test app',
            # Fixed: key was 'description:' (stray colon), which posted a
            # bogus field name instead of the description.
            'description': '',
            'download_url': 'http://example.com',
            'redirect_uris': 'http://example.com',
            'authorization_grant_type': 'password',
            'skip_authorization': False,
        }
        view = ApplicationCreate.as_view()
        url = reverse('admin:app_register')
        request = APIRequestFactory().post(url, data)
        request.user = UserFactory.create()
        # Manually attach a session and message storage, which
        # APIRequestFactory does not provide.
        from django.contrib.messages.storage.fallback import FallbackStorage
        setattr(request, 'session', 'session')
        messages = FallbackStorage(request)
        setattr(request, '_messages', messages)
        response = view(request)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Application.objects.count(), 1)
    def test_post_with_anonymous(self):
        """Posting as an anonymous user redirects and creates nothing."""
        data = {
            'name': 'test app',
            'description': '',
            'download_url': 'http://example.com',
            'redirect_uris': 'http://example.com',
            'authorization_grant_type': 'password',
            'skip_authorization': False,
        }
        view = ApplicationCreate.as_view()
        url = reverse('admin:app_register')
        request = APIRequestFactory().post(url, data)
        request.user = AnonymousUser()
        response = view(request)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(Application.objects.count(), 0)
class ApplicationSettingsTest(TestCase):
    """Tests for the ApplicationSettings view."""
    def setUp(self):
        """Create an application owned by self.creator."""
        self.creator = UserFactory.create()
        self.app = ApplicationFactory.create(**{'user': self.creator})
    def test_get_with_creator(self):
        """The owner gets the settings page without the not-owner warning."""
        view = ApplicationSettings.as_view()
        url = reverse('admin:app_settings', kwargs={'app_id': self.app.id})
        request = APIRequestFactory().get(url)
        request.user = self.creator
        response = view(request, app_id=self.app.id).render()
        self.assertEqual(response.status_code, 200)
        self.assertNotContains(
            response,
            # Fixed: the trailing space after 'not' was missing, so the
            # concatenated string ('...notallowed...') never matched and
            # this assertion was vacuous.
            'You are not the owner of this application and therefore not '
            'allowed to access this app.'
        )
    def test_get_with_user(self):
        """A non-owner gets a 200 page that shows the not-owner warning."""
        view = ApplicationSettings.as_view()
        url = reverse('admin:app_settings', kwargs={'app_id': self.app.id})
        request = APIRequestFactory().get(url)
        request.user = UserFactory.create()
        response = view(request, app_id=self.app.id).render()
        self.assertEqual(response.status_code, 200)
        self.assertContains(
            response,
            'You are not the owner of this application and therefore not '
            'allowed to access this app.'
        )
    def test_get_with_anonymous(self):
        """An anonymous visitor is redirected."""
        view = ApplicationSettings.as_view()
        url = reverse('admin:app_settings', kwargs={'app_id': self.app.id})
        request = APIRequestFactory().get(url)
        request.user = AnonymousUser()
        response = view(request, app_id=self.app.id)
        self.assertTrue(isinstance(response, HttpResponseRedirect))
    def test_post_with_creator(self):
        """The owner's update is applied to the stored application."""
        data = {
            'name': 'test app',
            'description': '',
            'download_url': 'http://example.com',
            'redirect_uris': 'http://example.com',
            'authorization_grant_type': 'password',
            'skip_authorization': True,
        }
        view = ApplicationSettings.as_view()
        url = reverse('admin:app_settings', kwargs={'app_id': self.app.id})
        request = APIRequestFactory().post(url, data)
        request.user = self.creator
        # Manually attach a session and message storage, which
        # APIRequestFactory does not provide.
        from django.contrib.messages.storage.fallback import FallbackStorage
        setattr(request, 'session', 'session')
        messages = FallbackStorage(request)
        setattr(request, '_messages', messages)
        response = view(request, app_id=self.app.id).render()
        self.assertEqual(response.status_code, 200)
        self.assertNotContains(
            response,
            # Fixed: missing trailing space after 'not' made this check
            # vacuous (see test_get_with_creator).
            'You are not the owner of this application and therefore not '
            'allowed to access this app.'
        )
        ref = Application.objects.get(pk=self.app.id)
        self.assertEqual(ref.name, data.get('name'))
        self.assertEqual(ref.description, data.get('description'))
        self.assertEqual(ref.download_url, data.get('download_url'))
        self.assertEqual(ref.redirect_uris, data.get('redirect_uris'))
        self.assertEqual(
            ref.authorization_grant_type,
            data.get('authorization_grant_type')
        )
    def test_post_with_user(self):
        """A non-owner's update is rejected; stored values stay unchanged."""
        data = {
            'name': 'test app',
            'description': '',
            'download_url': 'http://example.com/download',
            'redirect_uris': 'http://example.com/redirect',
            'authorization_grant_type': 'password',
            'skip_authorization': True,
        }
        view = ApplicationSettings.as_view()
        url = reverse('admin:app_settings', kwargs={'app_id': self.app.id})
        request = APIRequestFactory().post(url, data)
        request.user = UserFactory.create()
        response = view(request, app_id=self.app.id).render()
        self.assertEqual(response.status_code, 200)
        self.assertContains(
            response,
            'You are not the owner of this application and therefore not '
            'allowed to access this app.'
        )
        ref = Application.objects.get(pk=self.app.id)
        self.assertNotEqual(ref.name, data.get('name'))
        self.assertNotEqual(ref.description, data.get('description'))
        self.assertNotEqual(ref.download_url, data.get('download_url'))
        self.assertNotEqual(ref.redirect_uris, data.get('redirect_uris'))
        self.assertNotEqual(
            ref.authorization_grant_type,
            data.get('authorization_grant_type')
        )
    def test_post_with_anonymous(self):
        """An anonymous POST redirects; stored values stay unchanged."""
        data = {
            'name': 'test app',
            'description': '',
            'download_url': 'http://example.com/download',
            'redirect_uris': 'http://example.com/redirect',
            'authorization_grant_type': 'password',
            'skip_authorization': True,
        }
        view = ApplicationSettings.as_view()
        url = reverse('admin:app_settings', kwargs={'app_id': self.app.id})
        request = APIRequestFactory().post(url, data)
        request.user = AnonymousUser()
        response = view(request, app_id=self.app.id)
        self.assertTrue(isinstance(response, HttpResponseRedirect))
        ref = Application.objects.get(pk=self.app.id)
        self.assertNotEqual(ref.name, data.get('name'))
        self.assertNotEqual(ref.description, data.get('description'))
        self.assertNotEqual(ref.download_url, data.get('download_url'))
        self.assertNotEqual(ref.redirect_uris, data.get('redirect_uris'))
        self.assertNotEqual(
            ref.authorization_grant_type,
            data.get('authorization_grant_type')
        )
class ApplicationDeleteTest(TestCase):
    """Tests for the ApplicationDelete view."""
    def setUp(self):
        """Create an application owned by self.creator."""
        self.creator = UserFactory.create()
        self.app = ApplicationFactory.create(**{'user': self.creator})
    def test_get_with_creator(self):
        """The owner's delete request is answered with a redirect."""
        view = ApplicationDelete.as_view()
        url = reverse('admin:app_delete', kwargs={'app_id': self.app.id})
        request = APIRequestFactory().get(url)
        # Manually attach a session and message storage, which
        # APIRequestFactory does not provide.
        from django.contrib.messages.storage.fallback import FallbackStorage
        setattr(request, 'session', 'session')
        messages = FallbackStorage(request)
        setattr(request, '_messages', messages)
        request.user = self.creator
        response = view(request, app_id=self.app.id)
        self.assertTrue(isinstance(response, HttpResponseRedirect))
    def test_get_with_user(self):
        """A non-owner gets a 200 page containing the not-owner warning."""
        view = ApplicationDelete.as_view()
        url = reverse('admin:app_delete', kwargs={'app_id': self.app.id})
        request = APIRequestFactory().get(url)
        request.user = UserFactory.create()
        response = view(request, app_id=self.app.id).render()
        self.assertEqual(response.status_code, 200)
        self.assertContains(
            response,
            'You are not the owner of this application and therefore not '
            'allowed to access this app.'
        )
    def test_get_with_anonymous(self):
        """An anonymous visitor is redirected."""
        view = ApplicationDelete.as_view()
        url = reverse('admin:app_delete', kwargs={'app_id': self.app.id})
        request = APIRequestFactory().get(url)
        request.user = AnonymousUser()
        response = view(request, app_id=self.app.id)
        self.assertTrue(isinstance(response, HttpResponseRedirect))
| 38.792398
| 77
| 0.647923
| 1,388
| 13,267
| 6.054755
| 0.097262
| 0.030343
| 0.036411
| 0.034269
| 0.883984
| 0.881128
| 0.86792
| 0.863398
| 0.845312
| 0.842337
| 0
| 0.007512
| 0.237431
| 13,267
| 341
| 78
| 38.906158
| 0.823169
| 0.002412
| 0
| 0.806228
| 0
| 0
| 0.145967
| 0.019427
| 0
| 0
| 0
| 0
| 0.148789
| 1
| 0.079585
| false
| 0.017301
| 0.051903
| 0
| 0.152249
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
cccf817cf51b708eaffec6a6225223c0eb0ab492
| 110
|
py
|
Python
|
qtrader/simulation/tests/__init__.py
|
aaron8tang/qtrader
|
9bd50fd173c7b55707e91d75985055bbe8664548
|
[
"Apache-2.0"
] | 381
|
2017-10-25T19:17:04.000Z
|
2021-03-02T08:46:53.000Z
|
qtrader/simulation/tests/__init__.py
|
362115815/qtrader
|
e5c1e175e19b20381f9140fb76c30ad5cb81f01c
|
[
"Apache-2.0"
] | 3
|
2018-02-13T23:19:40.000Z
|
2018-12-03T22:50:58.000Z
|
qtrader/simulation/tests/__init__.py
|
362115815/qtrader
|
e5c1e175e19b20381f9140fb76c30ad5cb81f01c
|
[
"Apache-2.0"
] | 145
|
2017-10-25T19:17:06.000Z
|
2021-02-15T04:54:08.000Z
|
from qtrader.simulation.tests.arbitrage import Arbitrage
from qtrader.simulation.tests.moments import Moments
| 36.666667
| 56
| 0.872727
| 14
| 110
| 6.857143
| 0.5
| 0.229167
| 0.4375
| 0.541667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072727
| 110
| 2
| 57
| 55
| 0.941176
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
ccd3dea82b78f398839e7a993414c4894c3e33fe
| 105,660
|
py
|
Python
|
Gina.py
|
Zekx/CS332Fighting_Game
|
4dbded68f8ebe955db29b12f8d3409db2710c019
|
[
"CECILL-B"
] | null | null | null |
Gina.py
|
Zekx/CS332Fighting_Game
|
4dbded68f8ebe955db29b12f8d3409db2710c019
|
[
"CECILL-B"
] | null | null | null |
Gina.py
|
Zekx/CS332Fighting_Game
|
4dbded68f8ebe955db29b12f8d3409db2710c019
|
[
"CECILL-B"
] | null | null | null |
import pygame
from boxes import HurtBox
from boxes import HitBox
from boxes import DamageBox
from boxes import InvincibleBox
from boxes import GrabBox
from projectile import GinaFireBall
from effects import *
from Character import Character
class Gina(Character):
def __init__(self):
    """
    Initializes the class for the character Gina.

    Stores her animations of all of her attacks and saves the statistics
    for the player. Each animation list holds one entry per game tick, so
    a frame repeated N times stays on screen for N ticks.
    :return:
    """
    super().__init__()
    self.name = 'Gina'
    self.health = 200
    self.meter = 100
    self.jump_height = 400
    self.portrait = pygame.image.load('Sprites/Gina/GinaPortrait.png')
    self.image = pygame.image.load('Sprites/Gina/GinaStand1.png')
    self.rect = self.image.get_rect()
    self.x_offset = 10
    self.y_offset = 30
    self.dashImage = pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaFDash.png'), (180, 250))
    self.neutralPosition = pygame.image.load('Sprites/Gina/GinaStand1.png')
    self.jumpImage = pygame.image.load('Sprites/Gina/GinaJump4.png')

    # Cache of scaled surfaces keyed by (filename, size). The original code
    # reloaded and rescaled the same PNG from disk once per repeated frame
    # (hundreds of redundant disk reads at startup); loading each distinct
    # (filename, size) pair once and repeating the surface is equivalent
    # for blitting and far faster.
    _cache = {}

    def _extend(target, filename, size, count):
        # Append `count` ticks of one scaled frame to the animation list.
        key = (filename, size)
        if key not in _cache:
            _cache[key] = pygame.transform.scale(
                pygame.image.load('Sprites/Gina/' + filename), size)
        target.extend([_cache[key]] * count)

    # The following calls add in all of the sprites for the animations...
    # Inserts Gina's Victory animation.
    _extend(self.victory_animation, 'GinaWin.png', (130, 250), 120)
    # Inserts Gina's Time Out Lose animation.
    _extend(self.defeat_animation, 'GinaTimeLose.png', (130, 250), 60)
    # Inserts Gina's Crumble Lose animation.
    _extend(self.crumble_animation, 'GinaHurt1.png', (130, 250), 15)
    _extend(self.crumble_animation, 'GinaHurt2.png', (130, 250), 15)
    _extend(self.crumble_animation, 'GinaKnockdown.png', (240, 130), 30)
    # Inserts Gina's standing animation.
    _extend(self.standing, 'GinaStand1.png', (130, 250), 30)
    _extend(self.standing, 'GinaStand2.png', (130, 250), 30)
    _extend(self.standing, 'GinaStand3.png', (130, 250), 30)
    # Inserts Gina's crouching animation.
    _extend(self.crouching, 'GinaCrouch1.png', (130, 250), 30)
    _extend(self.crouching, 'GinaCrouch2.png', (130, 250), 30)
    _extend(self.crouching, 'GinaCrouch3.png', (130, 250), 30)
    # Inserts Gina's dash animation.
    _extend(self.dash, 'GinaFDash.png', (180, 250), 85)
    # Inserts Gina's back-dash animation.
    _extend(self.backdash, 'GinaBDash.png', (150, 250), 75)
    # Inserts Gina's jumping animation.
    _extend(self.jump, 'GinaJump1.png', (130, 250), 30)
    _extend(self.jump, 'GinaJump2.png', (130, 250), 30)
    _extend(self.jump, 'GinaJump3.png', (130, 250), 80)
    _extend(self.jump, 'GinaJump4.png', (130, 250), 100)
    # Inserts Gina's Forward walking animation.
    for frame_file in ('GinaFWalk1.png', 'GinaFWalk2.png', 'GinaFWalk3.png',
                       'GinaFWalk4.png', 'GinaFWalk5.png', 'GinaFWalk6.png'):
        _extend(self.walkFoward, frame_file, (130, 250), 15)
    # Inserts Gina's Backward walking animation.
    for frame_file in ('GinaBWalk1.png', 'GinaBWalk2.png',
                       'GinaBWalk3.png', 'GinaBWalk4.png'):
        _extend(self.walkBackward, frame_file, (130, 250), 20)
    # Inserts Gina's standing A attack animation.
    _extend(self.stand_a_animation, 'Attacks/GinaStandA1.png', (130, 250), 2)
    _extend(self.stand_a_animation, 'Attacks/GinaStandA2.png', (130, 250), 2)
    _extend(self.stand_a_animation, 'Attacks/GinaStandA3.png', (150, 250), 6)
    _extend(self.stand_a_animation, 'Attacks/GinaStandA2.png', (130, 250), 3)
    # Inserts Gina's crouching A attack animation.
    _extend(self.crouch_a_animation, 'Attacks/GinaCrouchA1.png', (150, 250), 4)
    _extend(self.crouch_a_animation, 'Attacks/GinaCrouchA2.png', (150, 250), 4)
    _extend(self.crouch_a_animation, 'Attacks/GinaCrouchA3.png', (150, 250), 6)
    _extend(self.crouch_a_animation, 'Attacks/GinaCrouchA2.png', (150, 250), 6)
    # Inserts Gina's jumping A attack animation.
    _extend(self.jump_a_animation, 'Attacks/GinaJumpA1.png', (130, 250), 10)
    # Inserts Gina's standing B attack animation.
    _extend(self.stand_b_animation, 'Attacks/GinaStandB1-1.png', (200, 250), 3)
    _extend(self.stand_b_animation, 'Attacks/GinaStandB1.png', (200, 250), 4)
    _extend(self.stand_b_animation, 'Attacks/GinaStandB2.png', (200, 250), 6)
    _extend(self.stand_b_animation, 'Attacks/GinaStandB3.png', (200, 250), 13)
    _extend(self.stand_b_animation, 'Attacks/GinaStandB4.png', (200, 250), 7)
    _extend(self.stand_b_animation, 'Attacks/GinaStandB1.png', (200, 250), 5)
    _extend(self.stand_b_animation, 'Attacks/GinaStandB1-1.png', (200, 250), 3)
    # Inserts Gina's crouching B attack animation.
    _extend(self.crouch_b_animation, 'Attacks/GinaCrouchB1.png', (130, 250), 8)
    _extend(self.crouch_b_animation, 'Attacks/GinaCrouchB2.png', (130, 250), 6)
    _extend(self.crouch_b_animation, 'Attacks/GinaCrouchB3.png', (130, 250), 6)
    _extend(self.crouch_b_animation, 'Attacks/GinaCrouchB2.png', (130, 250), 8)
    _extend(self.crouch_b_animation, 'Attacks/GinaCrouchB1.png', (130, 250), 8)
    # Inserts Gina's jumping B attack animation.
    _extend(self.jump_b_animation, 'Attacks/GinaJumpB1.png', (230, 250), 4)
    _extend(self.jump_b_animation, 'Attacks/GinaJumpB2.png', (230, 250), 12)
    # Inserts Gina's standing C attack animation.
    _extend(self.stand_c_animation, 'Attacks/GinaStandC1.png', (130, 250), 4)
    _extend(self.stand_c_animation, 'Attacks/GinaStandC2.png', (160, 250), 8)
    _extend(self.stand_c_animation, 'Attacks/GinaStandC3.png', (160, 250), 9)
    _extend(self.stand_c_animation, 'Attacks/GinaStandC4.png', (130, 250), 8)
    _extend(self.stand_c_animation, 'Attacks/GinaStandC5.png', (130, 250), 10)
    # Inserts Gina's crouching C attack animation.
    _extend(self.crouch_c_animation, 'Attacks/GinaCrouchC1.png', (230, 250), 6)
    _extend(self.crouch_c_animation, 'Attacks/GinaCrouchC3.png', (230, 250), 8)
    _extend(self.crouch_c_animation, 'Attacks/GinaCrouchC4.png', (230, 250), 10)
    _extend(self.crouch_c_animation, 'Attacks/GinaCrouchC3.png', (230, 250), 10)
    # Inserts Gina's jump C attack animation.
    _extend(self.jump_c_animation, 'Attacks/GinaJumpC1.png', (170, 250), 10)
    _extend(self.jump_c_animation, 'Attacks/GinaJumpC2.png', (170, 250), 5)
    _extend(self.jump_c_animation, 'Attacks/GinaJumpC3.png', (170, 250), 5)
    # Inserts Gina's wakeup animation.
    _extend(self.wakeup_animation, 'GinaKnockdown.png', (240, 130), 30)
    _extend(self.wakeup_animation, 'GinaWakeUp1.png', (240, 180), 20)
    _extend(self.wakeup_animation, 'GinaWakeUp2.png', (250, 180), 20)
    # Inserts Gina's grab animation.
    _extend(self.grab_animation, 'Attacks/GinaGrab1.png', (190, 250), 8)
    _extend(self.grab_animation, 'Attacks/GinaGrab2.png', (190, 250), 8)
    _extend(self.grab_animation, 'Attacks/GinaGrab3.png', (190, 250), 8)
    # Inserts Gina's Hurt animation.
    _extend(self.hurt_animation, 'GinaHurt1.png', (130, 250), 1)
    # Inserts Gina's special one animation.
    _extend(self.special_one_animation, 'Attacks/GinaFireBall1.png', (190, 250), 8)
    _extend(self.special_one_animation, 'Attacks/GinaFireBall2.png', (190, 250), 8)
    _extend(self.special_one_animation, 'Attacks/GinaFireBall3.png', (190, 250), 8)
    _extend(self.special_one_animation, 'Attacks/GinaFireBall4.png', (190, 250), 12)
    # NOTE(review): the original appended GinaHurt1 to hurt_animation a second
    # time here (copy-pasted block); kept so hurt_animation's length is
    # unchanged for any code that depends on it.
    _extend(self.hurt_animation, 'GinaHurt1.png', (130, 250), 1)
# ----------------------------------------------------------------------------------------------------------------
def update_hurt_box(self, player):
    """
    This class updates the character's hurtbox as the battle goes on. Depending on her current action,
    the hurtbox's size and position will constantly change.

    Exactly one box is appended per call (the list is cleared first): a
    HurtBox while the character is vulnerable, or an InvincibleBox during
    states that cannot be hit (air-hit reel, wake-up, back-dash, being
    grabbed, win/lose poses). Finally the box's horizontal extent is
    mirrored onto the player for collision handling elsewhere.
    :param player: the player object whose state flags and position drive the box.
    :return:
    """
    # Assigns initial hurtboxes and hitboxes for the character...
    self.hurt_box.clear()
    if player.facingRight:
        # --- Facing right -------------------------------------------------
        if not player.setAction:
            if player.crouching:
                # Crouching idle: shorter box shifted down.
                self.hurt_box.append(HurtBox(player.x + 20 + player.off_set_value
                                             , player.y + 70, self.rect.width, self.rect.height - 20))
            else:
                # Standing idle: full-height box.
                self.hurt_box.append(HurtBox(player.x + 20 + player.off_set_value,
                                             player.y + 30, self.rect.width + 10, self.rect.height + 30))
        elif player.got_air_hit:
            # Invulnerable while reeling from an air hit.
            self.hurt_box.append(InvincibleBox(player.x + 20 + player.off_set_value
                                               , player.y + 30, self.rect.width + 10, self.rect.height - 30))
        elif player.got_hit:
            self.hurt_box.append(HurtBox(player.x + 20 + player.off_set_value,
                                         player.y + 30, self.rect.width + 10, self.rect.height + 30))
        elif player.getting_up:
            # Wake-up invincibility.
            self.hurt_box.append(InvincibleBox(player.x + 20 + player.off_set_value,
                                               player.y + 30, self.rect.width + 10, self.rect.height + 30))
        elif player.isBackDashing:
            # Back-dash has invincibility frames.
            self.hurt_box.append(InvincibleBox(player.x + 20 + player.off_set_value,
                                               player.y + 30, self.rect.width + 10, self.rect.height + 30))
        elif player.grabbing:
            self.hurt_box.append(HurtBox(player.x + 20 + player.off_set_value,
                                         player.y + 30, self.rect.width + 10, self.rect.height + 30))
        elif player.grabbed:
            # Cannot be hit while held by the opponent.
            self.hurt_box.append(InvincibleBox(player.x + 20 + player.off_set_value,
                                               player.y + 30, self.rect.width + 10, self.rect.height + 30))
        elif player.isDashing:
            # Forward dash: box pushed further right (x + 70) to track the
            # leaning dash sprite.
            self.hurt_box.append(HurtBox(player.x + 70 + player.off_set_value
                                         , player.y + 30, self.rect.width + 10, self.rect.height + 30))
        elif player.isJumping or player.isDescending:
            # Airborne: shorter box.
            self.hurt_box.append(HurtBox(player.x + 20 + player.off_set_value
                                         , player.y + 30, self.rect.width + 10, self.rect.height - 30))
        elif player.special_one:
            self.hurt_box.append(HurtBox(player.x + 20 + player.off_set_value,
                                         player.y + 30, self.rect.width + 10, self.rect.height + 30))
        elif player.attack_a:
            self.hurt_box.append(HurtBox(player.x + 20 + player.off_set_value
                                         , player.y + 30, self.rect.width + 10, self.rect.height + 30))
        elif player.crouch_attack_a:
            # Crouching attacks reuse the crouching-idle box shape.
            self.hurt_box.append(HurtBox(player.x + 20 + player.off_set_value
                                         , player.y + 70, self.rect.width, self.rect.height - 20))
        elif player.attack_b:
            self.hurt_box.append(HurtBox(player.x + 20 + player.off_set_value
                                         , player.y + 30, self.rect.width + 10, self.rect.height + 30))
        elif player.crouch_attack_b:
            self.hurt_box.append(HurtBox(player.x + 20 + player.off_set_value
                                         , player.y + 70, self.rect.width, self.rect.height - 20))
        elif player.attack_c:
            self.hurt_box.append(HurtBox(player.x + 20 + player.off_set_value
                                         , player.y + 30, self.rect.width + 10, self.rect.height + 30))
        elif player.crouch_attack_c:
            # Crouch C: very low profile (y + 120, height - 50).
            self.hurt_box.append(HurtBox(player.x + 20 + player.off_set_value
                                         , player.y + 120, self.rect.width, self.rect.height - 50))
        elif player.loser:
            self.hurt_box.append(InvincibleBox(player.x + 20 + player.off_set_value
                                               , player.y + 70, self.rect.width, self.rect.height - 20))
        elif player.winner:
            self.hurt_box.append(InvincibleBox(player.x + 20 + player.off_set_value,
                                               player.y + 30, self.rect.width + 10, self.rect.height + 30))
    else:
        # --- Facing left --------------------------------------------------
        # Mostly mirrors the right-facing cases; the differences are in
        # isDashing (x + 20 here vs x + 70 when facing right) and attack_b
        # (x + 180 MINUS the offset) — presumably because the mirrored
        # sprite extends the other way. TODO(review): confirm against the
        # sprite sheets.
        if not player.setAction:
            if player.crouching:
                self.hurt_box.append(HurtBox(player.x + 20 + player.off_set_value
                                             , player.y + 70, self.rect.width, self.rect.height - 20))
            else:
                self.hurt_box.append(HurtBox(player.x + 20 + player.off_set_value,
                                             player.y + 30, self.rect.width + 10, self.rect.height + 30))
        elif player.got_air_hit:
            self.hurt_box.append(InvincibleBox(player.x + 20 + player.off_set_value
                                               , player.y + 30, self.rect.width + 10, self.rect.height - 30))
        elif player.got_hit:
            self.hurt_box.append(HurtBox(player.x + 20 + player.off_set_value,
                                         player.y + 30, self.rect.width + 10, self.rect.height + 30))
        elif player.getting_up:
            self.hurt_box.append(InvincibleBox(player.x + 20 + player.off_set_value,
                                               player.y + 30, self.rect.width + 10, self.rect.height + 30))
        elif player.isBackDashing:
            self.hurt_box.append(InvincibleBox(player.x + 20 + player.off_set_value,
                                               player.y + 30, self.rect.width + 10, self.rect.height + 30))
        elif player.grabbing:
            self.hurt_box.append(HurtBox(player.x + 20 + player.off_set_value,
                                         player.y + 30, self.rect.width + 10, self.rect.height + 30))
        elif player.grabbed:
            self.hurt_box.append(InvincibleBox(player.x + 20 + player.off_set_value,
                                               player.y + 30, self.rect.width + 10, self.rect.height + 30))
        elif player.isDashing:
            self.hurt_box.append(HurtBox(
                (player.x + 20)+ player.off_set_value
                , (player.y + 30), self.rect.width + 10, self.rect.height + 30))
        elif player.isJumping or player.isDescending:
            self.hurt_box.append(HurtBox(
                (player.x + 20) + player.off_set_value
                , (player.y + 30), self.rect.width + 10, self.rect.height - 30))
        elif player.special_one:
            self.hurt_box.append(HurtBox(player.x + 20 + player.off_set_value,
                                         player.y + 30, self.rect.width + 10, self.rect.height + 30))
        elif player.attack_a:
            self.hurt_box.append(HurtBox(player.x + 20 + player.off_set_value
                                         , player.y + 30, self.rect.width + 10, self.rect.height + 30))
        elif player.crouch_attack_a:
            self.hurt_box.append(HurtBox(player.x + 20 + player.off_set_value
                                         , player.y + 70, self.rect.width, self.rect.height - 20))
        elif player.attack_b:
            self.hurt_box.append(HurtBox(player.x + 180 - player.off_set_value
                                         , player.y + 30, self.rect.width + 10, self.rect.height + 30))
        elif player.crouch_attack_b:
            self.hurt_box.append(HurtBox(player.x + 20 + player.off_set_value
                                         , player.y + 70, self.rect.width, self.rect.height - 20))
        elif player.attack_c:
            self.hurt_box.append(HurtBox(player.x + 20 + player.off_set_value
                                         , player.y + 30, self.rect.width + 10, self.rect.height + 30))
        elif player.crouch_attack_c:
            self.hurt_box.append(HurtBox(player.x + 20 + player.off_set_value
                                         , player.y + 120, self.rect.width, self.rect.height - 50))
        elif player.loser:
            self.hurt_box.append(InvincibleBox(player.x + 20 + player.off_set_value
                                               , player.y + 70, self.rect.width, self.rect.height - 20))
        elif player.winner:
            self.hurt_box.append(InvincibleBox(player.x + 20 + player.off_set_value,
                                               player.y + 30, self.rect.width + 10, self.rect.height + 30))
    # Mirror the active box's horizontal extent onto the player for
    # pushbox/collision handling elsewhere.
    if len(self.hurt_box) > 0:
        player.collision_x = self.hurt_box[0].rect.x
        player.collision_width = self.hurt_box[0].rect.width
# -----------------------------------------------------------------------------------------------------------------
def win_state(self, player):
    """
    Plays the win animation where the character has won the battle.

    On the first call the facing direction is latched into player.multiplier
    and the frame index is reset; every subsequent call shows one frame,
    mirrored horizontally when the winner faces left. Once the sequence is
    exhausted the index wraps to 0 and finish_win_animation is flagged.
    :param player: the winning player; its image, index and flags are mutated.
    :return:
    """
    if player.setAction is False:
        player.multiplier = 1 if player.facingRight is True else -1
        player.index = 0
        player.setAction = True
    frames = player.character.victory_animation
    if player.index > len(frames) - 1:
        # Sequence exhausted: wrap around and signal completion.
        player.index = 0
        player.finish_win_animation = True
    else:
        frame = frames[player.index]
        if player.multiplier != 1:
            # Facing left: mirror the frame.
            frame = pygame.transform.flip(frame, True, False)
        player.character.image = frame
    player.index += 1
def lose_state(self, player, two):
    """
    Plays the lose animation or defeat animation when the character's hp is gone or if their hp is less
    than the opponent when time runs out.
    :param player: the losing player; its image, index, position and flags are mutated.
    :param two: the opposing player, flagged as winner once the animation ends.
    :return:
    """
    if player.setAction is False:
        player.multiplier = 1 if player.facingRight is True else -1
        player.index = 0
        player.setAction = True
    # Frames are mirrored when the loser faces left.
    needs_flip = player.multiplier != 1
    if player.health_points > 0:
        # Time-out loss: play the defeat pose and hold the last frame.
        animation = player.character.defeat_animation
        final = len(animation) - 1
        if player.index >= final:
            player.index = final
            player.finish_lose_animation = True
            two.winner = True
        else:
            frame = animation[player.index]
            if needs_flip:
                frame = pygame.transform.flip(frame, True, False)
            player.character.image = frame
            player.index += 1
    else:
        # Knockout: crumble animation, dropping the sprite to the floor
        # partway through (frame 30 switches to the knockdown height).
        animation = player.character.crumble_animation
        final = len(animation) - 1
        if player.index >= final:
            player.index = final
            player.finish_lose_animation = True
            two.winner = True
        else:
            if player.index == 0:
                player.y = player.yOriginal
            if player.index == 30:
                player.y = player.yKnockdown
            frame = animation[player.index]
            if needs_flip:
                frame = pygame.transform.flip(frame, True, False)
            player.character.image = frame
            player.index += 1
# -----------------------------------------------------------------------------------------------------------------
def stand_a(self, player):
    """
    Plays the character's Stand A attack and updates hit box.

    On the first call the facing direction is latched into
    player.multiplier and the frame index reset. The hit box is spawned
    exactly once, on frame index 9 (index > 8 and < 10); all hit boxes are
    cleared once the index passes 12, and the attack flags are reset when
    the animation completes.
    :param player: the attacking player; its index, position and flags are mutated.
    :return:
    """
    if player.setAction is False:
        if player.facingRight is True:
            player.multiplier = 1
        else:
            player.multiplier = -1
        player.index = 0
        player.setAction = True
    if player.multiplier == 1:
        # Facing right.
        if player.index > len(player.character.stand_a_animation)-1:
            player.index = 0
        else:
            if player.index > 8 and player.index < 10 and player.hit_box_fill_once is not True:
                # Single active frame (index == 9): spawn the jab hit box once.
                stand_a_hit_box = HitBox(player.x + 100, player.y + 80, 50, 50, damage=5, hitstun=8, knockback= 3,
                                         knockdown= False, blocktype='stand', attack_level=1)
                self.hit_box.append(stand_a_hit_box)
                player.hit_box_fill_once = True
            player.character.image = player.character.stand_a_animation[player.index]
            player.index += 1
    else:
        # Facing left: frames are mirrored and the sprite is shifted 17 px
        # left while the wide frames (indices 4-9) play — presumably to keep
        # the character visually anchored. TODO(review): confirm against the
        # sprite sheet.
        if player.index > len(player.character.stand_a_animation)-1:
            player.index = 0
        else:
            if player.index >= 4 and player.index <= 9:
                if player.off_set is False:
                    player.x -= 17
                    player.off_set = True
                    player.off_set_value = 17
            else:
                if player.off_set is True:
                    # Restore the position once the wide frames are done.
                    player.x += 17
                    player.off_set = False
                    player.off_set_value = 0
            if player.index > 8 and player.index < 10 and player.hit_box_fill_once is not True:
                stand_a_hit_box = HitBox(player.x - 20 + player.off_set_value, player.y + 80, 50, 50,
                                         damage=5, hitstun=8, knockback= 3,knockdown= False, blocktype='stand',
                                         attack_level=1)
                self.hit_box.append(stand_a_hit_box)
                player.hit_box_fill_once = True
            player.character.image = \
                pygame.transform.flip(player.character.stand_a_animation[player.index], True, False)
            player.index += 1
    # Active frames over: remove any lingering hit boxes.
    if player.index > 12:
        self.hit_box.clear()
    # Animation finished: return control and reset the attack state.
    if player.index >= len(player.character.stand_a_animation):
        player.setAction = False
        player.attack_a = False
        player.hit_box_fill_once = False
def crouch_a(self, player):
    """
    Plays the character's Crouch A attack and updates hit box.

    On the first call the facing direction is latched into
    player.multiplier and the frame index reset. The hit box is spawned
    exactly once, on frame index 11 (index > 10 and < 12); hit boxes are
    cleared once the index passes 12, and the attack flags are reset when
    the animation completes. A small 4 px lunge is applied on frame 7.
    :param player: the attacking player; its index, position and flags are mutated.
    :return:
    """
    if player.setAction is False:
        if player.facingRight is True:
            player.multiplier = 1
        else:
            player.multiplier = -1
        player.index = 0
        player.setAction = True
    if player.multiplier == 1:
        # Facing right.
        if player.index > len(player.character.crouch_a_animation)-1:
            player.index = 0
        else:
            if player.index == 7:
                # Small forward lunge on the extension frame.
                player.x += 4
            if player.index > 10 and player.index < 12 and player.hit_box_fill_once is not True:
                stand_a_hit_box = HitBox(player.x + 100, player.y + 100, 70, 50, damage=5, hitstun=16, knockback= 2,
                                         knockdown= False, blocktype='crouch',attack_level=1)
                self.hit_box.append(stand_a_hit_box)
                player.hit_box_fill_once = True
            player.character.image = player.character.crouch_a_animation[player.index]
            player.index += 1
    else:
        # Facing left: frames are mirrored and the sprite is shifted 20 px
        # left for almost the whole animation (the frames are wider).
        if player.index > len(player.character.crouch_a_animation)-1:
            player.index = 0
        else:
            if player.index >= 0 and player.index < len(player.character.crouch_a_animation)-1:
                if player.off_set is False:
                    player.x -= 20
                    player.off_set = True
                    player.off_set_value = 20
            else:
                if player.off_set is True:
                    player.x += 20
                    player.off_set = False
                    player.off_set_value = 0
            if player.index == 7:
                # Mirrored lunge.
                player.x -= 4
            if player.index > 10 and player.index < 12 and player.hit_box_fill_once is not True:
                stand_a_hit_box = HitBox(player.x - 20 + player.off_set_value, player.y + 100, 70, 50,
                                         damage=5, hitstun=16, knockback= 2,knockdown= False, blocktype='crouch',
                                         attack_level=1)
                self.hit_box.append(stand_a_hit_box)
                player.hit_box_fill_once = True
            player.character.image = \
                pygame.transform.flip(player.character.crouch_a_animation[player.index], True, False)
            player.index += 1
    # Active frames over: remove any lingering hit boxes.
    if player.index > 12:
        self.hit_box.clear()
    # Animation finished: return control and reset the attack state.
    if player.index >= len(player.character.crouch_a_animation):
        player.setAction = False
        # The original built (and immediately discarded) a flipped, scaled
        # GinaCrouch1 surface here — a no-op whose result was never assigned,
        # reloading the PNG from disk on every attack recovery. Removed.
        player.crouch_attack_a = False
        player.hit_box_fill_once = False
def jump_a(self, player):
    """
    Plays the character's Jump A attack and updates hit box.

    Unlike the grounded attacks, this rebuilds its hit box every call: the
    list is cleared on entry and re-filled while the active frames
    (indices 0-4) play. When the animation ends the air-attack flags are
    reset.
    :param player: the attacking player; its index, image and flags are mutated.
    :return:
    """
    self.hit_box.clear()
    if player.index > len(player.character.jump_a_animation)-1:
        player.index = 0
    else:
        if player.multiplier == 1:
            # Facing right.
            player.character.image = player.character.jump_a_animation[player.index]
            if player.index >= 0 and player.index < 5 and player.hit_box_fill_once is False:
                jump_a_hit_box = HitBox(player.x + 100 + player.off_set_value, player.y - 10, 50, 50,
                                        damage=5, hitstun=8, knockback= 3,knockdown= False, blocktype='overhead',
                                        attack_level=1)
                self.hit_box.append(jump_a_hit_box)
                # NOTE(review): fill-once deliberately left disabled (see the
                # commented line below) so the box is re-created each active
                # frame — presumably intended; confirm whether multi-hit is wanted.
                # player.hit_box_fill_once = True
        else:
            # Facing left: mirrored frame and hit box placed behind player.x.
            player.character.image = \
                pygame.transform.flip(player.character.jump_a_animation[player.index],True, False)
            if player.index >= 0 and player.index < 5 and player.hit_box_fill_once is False:
                jump_a_hit_box = HitBox(player.x - 20 + player.off_set_value, player.y - 10, 50, 50,
                                        damage=5, hitstun=8, knockback= 3,knockdown= False, blocktype='overhead'
                                        ,attack_level=1)
                self.hit_box.append(jump_a_hit_box)
                # player.hit_box_fill_once = True
        player.index += 1
    # Animation finished: reset the air-attack state.
    if player.index > len(player.character.jump_a_animation)-1:
        player.jump_attack_a = False
        player.air_attack_once = True
        player.hit_box_fill_once = False
def stand_b(self, player):
    """
    Plays the character's Stand B attack and updates hit box.

    On the first call the facing direction is latched into
    player.multiplier and the frame index reset. The hit box spawns once
    during frames 10-17; a DamageBox (extended vulnerable region for the
    outstretched limb) is added during frames 8-19; hit boxes are cleared
    past index 19. When the animation completes the sprite is restored to
    the (mirrored) neutral stand frame and the attack state reset.
    :param player: the attacking player; its index, position and flags are mutated.
    :return:
    """
    if player.setAction is False:
        if player.facingRight is True:
            player.multiplier = 1
        else:
            player.multiplier = -1
        player.index = 0
        player.setAction = True
    if player.multiplier == 1:
        # Facing right.
        # NOTE(review): this guard uses `> len(...)` (no -1), unlike the
        # other attacks — if index ever equals len exactly the else branch
        # would index out of range; in practice the completion block below
        # resets the attack before that happens. Confirm intended.
        if player.index > len(player.character.stand_b_animation):
            player.index = 0
        else:
            if player.index == 5:
                # Forward step into the kick.
                player.x += 10
            if player.index > 9 and player.index < 18 and player.hit_box_fill_once is not True:
                stand_b_hit_box = HitBox(player.x + 160, player.y + 120, 50, 50, damage=15, hitstun=20, knockback= 4,
                                         knockdown= False, blocktype='stand', attack_level=2)
                self.hit_box.append(stand_b_hit_box)
                player.hit_box_fill_once = True
            if player.index > 7 and player.index < 20:
                # Extended limb is itself vulnerable during the active frames.
                self.hurt_box.append(DamageBox(player.x + 130 + player.off_set_value,
                                               player.y + 120, 100, 60))
            player.character.image = player.character.stand_b_animation[player.index]
            if player.index == 34:
                # Step back during recovery.
                player.x -= 10
            player.index += 1
            if player.index > 19:
                self.hit_box.clear()
    else:
        # Facing left: mirrored frames, 85 px offset while the wide frames
        # play, and mirrored x adjustments.
        if player.index == 5:
            player.x -= 10
        if player.index >= 0 and player.index < len(player.character.stand_b_animation)-1:
            if player.off_set is False:
                player.x -= 85
                player.off_set = True
                player.off_set_value = 85
        else:
            if player.off_set is True:
                player.x += 85
                player.off_set = False
                player.off_set_value = 0
        if player.index >= 9 and player.index < 18 and player.hit_box_fill_once is not True:
            stand_b_hit_box = HitBox(player.x - 90 + player.off_set_value, player.y + 120, 50, 50,
                                     damage=15, hitstun=20, knockback= 4,knockdown= False, blocktype='stand'
                                     ,attack_level=2)
            self.hit_box.append(stand_b_hit_box)
            player.hit_box_fill_once = True
        if player.index > 7 and player.index < 20:
            self.hurt_box.append(DamageBox(player.x - 115 + player.off_set_value,
                                           player.y + 120, 100, 60))
        if player.index > len(player.character.stand_b_animation) - 1:
            # NOTE(review): out-of-range fallback shows frame index-2 —
            # looks like a guard against the off-by-one above; confirm.
            player.character.image = \
                pygame.transform.flip(player.character.stand_b_animation[player.index-2], True, False)
        else:
            player.character.image = \
                pygame.transform.flip(player.character.stand_b_animation[player.index], True, False)
        if player.index == 34:
            player.x += 10
        if player.index > 19:
            self.hit_box.clear()
        player.index += 1
    # Animation finished: restore the mirrored neutral pose and reset state.
    if player.index > len(player.character.stand_b_animation)-1:
        player.character.image = \
            pygame.transform.flip(
                pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaStand1.png'), (130, 250))
                , True, False)
        player.setAction = False
        player.off_set = False
        player.off_set_value = 0
        player.attack_b = False
        player.hit_box_fill_once = False
def crouch_b(self, player):
    """
    Plays the character's Crouch B attack and updates hit box.

    On the first call the facing direction is latched into
    player.multiplier and the frame index reset. The hit box spawns once
    during frames 13-17; hit boxes are cleared past index 18, and the
    attack state is reset when the animation completes.
    :param player: the attacking player; its index, position and flags are mutated.
    :return:
    """
    if player.setAction is False:
        if player.facingRight is True:
            player.multiplier = 1
        else:
            player.multiplier = -1
        player.index = 0
        player.setAction = True
    if player.multiplier == 1:
        # Facing right.
        if player.index > len(player.character.crouch_b_animation)-1:
            player.index = 0
        else:
            if player.index > 12 and player.index < 18 and player.hit_box_fill_once is not True:
                stand_a_hit_box = HitBox(player.x + 70, player.y, 80, 100, damage=15, hitstun=12, knockback= 3,
                                         knockdown= False, blocktype='stand',attack_level=1)
                self.hit_box.append(stand_a_hit_box)
                player.hit_box_fill_once = True
            player.character.image = player.character.crouch_b_animation[player.index]
            player.index += 1
    else:
        # Facing left: mirrored frames.
        if player.index > len(player.character.crouch_b_animation)-1:
            player.index = 0
        else:
            # NOTE(review): this offset block moves the sprite by 0 px —
            # it only toggles the off_set flags (value stays 0). Kept because
            # off_set/off_set_value are read elsewhere; looks like a template
            # copied from the other attacks with the offset zeroed out.
            if player.index >= 12 and player.index <= 18:
                if player.off_set is False:
                    player.x -= 0
                    player.off_set = True
                    player.off_set_value = 0
            else:
                if player.off_set is True:
                    player.x += 0
                    player.off_set = False
                    player.off_set_value = 0
            if player.index > 12 and player.index < 18 and player.hit_box_fill_once is not True:
                crouch_b_hit_box = HitBox(player.x - 20 + player.off_set_value, player.y, 80, 100,
                                          damage=15, hitstun=12, knockback= 3,knockdown= False, blocktype='stand'
                                          ,attack_level=1)
                self.hit_box.append(crouch_b_hit_box)
                player.hit_box_fill_once = True
            player.character.image = \
                pygame.transform.flip(player.character.crouch_b_animation[player.index], True, False)
            player.index += 1
    # Active frames over: remove any lingering hit boxes.
    if player.index > 18:
        self.hit_box.clear()
    # Animation finished: return control and reset the attack state.
    if player.index >= len(player.character.crouch_b_animation):
        player.setAction = False
        player.crouch_attack_b = False
        player.hit_box_fill_once = False
    def jump_b(self, player):
        """
        Plays the character's Jump B attack and updates hit box.

        Hit boxes are rebuilt every frame (self.hit_box is cleared on entry):
        two overlapping overhead boxes are active on frames 6-11 while the
        player has not yet confirmed a hit, and an extended hurt box follows
        the attacking limb from frame 7 onward.  When facing left the sprite
        is shifted 100px during the animation and restored afterwards.
        :param player:
        :return:
        """
        self.hit_box.clear()
        if player.index > len(player.character.jump_b_animation)-1:
            player.index = 0
        else:
            if player.multiplier == 1:
                player.character.image = player.character.jump_b_animation[player.index]
                # Active frames 6-11: two overhead hit boxes, skipped once a
                # hit has already been confirmed.
                if player.index >= 6 and player.index < 12 and player.hit_box_fill_once is False \
                        and player.hit_confirm is False:
                    jump_b_hit_box = HitBox(player.x + 120 + player.off_set_value, player.y + 130, 100, 50,
                                            damage=5, hitstun=12, knockback= 4,knockdown= False, blocktype='overhead'
                                            ,attack_level=2)
                    jump_b_hit_box_2 = HitBox(player.x + player.off_set_value, player.y + 170, 50, 50,
                                              damage=5, hitstun=12, knockback= 4,knockdown= False, blocktype='overhead'
                                              ,attack_level=2)
                    self.hit_box.append(jump_b_hit_box)
                    self.hit_box.append(jump_b_hit_box_2)
                    # player.hit_box_fill_once = True
                # Extended hurt box on the attacking limb.
                if player.index > 6 and player.index < len(player.character.jump_b_animation)-1:
                    self.hurt_box.append(DamageBox(player.x + 110 + player.off_set_value,
                                                   player.y + 130, 80, 60))
            else:
                # Facing left: shift the sprite 100px for the flipped frames.
                if player.index >= 0 and player.index < len(player.character.jump_b_animation)-1:
                    if player.off_set is False:
                        player.x -= 100
                        player.off_set = True
                        player.off_set_value = 100
                else:
                    if player.off_set is True:
                        player.x += 100
                        player.off_set = False
                        player.off_set_value = 0
                player.character.image = \
                    pygame.transform.flip(player.character.jump_b_animation[player.index],True, False)
                if player.index >= 6 and player.index < 12 and player.hit_box_fill_once is False \
                        and player.hit_confirm is False:
                    jump_b_hit_box = HitBox(player.x - 70 + player.off_set_value, player.y + 130, 100, 50,
                                            damage=5, hitstun=12, knockback= 4,knockdown= False, blocktype='overhead'
                                            ,attack_level=2)
                    jump_b_hit_box_2 = HitBox(player.x + 80 + player.off_set_value, player.y + 170, 50, 50,
                                              damage=5, hitstun=12, knockback= 4,knockdown= False, blocktype='overhead'
                                              ,attack_level=2)
                    self.hit_box.append(jump_b_hit_box)
                    self.hit_box.append(jump_b_hit_box_2)
                    # player.hit_box_fill_once = True
                if player.index > 6 and player.index < len(player.character.jump_b_animation)-1:
                    self.hurt_box.append(DamageBox(player.x - 80 + player.off_set_value,
                                                   player.y + 130, 80, 60))
            player.index += 1
        if player.index > len(player.character.jump_b_animation)-1:
            # Animation finished: restore the falling sprite and clear state.
            if player.multiplier == 1:
                player.character.image = pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump4.png'),
                                                                (130, 250))
            else:
                player.character.image = pygame.transform.flip(
                    pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump4.png'),
                                           (130, 250)), True, False)
            player.off_set = False
            player.jump_attack_b = False
            player.air_attack_once = True
            player.hit_box_fill_once = False
def stand_c(self, player):
"""
Plays the character's Stand C attack and updates hit box.
:param player:
:return:
"""
if player.setAction is False:
if player.facingRight is True:
player.multiplier = 1
else:
player.multiplier = -1
player.index = 0
player.setAction = True
if player.multiplier == 1:
if player.index > len(player.character.stand_c_animation)-1:
player.index = 0
else:
if player.index > 13 and player.index < 25 and player.hit_box_fill_once is not True:
stand_c_hit_box_1 = HitBox(player.x + 100, player.y + 50, 80, 70,
damage=20, hitstun=12, knockback= 3,
knockdown= False, blocktype='stand', attack_level=3)
self.hit_box.append(stand_c_hit_box_1)
player.hit_box_fill_once = True
if player.index > 26 and player.index < 34 and player.hit_box_fill_twice is not True:
stand_c_hit_box_2 = HitBox(player.x + 110, player.y + 40, 80, 70,
damage=20, hitstun=12, knockback= -3,
knockdown= False, blocktype='stand', attack_level=3)
self.hit_box.append(stand_c_hit_box_2)
player.hit_box_fill_twice = True
if player.index > 10 and player.index < 35:
self.hurt_box.append(DamageBox(player.x + 100 + player.off_set_value,
player.y + 55, 100, 60))
player.character.image = player.character.stand_c_animation[player.index]
player.index += 1
else:
if player.index > len(player.character.stand_c_animation)-1:
player.index = 0
else:
if player.index >= 4 and player.index <= 20:
if player.off_set is False:
player.x -= 20
player.off_set = True
player.off_set_value = 20
else:
if player.off_set is True:
player.x += 20
player.off_set = False
player.off_set_value = 0
if player.index > 13 and player.index < 25 and player.hit_box_fill_once is not True:
stand_c_hit_box_1 = HitBox(player.x - 40 + player.off_set_value, player.y + 50, 80, 70,
damage=5, hitstun=12, knockback= 3,knockdown= False, blocktype='stand'
,attack_level=3)
self.hit_box.append(stand_c_hit_box_1)
player.hit_box_fill_once = True
if player.index > 26 and player.index < 34 and player.hit_box_fill_twice is not True:
stand_c_hit_box_2 = HitBox(player.x - 50 + player.off_set_value, player.y + 40, 80, 70,
damage=20, hitstun=12, knockback= -3,
knockdown= False, blocktype='stand', attack_level=3)
self.hit_box.append(stand_c_hit_box_2)
player.hit_box_fill_twice = True
if player.index > 10 and player.index < 35:
self.hurt_box.append(DamageBox(player.x - 60 + player.off_set_value,
player.y + 55, 100, 60))
player.character.image = \
pygame.transform.flip(player.character.stand_c_animation[player.index], True, False)
player.index += 1
if player.index > 35:
self.hit_box.clear()
if player.index >= len(player.character.stand_c_animation):
player.setAction = False
player.attack_c = False
player.hit_box_fill_once = False
player.hit_box_fill_twice = False
    def crouch_c(self, player):
        """
        Plays the character's Crouch C attack (sweep) and updates hit box.

        A single low hit box (knockdown, blocktype 'sweep') is active on
        frames 14-24 and appended once per attack via hit_box_fill_once.
        When facing left the sprite is shifted 100px during the animation
        and restored at the end, where the idle crouch sprite is re-applied.
        :param player:
        :return:
        """
        if player.setAction is False:
            # Latch the facing direction for the whole attack.
            if player.facingRight is True:
                player.multiplier = 1
            else:
                player.multiplier = -1
            player.index = 0
            player.setAction = True
        if player.multiplier == 1:
            if player.index > len(player.character.crouch_c_animation)-1:
                player.index = 0
            # NOTE: unlike the other attacks there is no `else` here, so the
            # frame logic below also runs on the wrap-around frame (index 0);
            # harmless since the active window only starts at frame 14.
            if player.index >= 14 and player.index <= 24 and player.hit_box_fill_once is not True:
                crouch_c_hit_box_1 = HitBox(player.x + 130, player.y + 200, 100, 20,
                                            damage=14, hitstun=15, knockback= 3,
                                            knockdown= True, blocktype='sweep', attack_level=3)
                self.hit_box.append(crouch_c_hit_box_1)
                player.hit_box_fill_once = True
            player.character.image = player.character.crouch_c_animation[player.index]
            player.index += 1
        else:
            if player.index > len(player.character.crouch_c_animation)-1:
                player.index = 0
            if player.index >= 0 and player.index < len(player.character.crouch_c_animation)-1:
                # Shift the sprite 100px left while the flipped frames play.
                if player.off_set is False:
                    player.x -= 100
                    player.off_set = True
                    player.off_set_value = 100
            else:
                if player.off_set is True:
                    player.x += 100
                    player.off_set = False
                    player.off_set_value = 0
            if player.index >= 14 and player.index <= 24 and player.hit_box_fill_once is not True:
                crouch_c_hit_box_1 = HitBox(player.x, player.y + 200, 100, 20,
                                            damage=14, hitstun=15, knockback= 3,
                                            knockdown= True, blocktype='sweep', attack_level=3)
                self.hit_box.append(crouch_c_hit_box_1)
                player.hit_box_fill_once = True
            player.character.image = \
                pygame.transform.flip(player.character.crouch_c_animation[player.index], True, False)
            player.index += 1
        if player.index > 24:
            # Active window over: remove the sweep hit box.
            self.hit_box.clear()
        if player.index > len(player.character.crouch_c_animation)-1:
            # Animation finished: return to the crouching idle sprite.
            if player.multiplier == 1:
                player.character.image = player.character.crouching[0]
            else:
                player.character.image = \
                    pygame.transform.flip(player.character.crouching[0], True, False)
            player.setAction = False
            player.crouch_attack_c = False
            player.hit_box_fill_once = False
    def jump_c(self, player):
        """
        Plays the character's Jump C attack and updates hit box.

        Hit boxes are rebuilt every frame (cleared on entry).  A single
        overhead box is active from frame 15 to the animation's last frame
        while no hit has been confirmed.  The attack also lifts the
        character (y -= 6) from frame 17 onward, and on completion forces
        the descent state if the jump was still ascending.
        :param player:
        :return:
        """
        self.hit_box.clear()
        if player.index > len(player.character.jump_c_animation)-1:
            player.index = 0
        else:
            if player.multiplier == 1:
                player.character.image = player.character.jump_c_animation[player.index]
                if player.index >= 15 and player.index < len(player.character.jump_c_animation)-1 \
                        and player.hit_box_fill_once is False and player.hit_confirm is False:
                    jump_c_hitbox = HitBox(player.x + 80 + player.off_set_value, player.y + 150, 50, 70,
                                           damage=10, hitstun=15, knockback= 5,knockdown= False, blocktype='overhead'
                                           ,attack_level=3)
                    self.hit_box.append(jump_c_hitbox)
            else:
                if player.index >= 0 and player.index < len(player.character.jump_c_animation)-1:
                    # Shift the sprite 40px left while the flipped frames play.
                    if player.off_set is False:
                        player.off_set = True
                        player.off_set_value = 40
                        player.x -= 40
                else:
                    if player.off_set is True:
                        player.off_set = False
                        player.off_set_value = 0
                        player.x += 40
                if player.index >= 15 and player.index < len(player.character.jump_c_animation)-1 \
                        and player.hit_box_fill_once is False and player.hit_confirm is False:
                    jump_c_hitbox = HitBox(player.x - 30 + player.off_set_value, player.y + 150, 50, 70,
                                           damage=10, hitstun=15, knockback= 5,knockdown= False, blocktype='overhead'
                                           ,attack_level=3)
                    self.hit_box.append(jump_c_hitbox)
                player.character.image = pygame.transform.flip(player.character.jump_c_animation[player.index]
                                                               , True, False)
            player.index += 1
        if player.index > 16 and player.index < len(player.character.jump_c_animation)-1:
            # The attack carries the character slightly upward.
            player.y -= 6
        if player.index > len(player.character.jump_c_animation)-1:
            # Animation finished: restore the falling sprite and clear state.
            if player.multiplier == 1:
                player.character.image = pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump4.png'),
                                                                (130, 250))
            else:
                player.character.image = pygame.transform.flip(
                    pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump4.png'),
                                           (130, 250)), True, False)
            player.off_set = False
            player.jump_attack_c = False
            player.air_attack_once = True
            player.hit_box_fill_once = False
            if player.isJumping:
                # Cancel the rest of the ascent and start descending.
                player.isJumping = False
                player.isDescending = True
    def grab_attack(self, player):
        """
        Plays the character's grab attack and updates grab box.

        A GrabBox is appended on every frame from 16 up to the animation's
        second-to-last frame (no fill-once guard); the box list is cleared
        once player.index passes 20.  When facing left the sprite is shifted
        60px during the animation and restored at the end.
        :param player:
        :return:
        """
        if player.setAction is False:
            # Latch the facing direction for the whole grab.
            if player.facingRight is True:
                player.multiplier = 1
            else:
                player.multiplier = -1
            player.index = 0
            player.setAction = True
        if player.index > len(player.character.grab_animation)-1:
            player.index = 0
        else:
            if player.multiplier == 1:
                player.character.image = player.character.grab_animation[player.index]
                if player.index >= 16 and player.index < len(player.character.grab_animation)-1:
                    # Re-appended every active frame; removed below once
                    # player.index passes 20.
                    grab_box = GrabBox(player.x + 100 + player.off_set_value, player.y + 80, 30, 30)
                    self.hit_box.append(grab_box)
            else:
                if player.index >= 0 and player.index < len(player.character.grab_animation)-1:
                    # Shift the sprite 60px left while the flipped frames play.
                    if player.off_set is False:
                        player.off_set = True
                        player.off_set_value = 60
                        player.x -= 60
                else:
                    if player.off_set is True:
                        player.off_set = False
                        player.off_set_value = 0
                        player.x += 60
                player.character.image = pygame.transform.flip(player.character.grab_animation[player.index]
                                                               , True, False)
                if player.index >= 16 and player.index < len(player.character.grab_animation)-1:
                    grab_box = GrabBox(player.x + player.off_set_value, player.y + 80, 30, 30)
                    self.hit_box.append(grab_box)
            player.index += 1
        if player.index > 20:
            self.hit_box.clear()
        if player.index > len(player.character.grab_animation)-1:
            # Grab finished/whiffed: restore the idle sprite and reset state.
            if player.multiplier == 1:
                player.character.image = player.character.standing[0]
            else:
                player.character.image = pygame.transform.flip(player.character.standing[0], True, False)
            player.setAction = False
            player.off_set = False
            player.off_set_value = 0
            player.grabbing = False
            player.hit_box_fill_once = False
    def throw_attack(self, player, two):
        """
        Plays the character's throw and updates the opposing player's state after the throw.

        The thrown player (`two`) is dropped 130px and damaged 8 points once
        per throw; two.off_set doubles here as the "already thrown" latch.
        A one-shot ThrowDust effect is spawned.  After 40 frames both
        players' states are reset and `two` enters the getting-up state.
        :param player:
        :param two:
        :return:
        """
        if player.off_set:
            # Undo any sprite offset left over from a previous action.
            player.x += player.off_set_value
            player.off_set = False
            player.off_set_value = 0
        if player.setAction is False:
            # Latch the facing direction for the whole throw.
            if player.facingRight is True:
                player.multiplier = 1
            else:
                player.multiplier = -1
            player.index = 0
            player.setAction = True
        if player.multiplier == 1:
            if two.off_set is False:
                # Apply the throw once: drop and damage the opponent.
                two.y += 130
                two.health_points -= 8
                two.off_set = True
            player.character.image = \
                pygame.transform.scale(pygame.image.load
                                       ('Sprites/Gina/GinaCrouch1.png'), (130, 250))
            two.character.image = \
                pygame.transform.flip(two.character.wakeup_animation[0], True, False)
            if player.animation_fill_once is False:
                self.effects_animation.append(ThrowDust(player.x + 50, player.y))
                player.animation_fill_once = True
        else:
            if two.off_set is False:
                two.y += 130
                two.health_points -= 8
                two.off_set = True
            player.character.image = \
                pygame.transform.flip(pygame.transform.scale(pygame.image.load
                                      ('Sprites/Gina/GinaCrouch1.png'), (130, 250)), True, False)
            two.character.image = two.character.wakeup_animation[0]
            if player.animation_fill_once is False:
                self.effects_animation.append(ThrowDust(player.x - 120, player.y))
                player.animation_fill_once = True
        player.index += 1
        if player.index > 40:
            # Throw finished: reset both players; `two` must now get up.
            self.hit_box.clear()
            player.setAction = False
            player.animation_fill_once = False
            player.throw = False
            two.setAction = False
            two.grabbed = False
            two.getting_up = True
            two.off_set = False
            two.timer = 1
# -----------------------------------------------------------------------------------------------------------------
    def special_one(self, player):
        """
        Plays the character's special attack one (fireball) and updates hit box.

        From frame 24 onward a GinaFireBall projectile is spawned once per
        attack, but only when the player has at least 20 meter points (which
        are then spent).  The animation itself still plays without meter.
        When facing left the sprite is shifted 40px during the animation.
        :param player:
        :return:
        """
        if player.setAction is False:
            # Latch the facing direction for the whole attack.
            if player.facingRight is True:
                player.multiplier = 1
            else:
                player.multiplier = -1
            player.index = 0
            player.setAction = True
        if player.index > len(player.character.special_one_animation)-1:
            player.index = 0
        else:
            if player.multiplier == 1:
                player.character.image = player.character.special_one_animation[player.index]
                if player.index >= 24 and player.index < len(player.character.special_one_animation)-1\
                        and player.hit_box_fill_once is False and player.meter_points >= 20:
                    # Spawn the projectile (velocity +100) and spend meter.
                    special_box = GinaFireBall(100, player.x + self.rect.width, player.y + 50, 80, 80,
                                               damage=12, hitstun=10, knockback= 3,knockdown= False, blocktype='stand'
                                               ,attack_level=2)
                    self.hit_box.append(special_box)
                    player.hit_box_fill_once = True
                    player.meter_points -= 20
            else:
                if player.index >= 0 and player.index < len(player.character.special_one_animation)-1:
                    # Shift the sprite 40px left while the flipped frames play.
                    if player.off_set is False:
                        player.off_set = True
                        player.off_set_value = 40
                        player.x -= 40
                else:
                    if player.off_set is True:
                        player.off_set = False
                        player.off_set_value = 0
                        player.x += 40
                player.character.image = pygame.transform.flip(player.character.special_one_animation[player.index]
                                                               , True, False)
                if player.index >= 24 and player.index < len(player.character.special_one_animation)-1\
                        and player.hit_box_fill_once is False and player.meter_points >= 20:
                    special_box = GinaFireBall(-100, player.x, player.y + 50, 80, 80,
                                               damage=12, hitstun=10, knockback= 3,knockdown= False, blocktype='stand'
                                               ,attack_level=2)
                    self.hit_box.append(special_box)
                    player.hit_box_fill_once = True
                    player.meter_points -= 20
            player.index += 1
        if player.index > len(player.character.special_one_animation)-1:
            # Animation finished: restore the idle sprite and reset state.
            if player.multiplier == 1:
                player.character.image = player.character.standing[0]
            else:
                player.character.image = pygame.transform.flip(player.character.standing[0], True, False)
            player.setAction = False
            player.special_one = False
            player.hit_box_fill_once = False
# -----------------------------------------------------------------------------------------------------------------
    def being_damaged(self, player):
        """
        Plays the character's damaged animation and updates their hurt box.

        The first half of the hitstun shows the strong-hit sprite (plus a
        one-shot DamageAnimation effect); the second half shows the recoil
        sprite.  Knockback pushes the player away from the attacker each
        frame.  When hitstun runs out, a knockdown is applied if flagged and
        all hit-reaction state is reset.
        :param player:
        :return:
        """
        # Knockback direction is opposite to the facing direction.
        if player.facingRight and not player.setAction:
            player.multiplier = -1
            player.setAction = True
        elif not player.facingRight and not player.setAction:
            player.multiplier = 1
            player.setAction = True
        if player.multiplier == -1:
            if player.hitstun > player.max_hitstun/2:
                if player.animation_fill_once is False:
                    self.effects_animation.append(DamageAnimation(player.x, player.y - 150, True, player.attack_level))
                    player.animation_fill_once = True
                player.character.image = \
                    pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaHurt1.png'), (130, 250))
            else:
                player.character.image = \
                    pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaHurt2.png'), (130, 250))
            player.hitstun -= 1
        else:
            if player.hitstun > player.max_hitstun/2:
                if player.animation_fill_once is False:
                    self.effects_animation.append(DamageAnimation(player.x - 100, player.y - 150, False
                                                                  , player.attack_level))
                    player.animation_fill_once = True
                player.character.image = pygame.transform.flip(
                    pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaHurt1.png'),
                                           (130, 250)), True, False)
            else:
                player.character.image = pygame.transform.flip(
                    pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaHurt2.png'),
                                           (130, 250)), True, False)
            player.hitstun -= 1
        player.x += player.knockback * player.multiplier
        if player.hitstun <= 0:
            # Hitstun over: apply knockdown (if any) and reset hit state.
            if player.knockdown is True:
                if player.loser is False:
                    player.y = player.yKnockdown
                if player.facingRight:
                    player.character.image = \
                        pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaKnockdown.png'), (250, 130))
                    player.getting_up = True
                else:
                    player.character.image = pygame.transform.flip(
                        pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaKnockdown.png'),
                                               (250, 130)), True, False)
                    player.getting_up = True
            player.got_hit = False
            player.animation_fill_once = False
            player.setAction = False
            player.hitstun = 0
            player.max_hitstun = 0
            player.knockback = 0
            player.attack_level = 0
    def being_air_damaged(self, player):
        """
        Plays the character's damaged animation in the air and updates their hurt box.

        The sprite runs through three hitstun phases (knocked up, recoil,
        falling) while air knockback (scaled 1.5x) pushes the player back
        each frame.  Landing (current_jump <= 0) forces a knockdown and
        clears every jump/hit flag.
        :param player:
        :return:
        """
        if player.facingRight and not player.setAction:
            # Cancel the jump and latch the knockback direction.
            player.isJumping = False
            player.isDescending = False
            player.multiplier = -1
            player.setAction = True
        elif not player.facingRight and not player.setAction:
            player.isJumping = False
            player.isDescending = False
            player.multiplier = 1
            player.setAction = True
        if player.facingRight:
            if player.animation_fill_once is False:
                self.effects_animation.append(DamageAnimation(player.x, player.y - 150, False, player.attack_level))
                player.animation_fill_once = True
            # NOTE(review): hitstun exactly equal to max_hitstun/2 or /4
            # matches none of these strict inequalities, leaving the previous
            # sprite in place for that frame.
            if player.hitstun > player.max_hitstun/2:
                player.character.image = \
                    pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaHurt1.png'), (130, 250))
                player.y -= 7
                player.current_jump += 15
            elif player.hitstun > player.max_hitstun/4 and player.hitstun < player.max_hitstun/2:
                player.character.image = \
                    pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaHurt2.png'), (130, 250))
            elif player.hitstun < player.max_hitstun/4:
                # Final phase: falling sprite, losing height.
                player.character.image = \
                    pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaAirHurt.png'), (150, 280))
                player.y += 7
                player.current_jump -= 8
            player.hitstun -= 1
        else:
            if player.animation_fill_once is False:
                self.effects_animation.append(DamageAnimation(player.x - 100, player.y - 150, False
                                                              , player.attack_level))
                player.animation_fill_once = True
            if player.hitstun > player.max_hitstun/2:
                player.character.image = pygame.transform.flip(
                    pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaHurt1.png'),
                                           (130, 250)), True, False)
                player.y -= 7
                player.current_jump += 15
            elif player.hitstun > player.max_hitstun/4 and player.hitstun < player.max_hitstun/2:
                player.character.image = pygame.transform.flip(
                    pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaHurt2.png'),
                                           (130, 250)), True, False)
            elif player.hitstun < player.max_hitstun/4:
                player.character.image = pygame.transform.flip(
                    pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaAirHurt.png'),
                                           (150, 280)), True, False)
                player.y += 7
                player.current_jump -= 8
            player.hitstun -= 1
        # Air knockback is 1.5x the grounded knockback.
        player.x += player.knockback * 1.5 * player.multiplier
        if player.current_jump <= 0:
            # Landed: force a knockdown and reset all jump/hit state.
            player.knockdown = True
            if player.facingRight:
                player.character.image = \
                    pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaKnockdown.png'), (250, 130))
                player.getting_up = True
            else:
                player.character.image = pygame.transform.flip(
                    pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaKnockdown.png'),
                                           (250, 130)), True, False)
                player.getting_up = True
            player.current_jump = 0
            if player.loser is False:
                player.y = player.yKnockdown
            player.animation_fill_once = False
            player.got_air_hit = False
            player.setAction = False
            player.isJumping = False
            player.neutral_jumping = False
            player.forward_jumping = False
            player.back_jumping = False
            player.isDescending = False
            player.hitstun = 0
            player.max_hitstun = 0
            player.knockback = 0
    def wake_up(self, player):
        """
        Plays the character's wake up animation and updates their hurt box.

        The player is invincible while getting up: an InvincibleBox is
        appended every frame of the animation.  From frame 30 onward the
        sprite is raised 60px (and, when facing left, shifted 50px) until
        the animation completes, at which point y snaps back to yOriginal
        and the knockdown flags are cleared.
        :param player:
        :return:
        """
        if player.facingRight and not player.setAction:
            player.index = 0
            player.multiplier = 1
            player.setAction = True
        elif not player.facingRight and not player.setAction:
            player.index = 0
            player.multiplier = -1
            player.setAction = True
        if player.facingRight:
            if player.index > len(player.character.wakeup_animation)-1:
                player.index = 0
            else:
                if player.index >= 0 and player.index < 30:
                    self.hurt_box.append(InvincibleBox(player.x + 50 + player.off_set_value
                                                       , player.y, self.rect.width + 30, self.rect.height - 50))
                elif player.index >= 30 and player.index < len(player.character.wakeup_animation)-1:
                    self.hurt_box.append(InvincibleBox(player.x + 30 + player.off_set_value
                                                       , player.y, self.rect.width + 30, self.rect.height - 50))
                if player.index >= 30 and player.index < len(player.character.wakeup_animation)-1:
                    # Rise phase: lift the sprite for the standing-up frames.
                    if player.off_set is False:
                        player.y -= 60
                        player.off_set = True
                        player.off_set_value = 0
                else:
                    if player.off_set is True:
                        player.y += 60
                        player.off_set = False
                        player.off_set_value = 0
                player.character.image = player.character.wakeup_animation[player.index]
                player.index += 1
        else:
            if player.index > len(player.character.wakeup_animation)-1:
                player.index = 0
            else:
                # NOTE(review): these hurt-box offsets are identical to the
                # right-facing branch (player.x + 50 / + 30) rather than
                # mirrored — confirm this is intended.
                if player.index >= 0 and player.index < 30:
                    self.hurt_box.append(InvincibleBox(player.x + 50 + player.off_set_value
                                                       , player.y, self.rect.width + 30, self.rect.height - 50))
                elif player.index >= 30 and player.index < len(player.character.wakeup_animation)-1:
                    self.hurt_box.append(InvincibleBox(player.x + 30 + player.off_set_value
                                                       , player.y, self.rect.width + 30, self.rect.height - 50))
                if player.index >= 30 and player.index < len(player.character.wakeup_animation)-1:
                    if player.off_set is False:
                        player.y -= 60
                        player.x -= 50
                        player.off_set = True
                        player.off_set_value = 50
                else:
                    if player.off_set is True:
                        player.y += 60
                        player.x += 50
                        player.off_set = False
                        player.off_set_value = 0
                player.character.image = pygame.transform.flip(
                    player.character.wakeup_animation[player.index], True, False)
                player.index += 1
        if player.index > len(player.character.wakeup_animation) - 1:
            # Fully up: restore the idle sprite and clear knockdown state.
            if player.facingRight:
                player.character.image = player.character.standing[0]
            else:
                player.character.image = pygame.transform.flip(player.character.standing[0], True, False)
            player.knockdown = False
            player.getting_up = False
            player.setAction = False
            player.y = player.yOriginal
    def being_blocked(self, player):
        """
        Plays the character's block animation and updates their hurt box.

        Shows the crouch-block or stand-block sprite (depending on
        player.blockingLow) with a one-shot BlockAnimation spark, while
        block-pushback moves the player away each frame.  When hitstun runs
        out, all block-reaction state is reset.
        :param player:
        :return:
        """
        # Pushback direction is opposite to the facing direction.
        if player.facingRight and not player.setAction:
            player.multiplier = -1
            player.setAction = True
        elif not player.facingRight and not player.setAction:
            player.multiplier = 1
            player.setAction = True
        if player.facingRight:
            if player.hitstun > 0:
                if player.blockingLow:
                    if player.animation_fill_once is False:
                        self.effects_animation.append(BlockAnimation(player.x, player.y - 25, True))
                        player.animation_fill_once = True
                    player.character.image = \
                        pygame.transform.scale(pygame.image.load
                                               ('Sprites/Gina/GinaCrouchBlock.png'), (130, 250))
                else:
                    if player.animation_fill_once is False:
                        self.effects_animation.append(BlockAnimation(player.x, player.y - 50, True))
                        player.animation_fill_once = True
                    player.character.image = \
                        pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaStandBlock.png'), (130, 250))
                player.hitstun -= 1
        else:
            if player.hitstun > 0:
                if player.blockingLow:
                    if player.animation_fill_once is False:
                        # NOTE(review): the third BlockAnimation argument is
                        # True on this side too, while being_damaged passes
                        # False for its flipped effect — confirm whether the
                        # block spark should be mirrored here.
                        self.effects_animation.append(BlockAnimation(player.x - 80, player.y - 25, True))
                        player.animation_fill_once = True
                    player.character.image = pygame.transform.flip(
                        pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaCrouchBlock.png'),
                                               (130, 250)), True, False)
                else:
                    if player.animation_fill_once is False:
                        self.effects_animation.append(BlockAnimation(player.x - 80, player.y - 50, True))
                        player.animation_fill_once = True
                    player.character.image = pygame.transform.flip(
                        pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaStandBlock.png'),
                                               (130, 250)), True, False)
                player.hitstun -= 1
        player.x += player.knockback * player.multiplier
        if player.hitstun <= 0:
            # Blockstun over: reset block-reaction state.
            player.animation_fill_once = False
            player.block_hit = False
            player.setAction = False
            player.hitstun = 0
            player.max_hitstun = 0
            player.knockback = 0
def push_back_grab(self, player):
"""
Plays the character's animation when repeling a grab from the opposing enemy.
:param player:
:return:
"""
if player.off_set:
player.x += player.off_set_value
player.off_set = False
player.off_set_value = 0
if player.facingRight and not player.setAction:
player.index = 0
player.multiplier = -1
player.setAction = True
elif not player.facingRight and not player.setAction:
player.index = 0
player.multiplier = 1
player.setAction = True
if player.multiplier == 1:
player.character.image = \
pygame.transform.scale(pygame.image.load
('Sprites/Gina/GinaHurt1.png'), (130, 250))
player.x -= 3
else:
player.character.image = \
pygame.transform.flip(pygame.transform.scale(pygame.image.load
('Sprites/Gina/GinaHurt1.png'), (130, 250)), True, False)
player.x += 3
player.index += 1
if player.index > 25:
self.hit_box.clear()
player.setAction = False
player.push_back = False
# -----------------------------------------------------------------------------------------------------------------
    def jumping(self, player):
        """
        Plays either the character's jump animation in neutral, forward or backwards. This method also checks to
        see if the player is currently attacking in the air.

        Vertical/horizontal speed is phased by how far current_jump has
        progressed relative to the jump animation length.  While the Jump C
        attack plays, `momentum` is zeroed so the jump arc freezes.  On
        landing (current_jump <= 0 while descending) every jump and
        air-attack flag is reset and y snaps back to yOriginal.
        :param player:
        :return:
        """
        if player.facingRight and not player.setAction:
            player.index = 0
            player.multiplier = 1
            player.setAction = True
        elif not player.facingRight and not player.setAction:
            player.index = 0
            player.multiplier = -1
            player.setAction = True
        momentum = 1
        if player.jump_attack_c is True:
            # Jump C freezes the jump arc while it plays.
            momentum = 0
        else:
            momentum = 1
        if player.back_jumping:
            if player.isJumping:
                # Ascent: four sprite phases keyed off current_jump progress.
                if player.current_jump < len(player.character.jump)/6:
                    if player.multiplier == 1:
                        player.character.image = \
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump1.png'), (130, 250))
                    else:
                        player.character.image = pygame.transform.flip(
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump1.png'),
                                                   (130, 250)), True, False)
                    player.y -= 0 * momentum
                elif player.current_jump > len(player.character.jump)/6 and \
                        player.current_jump < len(player.character.jump)/4:
                    if player.multiplier == 1:
                        player.character.image = \
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump2.png'), (130, 250))
                    else:
                        player.character.image = pygame.transform.flip(
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump2.png'),
                                                   (130, 250)), True, False)
                    player.y -= 12.2* momentum
                    player.x -= 10* momentum
                elif player.current_jump > len(player.character.jump)/4 and \
                        player.current_jump < len(player.character.jump)/1.7:
                    if player.multiplier == 1:
                        player.character.image = \
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump3.png'), (130, 250))
                    else:
                        player.character.image = pygame.transform.flip(
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump3.png'),
                                                   (130, 250)), True, False)
                    player.y -= 18.1* momentum
                    player.x -= 8* momentum
                elif player.current_jump > len(player.character.jump)/1.7 and \
                        player.current_jump < len(player.character.jump):
                    if player.multiplier == 1:
                        player.character.image = \
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump4.png'), (130, 250))
                    else:
                        player.character.image = pygame.transform.flip(
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump4.png'),
                                                   (130, 250)), True, False)
                    player.y -= 15.3* momentum
                    player.x -= 7* momentum
                elif player.current_jump > len(player.character.jump):
                    player.y -= 12.1* momentum
                player.current_jump += 12.0* momentum
            elif player.isDescending:
                # Descent: same phases in reverse order.
                if player.current_jump < len(player.character.jump) and \
                        player.current_jump > len(player.character.jump)/1.7:
                    player.y += 10* momentum
                    player.x -= 7* momentum
                elif player.current_jump < len(player.character.jump)/1.7 and \
                        player.current_jump > len(player.character.jump)/4:
                    if player.multiplier == 1:
                        player.character.image = \
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump3.png'), (130, 250))
                    else:
                        player.character.image = pygame.transform.flip(
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump3.png'),
                                                   (130, 250)), True, False)
                    player.y += 15* momentum
                    player.x -= 6* momentum
                elif player.current_jump < len(player.character.jump)/4 and \
                        player.current_jump > len(player.character.jump)/6:
                    player.y += 18* momentum
                    player.x -= 5* momentum
                elif player.current_jump < len(player.character.jump)/6:
                    player.y += 22* momentum
                player.current_jump -= 12.0* momentum
        elif player.forward_jumping:
            if player.isJumping:
                if player.current_jump < len(player.character.jump)/6:
                    if player.multiplier == 1:
                        player.character.image = \
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump1.png'), (130, 250))
                    else:
                        player.character.image = pygame.transform.flip(
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump1.png'),
                                                   (130, 250)), True, False)
                    player.y -= 0* momentum
                elif player.current_jump > len(player.character.jump)/6 and \
                        player.current_jump < len(player.character.jump)/4:
                    if player.multiplier == 1:
                        player.character.image = \
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump2.png'), (130, 250))
                    else:
                        player.character.image = pygame.transform.flip(
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump2.png'),
                                                   (130, 250)), True, False)
                    player.y -= 12.2* momentum
                    player.x += 4* momentum
                elif player.current_jump > len(player.character.jump)/4 and \
                        player.current_jump < len(player.character.jump)/1.7:
                    if player.multiplier == 1:
                        player.character.image = \
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump3.png'), (130, 250))
                    else:
                        player.character.image = pygame.transform.flip(
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump3.png'),
                                                   (130, 250)), True, False)
                    player.y -= 17.1* momentum
                    player.x += 6* momentum
                elif player.current_jump > len(player.character.jump)/1.7 and \
                        player.current_jump < len(player.character.jump):
                    if player.multiplier == 1:
                        player.character.image = \
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump4.png'), (130, 250))
                    else:
                        player.character.image = pygame.transform.flip(
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump4.png'),
                                                   (130, 250)), True, False)
                    player.y -= 15.3* momentum
                    player.x += 8* momentum
                elif player.current_jump > len(player.character.jump):
                    player.y -= 12.1* momentum
                player.current_jump += 12.0* momentum
            elif player.isDescending:
                if player.current_jump < len(player.character.jump) and \
                        player.current_jump > len(player.character.jump)/1.7:
                    player.y += 10* momentum
                    player.x += 8* momentum
                elif player.current_jump < len(player.character.jump)/1.7 and \
                        player.current_jump > len(player.character.jump)/4:
                    if player.multiplier == 1:
                        player.character.image = \
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump3.png'), (130, 250))
                    else:
                        player.character.image = pygame.transform.flip(
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump3.png'),
                                                   (130, 250)), True, False)
                    player.y += 15* momentum
                    player.x += 6* momentum
                elif player.current_jump < len(player.character.jump)/4 and \
                        player.current_jump > len(player.character.jump)/6:
                    player.y += 18* momentum
                    player.x += 4* momentum
                elif player.current_jump < len(player.character.jump)/6:
                    player.y += 22* momentum
                player.current_jump -= 12.0* momentum
        elif player.neutral_jumping:
            if player.isJumping:
                # Neutral jump: same vertical phases, no horizontal drift.
                if player.current_jump < len(player.character.jump)/6:
                    if player.multiplier == 1:
                        player.character.image = \
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump1.png'), (130, 250))
                    else:
                        player.character.image = pygame.transform.flip(
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump1.png'),
                                                   (130, 250)), True, False)
                    player.y -= 0* momentum
                elif player.current_jump > len(player.character.jump)/6 and \
                        player.current_jump < len(player.character.jump)/4:
                    if player.multiplier == 1:
                        player.character.image = \
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump2.png'), (130, 250))
                    else:
                        player.character.image = pygame.transform.flip(
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump2.png'),
                                                   (130, 250)), True, False)
                    player.y -= 12.2* momentum
                elif player.current_jump > len(player.character.jump)/4 and \
                        player.current_jump < len(player.character.jump)/1.7:
                    if player.multiplier == 1:
                        player.character.image = \
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump3.png'), (130, 250))
                    else:
                        player.character.image = pygame.transform.flip(
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump3.png'),
                                                   (130, 250)), True, False)
                    player.y -= 17.1* momentum
                elif player.current_jump > len(player.character.jump)/1.7 and \
                        player.current_jump < len(player.character.jump):
                    if player.multiplier == 1:
                        player.character.image = \
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump4.png'), (130, 250))
                    else:
                        player.character.image = pygame.transform.flip(
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump4.png'),
                                                   (130, 250)), True, False)
                    player.y -= 15.3* momentum
                elif player.current_jump > len(player.character.jump):
                    player.y -= 12.1* momentum
                player.current_jump += 12.0* momentum
            elif player.isDescending:
                if player.current_jump < len(player.character.jump) and \
                        player.current_jump > len(player.character.jump)/1.7:
                    player.y += 10* momentum
                elif player.current_jump < len(player.character.jump)/1.7 and \
                        player.current_jump > len(player.character.jump)/4:
                    if player.multiplier == 1:
                        player.character.image = \
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump3.png'), (130, 250))
                    else:
                        player.character.image = pygame.transform.flip(
                            pygame.transform.scale(pygame.image.load('Sprites/Gina/GinaJump3.png'),
                                                   (130, 250)), True, False)
                    player.y += 15* momentum
                elif player.current_jump < len(player.character.jump)/4 and \
                        player.current_jump > len(player.character.jump)/6:
                    player.y += 18* momentum
                elif player.current_jump < len(player.character.jump)/6:
                    player.y += 22* momentum
                player.current_jump -= 12.0* momentum
        # Air attacks run inside the jump; air_attack_once blocks a second one.
        if player.jump_attack_a and player.air_attack_once is False:
            self.jump_a(player)
        if player.jump_attack_b and player.air_attack_once is False:
            self.jump_b(player)
        if player.jump_attack_c and player.air_attack_once is False:
            self.jump_c(player)
        if player.current_jump >= len(player.character.jump) and player.isJumping:
            # Apex reached: switch to the descent half.
            player.isJumping = False
            player.isDescending = True
        elif player.current_jump <= 0 and player.isDescending:
            # Landed: restore the idle sprite and reset all jump state.
            if player.multiplier == 1:
                player.character.image = player.character.standing[0]
            else:
                player.character.image = pygame.transform.flip(
                    player.character.standing[0], True, False)
            self.hit_box.clear()
            if player.off_set:
                player.off_set = False
                player.x += player.off_set_value
                player.off_set_value = 0
            player.isDescending = False
            player.neutral_jumping = False
            player.forward_jumping = False
            player.back_jumping = False
            player.jump_attack_a = False
            player.jump_attack_b = False
            player.jump_attack_c = False
            player.air_attack_once = False
            player.hit_confirm = False
            player.setAction = False
            player.y = player.yOriginal
            player.current_jump = 0
def forward_dash(self, player):
    """
    Plays the character's dash forward animation and updates their hurt box.

    The dash moves the player forward in three speed phases across the
    animation, with a small vertical dip and recovery.  When
    ``player.dash_collide`` is set, horizontal movement is zeroed so the
    player cannot push through an obstacle while the animation finishes.

    :param player: the player being dashed; position, animation index and
        dash-state flags are mutated in place.
    :return: None
    """
    collide_multiplier = 1
    # Lock the facing direction for the whole dash on the first frame.
    if player.facingRight and not player.setAction:
        player.multiplier = 1
        player.setAction = True
    elif not player.facingRight and not player.setAction:
        player.multiplier = -1
        player.setAction = True
    if player.dash_collide is True:
        collide_multiplier = 0
    dash_len = len(player.character.dash)
    if player.currentDash < dash_len / 2:
        player.x += 6.7 * player.multiplier * collide_multiplier
        player.y -= .53
    elif player.currentDash > dash_len / 2 and \
            player.currentDash < dash_len / 1.5:
        # Bug fix: this middle phase previously ignored `collide_multiplier`,
        # so a colliding dash kept pushing forward mid-animation while the
        # first and last phases were correctly zeroed.
        player.x += 5.75 * player.multiplier * collide_multiplier
    elif player.currentDash > dash_len / 1.5:
        player.x += 5.175 * player.multiplier * collide_multiplier
        player.y += 0.46
    player.currentDash += 6
    if player.currentDash >= dash_len:
        # Dash finished: clear state and snap back to the original height.
        player.isDashing = False
        player.dash_collide = False
        player.setAction = False
        player.y = player.yOriginal
        player.currentDash = 0
    if player.index >= dash_len:
        player.index = 0
    else:
        if player.multiplier == 1:
            player.character.image = player.character.dash[player.index]
        else:
            # Facing left: mirror the frame horizontally.
            player.character.image = \
                pygame.transform.flip(player.character.dash[player.index], True, False)
        player.index += 1
def back_dash(self, player, x):
    """
    Plays the character's back dash animation and updates their hurt box.

    :param player: the player backing off; position, animation index and
        dash-state flags are mutated in place.
    :param x: unused here; kept for signature compatibility with callers.
    :return: None
    """
    # Fix the retreat direction once, on the first frame of the dash.
    if not player.setAction:
        player.multiplier = 1 if player.facingRight else -1
        player.setAction = True
    frames = player.character.backdash
    total = len(frames)
    progress = player.currentDash
    if progress < total / 2:
        # Quick initial hop backwards with a slight rise.
        player.x -= 3.7 * player.multiplier
        player.y -= .56
    elif total / 2 < progress < total / 1.5:
        player.x -= 3.35 * player.multiplier
    elif progress > total / 1.5:
        # Landing phase: keep drifting back while settling down.
        player.x -= 3.175 * player.multiplier
        player.y += .48
    player.currentDash += 6
    if player.currentDash >= total:
        # Animation complete: clear the dash state and restore height.
        player.isBackDashing = False
        player.setAction = False
        player.y = player.yOriginal
        player.currentDash = 0
    if player.index >= total:
        player.index = 0
    else:
        frame = frames[player.index]
        if player.multiplier != 1:
            # Facing left: mirror the frame horizontally.
            frame = pygame.transform.flip(frame, True, False)
        player.character.image = frame
        player.index += 1
# -----------------------------------------------------------------------------------------------------------------
def update(self, player, x, two):
    """
    This method plays and checks all of the statuses of the player. If
    certain booleans are activated in the player class, the character will
    act accordingly to that action.

    Exactly one action branch runs per frame, highest priority first
    (hit-states, then jumps/dashes, then attacks, then movement/idle).

    :param player: the player whose state drives the dispatch.
    :param x: horizontal movement for this frame (negative = left,
        positive = right, 0 = idle/crouch).
    :param two: the opposing player; forwarded to throw/lose handlers.
    :return: None
    """

    def _animate(frames):
        # Advance one frame of `frames`, mirroring horizontally when the
        # player faces left; wraps the shared animation index.
        if player.index >= len(frames):
            player.index = 0
        else:
            if player.facingRight:
                player.character.image = frames[player.index]
            else:
                player.character.image = pygame.transform.flip(
                    frames[player.index], True, False)
            player.index += 1

    # Bug fix: iterate over a snapshot so removing an expired projectile
    # does not skip the next element of self.hit_box (removing from a list
    # while iterating it skips the element after the removed one).
    for hits in list(self.hit_box):
        if isinstance(hits, GinaFireBall):
            if hits.distance > 0:
                if hits.distance_traveled >= hits.distance:
                    self.hit_box.remove(hits)
            else:
                if hits.distance_traveled <= hits.distance:
                    self.hit_box.remove(hits)
    if player.block_hit:
        self.being_blocked(player)
    elif player.got_hit:
        self.being_damaged(player)
    elif player.got_air_hit:
        self.being_air_damaged(player)
    elif player.neutral_jumping or player.forward_jumping \
            or player.back_jumping or player.isDescending:
        self.jumping(player)
    elif player.isDashing:
        player.meter_points += 0.2
        self.forward_dash(player)
    elif player.isBackDashing:
        self.back_dash(player, x)
    elif player.grabbed:
        # Being grabbed: the grabbing player drives this player's state.
        pass
    elif player.grabbing:
        self.grab_attack(player)
    elif player.throw:
        self.throw_attack(player, two)
    elif player.push_back:
        self.push_back_grab(player)
    elif player.crouch_attack_a:
        self.crouch_a(player)
    elif player.attack_a:
        self.stand_a(player)
    elif player.crouch_attack_b:
        self.crouch_b(player)
    elif player.attack_b:
        self.stand_b(player)
    elif player.crouch_attack_c:
        self.crouch_c(player)
    elif player.attack_c:
        self.stand_c(player)
    elif player.special_one:
        self.special_one(player)
    elif player.winner:
        self.win_state(player)
    elif player.loser:
        self.lose_state(player, two)
    elif player.getting_up:
        self.wake_up(player)
    elif x < 0:
        # Moving left: walk backward when facing right, forward otherwise.
        # (The original duplicated the animation loop with an inner
        # facingRight check that was dead code in the walk branches;
        # _animate reproduces the exact same frames.)
        player.x += x
        if player.facingRight:
            _animate(player.character.walkBackward)
        else:
            _animate(player.character.walkFoward)
    elif x > 0:
        # Moving right: walk forward when facing right, backward otherwise.
        player.x += x
        if player.facingRight:
            _animate(player.character.walkFoward)
        else:
            _animate(player.character.walkBackward)
    else:
        # x == 0: idle, either standing or crouching.
        if not player.crouching:
            _animate(player.character.standing)
        else:
            _animate(player.character.crouching)
| 48.490133
| 130
| 0.520992
| 11,420
| 105,660
| 4.708406
| 0.03021
| 0.060759
| 0.034369
| 0.056054
| 0.92799
| 0.91605
| 0.896039
| 0.870895
| 0.845007
| 0.816459
| 0
| 0.040525
| 0.378308
| 105,660
| 2,178
| 131
| 48.512397
| 0.778042
| 0.043933
| 0
| 0.784129
| 0
| 0
| 0.042338
| 0.040837
| 0
| 0
| 0
| 0
| 0
| 1
| 0.013873
| false
| 0.000555
| 0.004994
| 0
| 0.019423
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4e2b98d350d6b35d3975c8daa161be68aa0efa9f
| 59,476
|
py
|
Python
|
core/train.py
|
kianakiaei/TSGL-EEGNet
|
c7597ea49c80116672005d59c070313ea44fe3c6
|
[
"MIT"
] | null | null | null |
core/train.py
|
kianakiaei/TSGL-EEGNet
|
c7597ea49c80116672005d59c070313ea44fe3c6
|
[
"MIT"
] | null | null | null |
core/train.py
|
kianakiaei/TSGL-EEGNet
|
c7597ea49c80116672005d59c070313ea44fe3c6
|
[
"MIT"
] | null | null | null |
# coding:utf-8
import os
import gc
import sys
import math
import copy
import time
import logging
import itertools
import numpy as np
from numpy.core.numeric import cross
import tensorflow as tf
import matplotlib.pyplot as plt
from tensorflow.python.keras.api._v2.keras import backend as K
from core.models import EEGNet, TSGLEEGNet, ShallowConvNet, DeepConvNet, MB3DCNN, EEGAttentionNet
from core.splits import StratifiedKFold
from core.callbacks import MyModelCheckpoint, EarlyStopping
from core.utils import standardization, computeKappa
_console = sys.stdout
def create_EEGAttentionNet(nClasses,
                           Samples,
                           Chans=22,
                           Colors=1,
                           F=9,
                           D=4,
                           kernLength=64,
                           optimizer=tf.keras.optimizers.Adam,
                           lrate=1e-3,
                           loss='sparse_categorical_crossentropy',
                           metrics=None,
                           summary=True):
    """
    Build and compile an EEGAttentionNet model.

    :param nClasses: number of output classes.
    :param Samples: number of time samples per trial.
    :param Chans: number of EEG channels.
    :param Colors: number of input color/depth planes.
    :param F: number of temporal filters (F1).
    :param D: depth multiplier.
    :param kernLength: temporal kernel length.
    :param optimizer: optimizer class; instantiated with `lrate`.
    :param lrate: learning rate passed to the optimizer.
    :param loss: loss identifier for `model.compile`.
    :param metrics: metrics list for `model.compile`; defaults to
        ['accuracy'].  (None sentinel avoids the shared mutable default.)
    :param summary: if True, print the model summary.
    :return: the compiled model.
    """
    # Fix: `metrics=['accuracy']` as a default is a shared mutable object.
    if metrics is None:
        metrics = ['accuracy']
    model = EEGAttentionNet(nClasses,
                            Chans=Chans,
                            Colors=Colors,
                            Samples=Samples,
                            kernLength=kernLength,
                            F1=F,
                            D=D)
    model.compile(optimizer=optimizer(lrate), loss=loss, metrics=metrics)
    if summary:
        model.summary()
    # export graph of the model
    # tf.keras.utils.plot_model(model, 'EEGNet.png', show_shapes=True)
    return model
def create_MB3DCNN(nClasses,
                   H,
                   W,
                   Samples,
                   optimizer=tf.keras.optimizers.Adam,
                   lrate=1e-3,
                   loss='sparse_categorical_crossentropy',
                   metrics=None,
                   summary=True):
    """
    Build and compile an MB3DCNN model.

    :param nClasses: number of output classes.
    :param H: height of the electrode grid.
    :param W: width of the electrode grid.
    :param Samples: number of time samples per trial.
    :param optimizer: optimizer class; instantiated with `lrate`.
    :param lrate: learning rate passed to the optimizer.
    :param loss: loss identifier for `model.compile`.
    :param metrics: metrics list for `model.compile`; defaults to
        ['accuracy'].  (None sentinel avoids the shared mutable default.)
    :param summary: if True, print the model summary.
    :return: the compiled model.
    """
    # Fix: `metrics=['accuracy']` as a default is a shared mutable object.
    if metrics is None:
        metrics = ['accuracy']
    model = MB3DCNN(nClasses, H=H, W=W, Samples=Samples)
    model.compile(optimizer=optimizer(lrate), loss=loss, metrics=metrics)
    if summary:
        model.summary()
    # export graph of the model
    # tf.keras.utils.plot_model(model, 'MB3DCNN.png', show_shapes=True)
    return model
def create_EEGNet(nClasses,
                  Samples,
                  Chans=22,
                  F=9,
                  D=4,
                  Ns=4,
                  kernLength=64,
                  FSLength=16,
                  dropoutRate=0.5,
                  optimizer=tf.keras.optimizers.Adam,
                  lrate=1e-3,
                  loss='sparse_categorical_crossentropy',
                  metrics=None,
                  summary=True):
    """
    Build and compile an EEGNet model.

    :param nClasses: number of output classes.
    :param Samples: number of time samples per trial.
    :param Chans: number of EEG channels.
    :param F: number of temporal filters (F1).
    :param D: depth multiplier.
    :param Ns: scaling factor for F2 (F2 = nClasses * 2 * Ns).
    :param kernLength: temporal kernel length.
    :param FSLength: separable-convolution kernel length.
    :param dropoutRate: dropout probability.
    :param optimizer: optimizer class; instantiated with `lrate`.
    :param lrate: learning rate passed to the optimizer.
    :param loss: loss identifier for `model.compile`.
    :param metrics: metrics list for `model.compile`; defaults to
        ['accuracy'].  (None sentinel avoids the shared mutable default.)
    :param summary: if True, print the model summary.
    :return: the compiled model.
    """
    # Fix: `metrics=['accuracy']` as a default is a shared mutable object.
    if metrics is None:
        metrics = ['accuracy']
    model = EEGNet(nClasses,
                   Chans=Chans,
                   Samples=Samples,
                   kernLength=kernLength,
                   FSLength=FSLength,
                   dropoutRate=dropoutRate,
                   F1=F,
                   D=D,
                   F2=nClasses * 2 * Ns)
    model.compile(optimizer=optimizer(lrate), loss=loss, metrics=metrics)
    if summary:
        model.summary()
    # export graph of the model
    # tf.keras.utils.plot_model(model, 'EEGNet.png', show_shapes=True)
    return model
def create_TSGLEEGNet(nClasses,
                      Samples,
                      Chans=22,
                      Colors=1,
                      F=9,
                      D=4,
                      Ns=4,
                      kernLength=64,
                      FSLength=16,
                      dropoutRate=0.5,
                      l1=1e-4,
                      l21=1e-4,
                      tl1=1e-5,
                      optimizer=tf.keras.optimizers.Adam,
                      lrate=1e-3,
                      loss='sparse_categorical_crossentropy',
                      metrics=None,
                      summary=True):
    """
    Build and compile a TSGLEEGNet model.

    :param nClasses: number of output classes.
    :param Samples: number of time samples per trial.
    :param Chans: number of EEG channels.
    :param Colors: number of input color/depth planes.
    :param F: number of temporal filters (F1).
    :param D: depth multiplier.
    :param Ns: scaling factor for F2 (F2 = nClasses * 2 * Ns).
    :param kernLength: temporal kernel length.
    :param FSLength: separable-convolution kernel length.
    :param dropoutRate: dropout probability.
    :param l1: L1 regularization weight.
    :param l21: L2,1 (group sparse) regularization weight.
    :param tl1: TL1 regularization weight.
    :param optimizer: optimizer class; instantiated with `lrate`.
    :param lrate: learning rate passed to the optimizer.
    :param loss: loss identifier for `model.compile`.
    :param metrics: metrics list for `model.compile`; defaults to
        ['accuracy'].  (None sentinel avoids the shared mutable default.)
    :param summary: if True, print the model summary.
    :return: the compiled model.
    """
    # Fix: `metrics=['accuracy']` as a default is a shared mutable object.
    if metrics is None:
        metrics = ['accuracy']
    model = TSGLEEGNet(nClasses,
                       Chans=Chans,
                       Samples=Samples,
                       Colors=Colors,
                       kernLength=kernLength,
                       FSLength=FSLength,
                       dropoutRate=dropoutRate,
                       F1=F,
                       D=D,
                       F2=nClasses * 2 * Ns,
                       l1=l1,
                       l21=l21,
                       tl1=tl1)
    model.compile(optimizer=optimizer(lrate), loss=loss, metrics=metrics)
    if summary:
        model.summary()
    # export graph of the model
    # tf.keras.utils.plot_model(model, 'rawEEGConvNet.png', show_shapes=True)
    return model
class crossValidate(object):
    '''
    Class for K-fold Cross Validation.

    This framework can collect `model`, `loss`, `acc` and `history` from each
    fold, and save them into files.
    Data spliting methods from sklearn.model_selection are supported. you can
    pass the classes as `splitMethod`.
    This class has implemented a magic method `__call__()` wrapping `call()`,
    for which it can be used like a function.

    Parameters
    ----------
    ```txt
    built_fn        : function, Create Training model which need to cross-validate.
                      Please using string `create_` at the begining of function name,
                      like `create_modelname`.
    dataGent        : class, Generate data for @built_fn, shapes (n_trails, ...).
                      It should discriminate data and label.
                      More details see core.generators.
    splitMethod     : class, Support split methods from module sklearn.model_selection.
    kFold           : int, Number of K-fold.
    shuffle         : bool, Optional Whether to shuffle each class's samples before
                      splitting into batches, default = False.
    random_state    : int, RandomState instance or None, optional, default = None.
                      If int, random_state is the seed used by the random number
                      generator; If RandomState instance, random_state is the random
                      number generator; If None, the random number generator is the
                      RandomState instance used by np.random. Used when shuffle == True.
    subs            : list, list of subjects' number, like `range(1, 10)`.
    cropping        : bool, Switch of cropped training. Default = False.
    winLength       : int, cropping window length, default = 2*srate.
    cpt             : float, cropping sencond, optional, only available when
                      `winLength` is not specified.
    step            : int, cropping step, default = 4.
    standardizing   : bool, Switch of standardizing data. Default = True.
    batch_size      : int, Batch size.
    epochs          : int, Training epochs.
    patience        : int, Early stopping patience.
    verbose         : int, One of 0, 1 and 2.
    *a, *args       : tuple, Parameters used by @dataGent and @built_fn respectively
    **kw, **kwargs  : dict, Parameters used by @dataGent and @built_fn respectively,
                      **kw should include parameters called `beg`, `end` and `srate`.
    ```

    Returns
    -------
    ```txt
    avg_acc         : list, Average accuracy for each subject with K-fold Cross
                      Validation, and total average accuracy is at the last of
                      the list
    avg_kappa       : list, Average kappa for each subject with K-fold Cross
                      Validation, and total average kappa is at the last of
                      the list
    ```

    Example
    -------
    ```python
    from core.splits import StratifiedKFold

    def create_model(Samples, *args, summary=True, **kwargs):
        ...
        return keras_model

    class dataGenerator:
        def __init__(self, *a, beg=0, end=4, srate=250, **kw):
            ...

        def __call__(self, filepath, label=False):
            if label:
                ...
                return label
            else:
                ...
                return data
        ...
    ...
    avg_acc = crossValidate(
        create_model,
        dataGenerator,
        beg=0,
        end=4,
        srate=250,
        splitMethod=StratifiedKFold,
        kFold=10,
        subs=range(1, 10),
        *a,
        **kw)(*args, **kwargs)
    ```

    Note
    ----
    More details to see the codes.
    '''
    def __init__(self,
                 built_fn,
                 dataGent,
                 splitMethod=StratifiedKFold,
                 traindata_filepath=None,
                 testdata_filepath=None,
                 datadir=None,
                 beg=0.,
                 end=4.,
                 srate=250,
                 kFold=10,
                 shuffle=False,
                 random_state=None,
                 subs: list = range(1, 10),
                 cropping=False,
                 winLength=None,
                 cpt=None,
                 step=25,
                 standardizing=True,
                 batch_size=10,
                 epochs=300,
                 patience=100,
                 verbose=2,
                 preserve_initfile=False,
                 reinit=True,
                 *args,
                 **kwargs):
        self.built_fn = built_fn
        # dataGent is instantiated right away; *args/**kwargs are shared
        # between the data generator and the model builder.
        self.dataGent = dataGent(beg=beg,
                                 end=end,
                                 srate=srate,
                                 *args,
                                 **kwargs)
        self.beg = beg
        self.end = end
        self.srate = srate
        self.splitMethod = splitMethod
        self.traindata_filepath = traindata_filepath
        self.testdata_filepath = testdata_filepath
        self.datadir = datadir
        self.kFold = kFold
        self.shuffle = shuffle
        self.random_state = random_state
        self.subs = subs
        self.cropping = cropping
        self.winLength = winLength
        self.cpt = cpt
        self.step = step
        self.standardizing = standardizing
        self.batch_size = batch_size
        self.epochs = epochs
        self.patience = patience
        self.verbose = verbose
        self.preserve_initfile = preserve_initfile
        self.reinit = reinit
        self.args = args
        self.kwargs = kwargs
        # Number of time samples per trial.
        self.Samples = math.ceil(self.end * self.srate - self.beg * self.srate)
        self._check_params()

        if self.datadir:
            # Dataset filename prefix: first character of the first file found
            # under datadir (e.g. 'A' for 'A01T.mat').
            for root, dirs, files in os.walk(self.datadir):
                if files:
                    self.dn = files[0][0]
                    break
        else:
            self.dn = ''

        # Model name derived from the builder's name after the `create_` prefix.
        self.modelstr = built_fn.__name__[7:]
        if self.splitMethod.__name__ == 'AllTrain':
            self.validation_name = 'Average Validation'
        else:
            self.validation_name = 'Cross Validation'
        # Fold-boundary flags shared between call() and the data generators.
        self._new_fold = True
        self._last_batch = False
        # _readed marks whether the current subject's data has been loaded.
        self._readed = False
        self.X1 = None  # test data
        self.Y1 = None  # test labels
        self.X2 = None  # train data
        self.Y2 = None  # train labels

        if not os.path.exists('model'):
            os.makedirs('model')
        if not os.path.exists('result'):
            os.makedirs('result')

        # cropped training: validate/derive the cropping window and step
        if self.winLength:
            if not isinstance(self.winLength, int):
                raise TypeError('`winLength` must be passed as int.')
            if self.winLength > (self.end - self.beg) * self.srate:
                raise ValueError(
                    '`winLength` must less than or equal (`end` - '
                    '`beg`) * `srate`.')
        if self.cpt and not self.winLength:
            if (isinstance(self.cpt, float) or isinstance(self.cpt, int)):
                if self.cpt <= self.end - self.beg:
                    self.winLength = self.cpt * self.srate
                else:
                    raise ValueError(
                        '`cpt` must less than or equal `end` - `beg`.')
            else:
                raise TypeError('`cpt` must be passed as int or float.')
        if not self.winLength:
            self.winLength = 2 * self.srate
        if self.step:
            if not isinstance(self.step, int):
                raise TypeError('`step` must be passed as int.')
        else:
            self.step = 4

    def call(self, *args, **kwargs):
        '''
        Run the full cross-validation loop over all subjects and folds.

        Trains one model per fold (per window batch when cropping), keeps
        the best checkpoint, evaluates accuracy and kappa on the test set,
        and writes per-fold and summary result files.  Returns
        (avg_acci, avg_kappai) with the overall averages appended last.
        '''
        initfile = os.path.join('.', 'CV_initweight.h5')
        tm = time.localtime()
        dirname = (
            'CV_{0:d}_{1:0>2d}_{2:0>2d}_{3:0>2d}_{4:0>2d}_{5:0>2d}_{6:s}'.
            format(tm.tm_year, tm.tm_mon, tm.tm_mday, tm.tm_hour, tm.tm_min,
                   tm.tm_sec, self.modelstr))
        if not os.path.exists(os.path.join('model', dirname)):
            os.mkdir(os.path.join('model', dirname))
        if not os.path.exists(os.path.join('result', dirname)):
            os.mkdir(os.path.join('result', dirname))

        if self.cropping:
            gent = self._gent_cropped_data
            # Shrink the effective sample count to the cropped window span.
            self.Samples -= self.winLength
        else:
            gent = self._gent_data

        if not self.reinit:
            # One shared model; its initial weights are saved and re-loaded
            # at the end of every fold instead of rebuilding the model.
            model = self.built_fn(*args, **kwargs, Samples=self.Samples)
            model.save_weights(initfile)

        earlystopping = EarlyStopping(monitor='val_loss',
                                      min_delta=0,
                                      patience=self.patience,
                                      verbose=0,
                                      mode='auto')

        filename = ''
        # Encode the hyper-parameters into the output filenames.
        for key in kwargs.keys():
            if key in ['l1', 'l21', 'tl1']:
                filename += '{0:s}({1:.8f})_'.format(key, kwargs[key])
            else:
                filename += '{0:s}({1:0>2d})_'.format(key, kwargs[key])

        avg_acci = []
        avg_kappai = []
        win_subs_list = []
        for i in self.subs:
            accik = []
            kappaik = []
            k = 0  # count kFolds
            # cropped training
            t = 0  # record model's saving time
            c = 0  # count windows
            win = 0  # selected windows
            win_list = []  # selected windows list
            for data in gent(subject=i):
                if self._new_fold:  # new fold for cropped training
                    self._new_fold = False
                    t = 0
                    c = 0
                    if self.reinit:
                        model = self.built_fn(*args,
                                              **kwargs,
                                              Samples=self.Samples)
                    k += 1
                    filepath = os.path.join(
                        'result', dirname,
                        filename + '{:s}.txt'.format(self.modelstr))
                    # Progress is printed into the result file by temporarily
                    # redirecting stdout, then echoed back to the console.
                    with open(filepath, 'w+') as f:
                        sys.stdout = f
                        print(('{0:s} {1:d}-fold ' + self.validation_name +
                               ' Accuracy').format(self.modelstr, self.kFold))
                        print('Subject {:0>2d} fold {:0>2d} in processing'.
                              format(i, k))
                        sys.stdout = _console
                        f.seek(0, 0)
                        for line in f.readlines():
                            print(line)
                        f.close()
                    filepath = os.path.join(
                        'model', dirname, filename + self.dn +
                        '0{0:d}T_{1:s}({2:d}).h5'.format(i, self.modelstr, k))
                    checkpointer = MyModelCheckpoint(filepath=filepath,
                                                     verbose=1,
                                                     save_best_only=True,
                                                     statistic_best=True,
                                                     p=0.05)
                    history = {}
                else:
                    c += 1
                # TODO: fit(), evaluate() with tf.data.Dataset, then `self._new_fold`
                # and `self._last_batch` will be DEPRECATED.
                history = dict(
                    list(history.items()) + list(
                        model.fit(x=data['x_train'],
                                  y=data['y_train'],
                                  batch_size=self.batch_size,
                                  epochs=self.epochs,
                                  callbacks=[checkpointer, earlystopping],
                                  verbose=self.verbose,
                                  validation_data=[
                                      data['x_val'], data['y_val']
                                  ]).history.items()))
                if self.cropping:
                    # The checkpoint file's mtime changes only when a better
                    # model was saved; remember which window produced it.
                    if not t == os.path.getmtime(checkpointer._filepath):
                        t = os.path.getmtime(checkpointer._filepath)
                        win = c
                # tf.keras.models.Model.fit()
                # tf.keras.models.Model.evaluate()
                # tf.data.Dataset.from_generator()

                # load the best model for cropped training or evaluating
                # its accuracy
                model.load_weights(filepath)
                if self._last_batch:  # the last batch for cropped training
                    self._last_batch = False
                    if self.cropping:
                        win_list.append(win)
                        x_test = data['x_test'][:, :, win *
                                                self.step:win * self.step +
                                                self.Samples, :]
                        pd = model.predict(x_test, verbose=0)
                        pred = np.argmax(pd, axis=1)
                        acc = np.mean(
                            np.squeeze(pred) == np.squeeze(data['y_test']))
                        kappa = computeKappa(pred, data['y_test'])
                        print(
                            'win: {:0>2d}\nacc: {:.2%}\nkappa: {:.4f}'.format(
                                win, acc, kappa))
                    else:
                        loss, acc = model.evaluate(data['x_test'],
                                                   data['y_test'],
                                                   batch_size=self.batch_size,
                                                   verbose=self.verbose)
                        _pred = model.predict(data['x_test'],
                                              batch_size=self.batch_size,
                                              verbose=self.verbose)
                        pred = np.argmax(_pred, axis=1)
                        kappa = computeKappa(pred, data['y_test'])
                    # save the train history
                    filepath = filepath[:-3] + '.npy'
                    np.save(filepath, history)
                    # reset model's weights to train a new one next fold
                    if os.path.exists(initfile) and not self.reinit:
                        model.reset_states()
                        model.load_weights(initfile)
                    if self.reinit:
                        K.clear_session()
                        gc.collect()
                    accik.append(acc)
                    kappaik.append(kappa)
            avg_acci.append(np.average(np.array(accik)))
            avg_kappai.append(np.average(np.array(kappaik)))
            win_subs_list.append(win_list)
            print(win_list)
            # Force re-reading data for the next subject.
            self._readed = False
            # NOTE(review): `model` is deleted after every subject; with
            # reinit=False the shared model would be gone for the next
            # subject's first fit — confirm intended behavior.
            del model
        avg_acc = np.average(np.array(avg_acci))
        avg_kappa = np.average(np.array(avg_kappai))
        filepath = os.path.join(
            'result',
            'CV_{0:d}_{1:0>2d}_{2:0>2d}_{3:0>2d}_{4:0>2d}_{5:0>2d}_' \
            '{6:s}.txt'.format(tm.tm_year, tm.tm_mon, tm.tm_mday, tm.tm_hour,
                               tm.tm_min, tm.tm_sec, self.modelstr))
        # Summary report: per-subject accuracy/kappa plus overall averages.
        with open(filepath, 'w+') as f:
            sys.stdout = f
            print(('{0:s} {1:d}-fold ' + self.validation_name +
                   ' Accuracy (kappa)').format(self.modelstr, self.kFold))
            for i in range(len(self.subs)):
                print('Subject {0:0>2d}: {1:.2%} ({2:.4f})'.format(
                    self.subs[i], avg_acci[i], avg_kappai[i]),
                      end='')
                if self.cropping:
                    # Report the most frequently selected window per subject.
                    print(', Window:{:0>2d}'.format(win_subs_list[i][np.argmax(
                        np.bincount(win_subs_list[i]))]))
                else:
                    print()
            print('Average   : {0:.2%} ({1:.4f})'.format(avg_acc, avg_kappa))
            sys.stdout = _console
            f.seek(0, 0)
            for line in f.readlines():
                print(line)
            f.close()
        if os.path.exists(initfile) and not self.preserve_initfile:
            os.remove(initfile)
        avg_acci.append(avg_acc)
        avg_kappai.append(avg_kappa)
        return avg_acci, avg_kappai

    def __call__(self, *args, **kwargs):
        '''Wraps `call()`.'''
        return self.call(*args, **kwargs)

    def getConfig(self):
        '''Return the current configuration as a plain dict.'''
        config = {
            'built_fn': self.built_fn,
            'dataGent': self.dataGent,
            'splitMethod': self.splitMethod,
            'traindata_filepath': self.traindata_filepath,
            'testdata_filepath': self.testdata_filepath,
            'datadir': self.datadir,
            'beg': self.beg,
            'end': self.end,
            'srate': self.srate,
            'kFold': self.kFold,
            'shuffle': self.shuffle,
            'random_state': self.random_state,
            'subs': self.subs,
            'cropping': self.cropping,
            'winLength': self.winLength,
            'step': self.step,
            'standardizing': self.standardizing,
            'batch_size': self.batch_size,
            'epochs': self.epochs,
            'patience': self.patience,
            'verbose': self.verbose,
            'preserve_initfile': self.preserve_initfile,
            'reinit': self.reinit,
            'args': self.args,
            'kwargs': self.kwargs
        }
        return config

    def setConfig(self,
                  built_fn,
                  dataGent,
                  splitMethod=StratifiedKFold,
                  traindata_filepath=None,
                  testdata_filepath=None,
                  datadir=None,
                  beg=0.,
                  end=4.,
                  srate=250,
                  kFold=10,
                  shuffle=False,
                  random_state=None,
                  subs: list = range(1, 10),
                  cropping=False,
                  winLength=None,
                  cpt=None,
                  step=25,
                  standardizing=True,
                  batch_size=10,
                  epochs=300,
                  patience=100,
                  verbose=2,
                  preserve_initfile=False,
                  reinit=True,
                  *args,
                  **kwargs):
        '''
        Re-configure the instance in place.  Mirrors `__init__` (see its
        docstring for parameter meanings).
        '''
        self.built_fn = built_fn
        self.dataGent = dataGent(beg=beg,
                                 end=end,
                                 srate=srate,
                                 *args,
                                 **kwargs)
        self.beg = beg
        self.end = end
        self.srate = srate
        self.splitMethod = splitMethod
        self.traindata_filepath = traindata_filepath
        self.testdata_filepath = testdata_filepath
        self.datadir = datadir
        self.kFold = kFold
        self.shuffle = shuffle
        self.random_state = random_state
        self.subs = subs
        self.cropping = cropping
        self.winLength = winLength
        self.cpt = cpt
        self.step = step
        self.standardizing = standardizing
        self.batch_size = batch_size
        self.epochs = epochs
        self.patience = patience
        self.verbose = verbose
        self.preserve_initfile = preserve_initfile
        self.reinit = reinit
        self.args = args
        self.kwargs = kwargs
        self.Samples = math.ceil(self.end * self.srate - self.beg * self.srate)
        self._check_params()

        if self.datadir:
            # NOTE(review): unlike __init__, there is no `else: self.dn = ''`
            # branch here, so `self.dn` keeps its previous value when
            # `datadir` is falsy — confirm this is intended.
            for root, dirs, files in os.walk(self.datadir):
                if files:
                    self.dn = files[0][0]
                    break

        self.modelstr = built_fn.__name__[7:]
        if self.splitMethod.__name__ == 'AllTrain':
            self.validation_name = 'Average Validation'
        else:
            self.validation_name = 'Cross Validation'
        self._new_fold = True
        self._last_batch = False
        self._readed = False
        self.X1 = None
        self.Y1 = None
        self.X2 = None
        self.Y2 = None

        if not os.path.exists('model'):
            os.makedirs('model')
        if not os.path.exists('result'):
            os.makedirs('result')

        # cropped training
        if self.winLength:
            if not isinstance(self.winLength, int):
                raise TypeError('`winLength` must be passed as int.')
            if self.winLength > (self.end - self.beg) * self.srate:
                raise ValueError(
                    '`winLength` must less than or equal (`end` - '
                    '`beg`) * `srate`.')
        if self.cpt and not self.winLength:
            if (isinstance(self.cpt, float) or isinstance(self.cpt, int)):
                if self.cpt <= self.end - self.beg:
                    self.winLength = self.cpt * self.srate
                else:
                    raise ValueError(
                        '`cpt` must less than or equal `end` - `beg`.')
            else:
                raise TypeError('`cpt` must be passed as int or float.')
        if not self.winLength:
            self.winLength = 2 * self.srate
        if self.step:
            if not isinstance(self.step, int):
                raise TypeError('`step` must be passed as int.')
        else:
            self.step = 4

    @staticmethod
    def _standardize(data: dict, trialaxis=0):
        '''Standardizing (z-score) on each trial, supports np.nan numbers'''
        # suppose every trials are independent to each other
        meta = ['x_train', 'x_test', 'x_val']

        # to prevent different objects be the same one
        data = copy.deepcopy(data)

        for s in meta:
            if s in data and not data[s] is None:
                _len = len(data[s].shape)
                if _len > 1:
                    # z-score over every axis except the trial axis
                    axis = list(range(_len))
                    axis.pop(trialaxis)
                    axis = tuple(axis)
                else:
                    axis = -1
                # z-score on trials
                data[s] = standardization(data[s], axis=axis)

        return data

    def _read_data(self, subject, mode):
        '''
        Read data from dataGent.

        Parameters
        ----------
        ```txt
        subject : int, Identifier of subject.
        mode    : str, One of 'train' and 'test'.
        ```

        Yields
        ------
        ```txt
        data    : tuple, (x, y).
        ```
        '''
        meta = ['train', 'test']
        if not isinstance(mode, str):
            raise TypeError('`mode` must be passed as string.')
        if not mode in meta:
            raise ValueError('`mode` must be one of \'train\' and \'test\'.')
        if mode == 'test':
            if not self.testdata_filepath:
                # Build the default path `<datadir>/Test/<dn>0<subject>E.mat`
                # and reset it afterwards so the next subject gets a new path.
                self.testdata_filepath = os.path.join(
                    self.datadir, 'Test',
                    self.dn + '0' + str(subject) + 'E.mat')
                yield self.dataGent(self.testdata_filepath)
                self.testdata_filepath = None
            else:
                yield self.dataGent(self.testdata_filepath)
        else:
            if not self.traindata_filepath:
                self.traindata_filepath = os.path.join(
                    self.datadir, 'Train',
                    self.dn + '0' + str(subject) + 'T.mat')
                yield self.dataGent(self.traindata_filepath)
                self.traindata_filepath = None
            else:
                yield self.dataGent(self.traindata_filepath)

    def _gent_data(self, subject):
        '''
        Generate (data, label) from dataGent.

        Parameters
        ----------
        ```txt
        subject : int, Identifier of subject.
        ```

        Yields
        ------
        ```txt
        data    : dict, Includes train, val and test data.
        ```
        '''
        data = {
            'x_train': None,
            'y_train': None,
            'x_val': None,
            'y_val': None,
            'x_test': None,
            'y_test': None
        }
        if not self._readed:
            # for once
            for (self.X1, self.Y1) in self._read_data(subject=subject,
                                                      mode='test'):
                pass
            for (self.X2, self.Y2) in self._read_data(subject=subject,
                                                      mode='train'):
                self._readed = True
        data['x_test'] = self.X1
        data['y_test'] = self.Y1
        # for multiple times
        for (x1, y1), (x2, y2) in self._spilt(self.X2, self.Y2):
            data['x_train'] = x1
            data['y_train'] = y1
            data['x_val'] = x2
            data['y_val'] = y2
            if self.standardizing:
                data = self._standardize(data)
            if data['x_val'] is None:
                # No validation split (e.g. AllTrain): validate on test data.
                data['x_val'] = data['x_test']
                data['y_val'] = data['y_test']
            # Each yield is one complete fold.
            self._new_fold = True
            self._last_batch = True
            yield data

    def _gent_cropped_data(self, subject):
        '''
        Generate cropped (data, label) from dataGent.
        Not including test data.

        Parameters
        ----------
        ```txt
        subject : int, Identifier of subject.
        ```

        Yields
        ------
        ```txt
        data    : dict, Includes train, val and test data.
        ```
        '''
        temp = {
            'x_train': None,
            'y_train': None,
            'x_val': None,
            'y_val': None,
            'x_test': None,
            'y_test': None
        }
        # L = number of cropping windows per trial.
        L = range(0, self.Samples + 1, self.step)
        L = len(L)
        print('len(L): {0:d}'.format(L))
        if not self._readed:
            # for once
            for (self.X1, self.Y1) in self._read_data(subject=subject,
                                                      mode='test'):
                pass
            for (self.X2, self.Y2) in self._read_data(subject=subject,
                                                      mode='train'):
                self._readed = True
        temp['x_test'] = self.X1
        temp['y_test'] = self.Y1
        for (x1, y1), (x2, y2) in self._spilt(self.X2, self.Y2):
            temp['x_train'] = x1
            temp['y_train'] = y1
            temp['x_val'] = x2
            temp['y_val'] = y2
            if temp['x_val'] is None:
                if self.standardizing:
                    data = self._standardize(temp)
                    temp['x_val'] = data['x_test']
                    temp['y_val'] = data['y_test']
                    temp['x_test'] = data['x_test']
                    temp['y_test'] = data['y_test']
                else:
                    # NOTE(review): `data` looks unbound on the first fold
                    # when standardizing is False — likely a latent bug;
                    # confirm before relying on this path.
                    data['x_train'] = x1
                    data['x_val'] = temp['x_val']
            else:
                if self.standardizing:
                    data = self._standardize(temp)
                    temp['x_test'] = data['x_test']
                    temp['y_test'] = data['y_test']
                else:
                    data['x_train'] = x1
                    data['x_val'] = x2
            i = 0
            # Yield one window batch at a time; mark the first and last so
            # call() knows when a fold starts and ends.
            for (temp['x_train'], temp['x_val']) in self._cropping_data(
                    (data['x_train'], data['x_val'])):
                i += 1
                if i == 1:
                    self._new_fold = True
                if i == L:
                    self._last_batch = True
                yield temp

    def _cropping_data(self, datas: tuple):
        # Slide a window of `winLength` samples over the time axis with
        # stride `step`, yielding one cropped view of each array per step.
        L = range(0, self.Samples + 1, self.step)
        for i in L:
            temp = ()
            for data in datas:
                temp += (data[:, :, i:i + self.winLength, :], )
            yield temp

    def _spilt(self, X, y, groups=None):
        """
        Generate indices to split data into training and test set.

        (Method name is a historical typo of `_split`, kept as-is because
        it is referenced throughout this class.)

        Parameters
        ----------
        X : array-like, shape (n_samples, n_features)
            Training data, where n_samples is the number of samples
            and n_features is the number of features.
        y : array-like, shape (n_samples,)
            The target variable for supervised learning problems.
        groups : array-like, with shape (n_samples,), optional
            Group labels for the samples used while splitting the dataset into
            train/test set. Action depends on the split method you choose.

        Yields
        ------
        train : ndarray
            The training set indices for that split.
        val : ndarray
            The validating set indices for that split.
        """
        sm = self.splitMethod(n_splits=self.kFold,
                              shuffle=self.shuffle,
                              random_state=self.random_state)
        for train_index, val_index in sm.split(X, y, groups):
            # (x_train, y_train), (x_val, y_val)
            if not train_index.any():
                raise ValueError('Training data shouldn\'t be empty.')
            elif not val_index.any():
                # Empty validation split (e.g. AllTrain) — caller falls
                # back to validating on the test set.
                yield (X[train_index], y[train_index]), (None, None)
            else:
                yield (X[train_index], y[train_index]), (X[val_index],
                                                         y[val_index])

    def _check_params(self):
        '''
        Cross Validate check parameters out.
        '''
        # TODO: check parameters out.
        pass
class gridSearch(crossValidate):
'''
Class for K-fold Cross Validation Grid Search.
Grid Search method. May better to be a subclass of `crossValidate`.
This framework can collect `model`, `loss`, `acc` and `history` from each fold, and
save them into files.
Data spliting methods from sklearn.model_selection are supported. you can pass the
classes as `splitMethod`.
It can't use multiple GPUs to speed up now. To grid search on a large parameter
matrix, you should use `Greedy Algorithm`.
This class has implemented a magic method `__call__()` wrapping `call()`, for which
it can be used like a function.
Parameters
----------
```txt
built_fn : function, Create Training model which need to cross-validate.
Please using string `create_` at the begining of function name,
like `create_modelname`.
parameters : dict, Parameters need to grid-search. Keys are the parameters'
name, and every parameter values are vectors which should be
passed as a list.
dataGent : class, Generate data for @built_fn, shapes (n_trails, ...).
It should discriminate data and label.
More details see core.generators.
splitMethod : class, Support split methods from module sklearn.model_selection.
kFold : int, Number of K-fold.
shuffle : bool, Optional Whether to shuffle each class's samples before
splitting into batches, default = False.
random_state : int, RandomState instance or None, optional, default = None.
If int, random_state is the seed used by the random number
generator; If RandomState instance, random_state is the random
number generator; If None, the random number generator is the
RandomState instance used by np.random. Used when shuffle == True.
subs : list, list of subjects' number, like `range(1, 10)`.
cropping : bool, Switch of cropped training. Default = False.
winLength : int, cropping window length, default = 2*srate.
step : int, cropping step, default = 1.
standardizing : bool, Switch of standardizing data. Default = True.
batch_size : int, Batch size.
epochs : int, Training epochs.
patience : int, Early stopping patience.
verbose : int, One of 0, 1 and 2.
*a, *args : tuple, Parameters used by @dataGent and @built_fn respectively
**kw, **kwargs : dict, Parameters used by @dataGent and @built_fn respectively,
**kw should include parameters called `beg`, `end` and `srate`.
```
Returns
-------
```txt
avg_acc : list, Average accuracy for each subject with K-fold Cross Validation,
and total average accuracy is at the last of the list
avg_kappa : list, Average kappa for each subject with K-fold Cross Validation,
and total average kappa is at the last of the list
```
Example
-------
```python
from core.splits import StratifiedKFold
def create_model(Samples, *args, summary=True, **kwargs):
...
return keras_model
class dataGenerator:
def __init__(self, *a, beg=0, end=4, srate=250, **kw):
...
def __call__(self, filepath, label=False):
if label:
...
return label
else:
...
return data
...
...
parameters = {'para1':[...], 'para2':[...], ...}
avg_acc = gridSearch(
create_model,
parameters,
dataGenerator,
beg=0,
end=4,
srate=250,
splitMethod=StratifiedKFold,
kFold=10,
subs=range(1, 10),
*a,
**kw)(*args, **kwargs)
```
Note
----
    See the source code for further details.
'''
def __init__(self,
             built_fn,
             parameters: dict,
             dataGent,
             splitMethod=StratifiedKFold,
             traindata_filepath=None,
             testdata_filepath=None,
             datadir=None,
             beg=0,
             end=4,
             srate=250,
             kFold=10,
             shuffle=False,
             random_state=None,
             subs=range(1, 10),
             cropping=False,
             winLength=None,
             cpt=None,
             step=25,
             standardizing=True,
             batch_size=10,
             epochs=300,
             patience=100,
             verbose=2,
             preserve_initfile=False,
             reinit=False,
             *args,
             **kwargs):
    '''Validate the grid-search `parameters` and normalize them per subject.

    After delegating all cross-validation settings to the base class,
    this checks that every searched name is an argument of `built_fn`,
    coerces candidate-value iterables to lists, and expands any
    per-subject dicts so that `self.parameters[i]` holds the grid for
    subject `i + 1`.

    Raises
    ------
    ValueError
        If a parameter name is not accepted by `built_fn`, or a
        per-subject dict references unknown subjects or misses some.
    '''
    super().__init__(built_fn=built_fn,
                     dataGent=dataGent,
                     splitMethod=splitMethod,
                     traindata_filepath=traindata_filepath,
                     testdata_filepath=testdata_filepath,
                     datadir=datadir,
                     beg=beg,
                     end=end,
                     srate=srate,
                     kFold=kFold,
                     shuffle=shuffle,
                     random_state=random_state,
                     subs=subs,
                     cropping=cropping,
                     winLength=winLength,
                     cpt=cpt,
                     step=step,
                     standardizing=standardizing,
                     batch_size=batch_size,
                     epochs=epochs,
                     patience=patience,
                     verbose=verbose,
                     preserve_initfile=preserve_initfile,
                     reinit=reinit,
                     *args,
                     **kwargs)
    # True once any parameter supplies per-subject value lists (a dict
    # keyed by subject number) rather than one list shared by all subjects.
    _subs_targeted = False
    _subs_targeted_parameters = []
    for parameter in parameters:
        # Every searched name must be an argument of the model builder.
        if not parameter in self.built_fn.__code__.co_varnames:
            raise ValueError('`parameters` has unsupported parameter in'
                             ' `built_fn`.')
        # Coerce any non-list, non-dict iterable of candidates to a list.
        if not isinstance(parameters[parameter], list) and not isinstance(
                parameters[parameter], dict):
            parameters[parameter] = list(parameters[parameter])
        if isinstance(parameters[parameter], dict):
            # Per-subject dict: it must cover exactly the subjects in
            # self.subs (checked by striking each one off this copy).
            subs = list(self.subs)
            for subject in parameters[parameter]:
                if not int(subject) in self.subs:
                    raise ValueError('`parameters` has unsolved subject'
                                     ' numbers.')
                if not isinstance(parameters[parameter][subject], list):
                    parameters[parameter][subject] = list(
                        parameters[parameter][subject])
                subs.remove(int(subject))
            if subs:
                raise ValueError('`parameters` doesn\'t include all the'
                                 ' subject numbers.')
            _subs_targeted = True
            _subs_targeted_parameters.append(parameter)
    temp = []
    if _subs_targeted:
        # Build one flattened grid dict per subject index (1-based); a
        # subject not in self.subs gets an empty dict.
        for subject in range(1, max(self.subs) + 1):
            items = []
            for parameter in parameters:
                if subject in self.subs:
                    if parameter in _subs_targeted_parameters:
                        # NOTE(review): lookup uses str(subject) although
                        # validation above used int(subject) -- assumes the
                        # dict is keyed by strings like '1'. TODO confirm.
                        items += list({
                            parameter:
                            parameters[parameter][str(subject)]
                        }.items())
                    else:
                        items += list({parameter:
                                       parameters[parameter]}.items())
            temp.append(dict(items))
    else:
        for subject in range(1, max(self.subs) + 1):
            if subject in self.subs:
                # All searched subjects share the same grid dict object.
                temp.append(parameters)
            else:
                # Placeholder entry for subjects that are not searched.
                temp.append([])
    # self.parameters[i] is the grid used for subject i + 1.
    self.parameters = temp
def call(self, *args, **kwargs):
    '''
    parameters should be lists to different subjects, then pass one
    subject's parameter to cv.

    Runs the whole grid search: for every subject, evaluates each
    parameter combination with K-fold cross validation (via the nested
    `cv` closure), tracks the best combination per subject, and writes
    progress/summary text files that are also echoed to the console.

    Returns
    -------
    avg_acc, avg_kappa : list
        Best average accuracy / kappa per subject, with the across-subject
        average appended as the last element of each list.
    '''
    # Shared initial-weight file so every grid point starts from the same
    # random initialization (unless self.reinit asks for fresh weights).
    initfile = os.path.join('.', 'GSCV_initweight.h5')
    tm = time.localtime()
    # Timestamped run directory, e.g. GS_2020_01_31_12_00_00_<model>.
    dirname = (
        'GS_{0:d}_{1:0>2d}_{2:0>2d}_{3:0>2d}_{4:0>2d}_{5:0>2d}_{6:s}'.
        format(tm.tm_year, tm.tm_mon, tm.tm_mday, tm.tm_hour, tm.tm_min,
               tm.tm_sec, self.modelstr))
    if not os.path.exists(os.path.join('model', dirname)):
        os.mkdir(os.path.join('model', dirname))
    if not os.path.exists(os.path.join('result', dirname)):
        os.mkdir(os.path.join('result', dirname))
    if self.cropping:
        # Cropped training: use the windowed generator and shrink the
        # effective sample length by one window.
        gent = self._gent_cropped_data
        self.Samples -= self.winLength
    else:
        gent = self._gent_data
    earlystopping = EarlyStopping(monitor='val_loss',
                                  min_delta=0,
                                  patience=self.patience,
                                  verbose=0,
                                  mode='auto')

    def cv(*args, **kwargs):
        '''one subject, one parameter'''
        # Build the model and sync it with the shared initial weights so
        # every parameter combination trains from the same starting point.
        if not self.reinit:
            if not os.path.exists(initfile):
                model = self.built_fn(*args,
                                      **kwargs,
                                      Samples=self.Samples)
                model.save_weights(initfile)
            else:
                model = self.built_fn(*args,
                                      **kwargs,
                                      Samples=self.Samples)
                model.load_weights(initfile)
        # Encode the current parameter values into the file-name prefix.
        filename = ''
        for key in kwargs.keys():
            if key in ['l1', 'l21', 'tl1']:
                # Regularization strengths are floats; keep 8 decimals.
                filename += '{0:s}({1:.8f})_'.format(key, kwargs[key])
            else:
                filename += '{0:s}({1:0>2d})_'.format(key, kwargs[key])
        acck = []
        kappak = []
        k = 0  # count kFolds
        # cropped training
        t = 0  # record model's saving time
        c = 0  # count windows
        win = 0  # selected windows
        win_list = []  # selected windows list
        for data in gent(subject=self.subs):
            if self._new_fold:  # new fold for cropped training
                self._new_fold = False
                t = 0
                c = 0
                if self.reinit:
                    # Fresh weights every fold when reinit is requested.
                    model = self.built_fn(*args,
                                          **kwargs,
                                          Samples=self.Samples)
                k += 1
                filepath = os.path.join(
                    'model', dirname,
                    filename + self.dn + '0{0:d}T_{1:s}({2:d}).h5'.format(
                        self.subs, self.modelstr, k))
                checkpointer = MyModelCheckpoint(filepath=filepath,
                                                 verbose=1,
                                                 save_best_only=True,
                                                 statistic_best=True,
                                                 p=0.05)
                history = {}
            # TODO: fit(), evaluate() with tf.data.Dataset, then `self._new_fold`
            # and `self._last_batch` will be DEPRECATED.
            # Accumulate this fit's history onto the fold's history dict.
            history = dict(
                list(history.items()) + list(
                    model.fit(x=data['x_train'],
                              y=data['y_train'],
                              batch_size=self.batch_size,
                              epochs=self.epochs,
                              callbacks=[checkpointer, earlystopping],
                              verbose=self.verbose,
                              validation_data=[
                                  data['x_val'], data['y_val']
                              ]).history.items()))
            if self.cropping:
                # A newer checkpoint mtime means this window produced the
                # best model so far; remember which window it was.
                if not t == os.path.getmtime(checkpointer._filepath):
                    t = os.path.getmtime(checkpointer._filepath)
                    win = c
            # load the best model for cropped training or evaluating its accuracy
            model.load_weights(filepath)
            if self._last_batch:  # the last batch for cropped training
                self._last_batch = False
                if self.cropping:
                    win_list.append(win)
                    # Evaluate only the selected window of the test data.
                    x_test = data['x_test'][:, :, win *
                                            self.step:win * self.step +
                                            self.Samples, :]
                    pd = model.predict(x_test, verbose=0)
                    pred = np.argmax(pd, axis=1)
                    acc = np.mean(
                        np.squeeze(pred) == np.squeeze(data['y_test']))
                    kappa = computeKappa(pred, data['y_test'])
                else:
                    loss, acc = model.evaluate(data['x_test'],
                                               data['y_test'],
                                               batch_size=self.batch_size,
                                               verbose=self.verbose)
                    _pred = model.predict(data['x_test'],
                                          batch_size=self.batch_size,
                                          verbose=self.verbose)
                    pred = np.argmax(_pred, axis=1)
                    kappa = computeKappa(pred, data['y_test'])
                # save the train history
                npy_filepath = filepath[:-3] + '.npy'
                np.save(npy_filepath, history)
                # reset model's weights to train a new one next fold
                if os.path.exists(initfile) and not self.reinit:
                    model.reset_states()
                    model.load_weights(initfile)
                if self.reinit:
                    K.clear_session()
                    gc.collect()
                acck.append(acc)
                kappak.append(kappa)
            data.clear()
            del data
        K.clear_session()
        del model
        gc.collect()
        avg_acc = np.average(np.array(acck))
        avg_kappa = np.average(np.array(kappak))
        # Majority-vote window across folds.
        # NOTE(review): win_list is empty when cropping is False, so this
        # indexing would raise on a non-cropped run -- confirm whether an
        # `if self.cropping` guard belongs here.
        win = win_list[np.argmax(np.bincount(win_list))]
        filepath = os.path.join(
            'result', dirname, filename + self.dn +
            '0{0:d}T_{1:s}.txt'.format(self.subs, self.modelstr))
        # Write the per-fold summary, then echo it back to the console
        # (_console holds the original sys.stdout).
        with open(filepath, 'w+') as f:
            sys.stdout = f
            print(('{0:s} {1:d}-fold ' + self.validation_name +
                   ' Accuracy').format(self.modelstr, self.kFold))
            print('Subject {0:0>2d}'.format(self.subs))
            for i in range(len(acck)):
                print('Fold {0:0>2d}: {1:.2%} ({2:.4f})'.format(
                    i + 1, acck[i], kappak[i]))
            if self.cropping:
                print('Window:{:0>2d}'.format(win))
            print('Average : {0:.2%} ({1:.4f})'.format(
                avg_acc, avg_kappa))
            sys.stdout = _console
            f.seek(0, 0)
            for line in f.readlines():
                print(line)
            f.close()
        return avg_acc, avg_kappa

    parameters = []
    max_avg_acc = []
    max_acc_kappa = []
    indices = []
    # Keep the full subject list; self.subs is temporarily rebound to a
    # single subject inside the loop below.
    subs = copy.copy(self.subs)
    filepath = os.path.join(
        'result',
        'GS_{0:d}_{1:0>2d}_{2:0>2d}_{3:0>2d}_{4:0>2d}_{5:0>2d}_' \
        '{6:s}.txt'.format(tm.tm_year, tm.tm_mon, tm.tm_mday, tm.tm_hour,
                           tm.tm_min, tm.tm_sec, self.modelstr))
    for subject in subs:
        # All (name, value) combinations for this subject's grid.
        parameters.append(self._combination(subject=subject))
        count = 0
        # Progress report: file is rewritten, then echoed to the console.
        with open(filepath, 'w+') as f:
            sys.stdout = f
            print('Subject: {0:0>2d}/{1:0>2d}'.format(subject, len(subs)))
            print(
                'Grid Search progress: {0:0>4d}/{1:0>4d}' \
                '\nThe No.{2:0>4d} is in processing'
                .format(count, len(parameters[-1]), count + 1))
            sys.stdout = _console
            f.seek(0, 0)
            for line in f.readlines():
                print(line)
            f.close()
        avg_acc = []
        avg_kappa = []
        for parameter in parameters[-1]:
            # cv() reads self.subs as the current subject number.
            self.subs = subject
            param = dict(parameter + list(kwargs.items()))
            acc, kappa = cv(*args, **param)
            avg_acc.append(acc)
            avg_kappa.append(kappa)
            count += 1
            with open(filepath, 'w+') as f:
                sys.stdout = f
                print('Subject: {0:0>2d}/{1:0>2d}'.format(
                    subject, len(subs)))
                if count < len(parameters[-1]):
                    print(
                        'Grid Search progress: {0:0>4d}/{1:0>4d}' \
                        '\nThe No.{2:0>4d} is in processing'
                        .format(count, len(parameters[-1]), count + 1))
                else:
                    print('Grid Search progress: {0:0>4d}/{1:0>4d}'.format(
                        count, len(parameters[-1])))
                sys.stdout = _console
                f.seek(0, 0)
                for line in f.readlines():
                    print(line)
                f.close()
            # Force the data generator to re-read for the next grid point.
            self._readed = False
        # Record the best grid point for this subject.
        max_avg_acc.append(np.max(avg_acc))
        indices.append(np.argmax(avg_acc))
        max_acc_kappa.append(avg_kappa[indices[-1]])
    # Restore the full subject list rebound inside the loop.
    self.subs = subs
    if os.path.exists(initfile) and not self.preserve_initfile:
        os.remove(initfile)
    # Final cross-subject summary, echoed to the console as above.
    with open(filepath, 'w+') as f:
        sys.stdout = f
        print(('{0:s} {1:d}-fold ' + self.validation_name +
               'Grid Search Accuracy (kappa)').format(
                   self.modelstr, self.kFold))
        for i in range(len(self.subs)):
            print('Subject {0:0>2d}: {1:.2%} ({2:.4f})'.format(
                self.subs[i], max_avg_acc[i], max_acc_kappa[i]))
            print('Parameters', end='')
            for n in range(len(parameters[i][indices[i]])):
                if n == 0:
                    print(': {0:s} = {1:.8f}'.format(
                        parameters[i][indices[i]][n][0],
                        parameters[i][indices[i]][n][1]),
                          end='')
                else:
                    print(', {0:s} = {1:.8f}'.format(
                        parameters[i][indices[i]][n][0],
                        parameters[i][indices[i]][n][1]),
                          end='')
            print()
        print('Average : {:.2%} ({:.4f})'.format(
            np.average(max_avg_acc), np.average(max_acc_kappa)))
        sys.stdout = _console
        f.seek(0, 0)
        for line in f.readlines():
            print(line)
        f.close()
    # Append the across-subject averages as the last list elements.
    avg_acc = max_avg_acc
    avg_kappa = max_acc_kappa
    avg_acc.append(np.average(max_avg_acc))
    avg_kappa.append(np.average(max_acc_kappa))
    return avg_acc, avg_kappa
def _combination(self, subject):
    '''Build every parameter combination for one subject's grid search.

    Takes the candidate-value grid stored for `subject` (1-based) and
    returns a list of combinations, where each combination is a list of
    (parameter_name, value) tuples -- one value drawn from each
    parameter's candidate list.
    '''
    grid = self.parameters[subject - 1]
    names = list(grid.keys())
    # Cartesian product over the candidate lists, each result paired
    # back with its parameter name.
    return [list(zip(names, combo))
            for combo in itertools.product(*grid.values())]
def getConfig(self):
    '''Return the full configuration dict, including the search grid.

    Merges the `parameters` entry on top of the base configuration.
    '''
    # NOTE(review): super(crossValidate, self) skips crossValidate's own
    # getConfig in the MRO -- confirm this is intentional (it matches
    # getSuperConfig below).
    base_config = super(crossValidate, self).getConfig()
    merged = dict(base_config)
    merged.update({'parameters': self.parameters})
    return merged
def getSuperConfig(self):
    '''Return only the base configuration, without the search grid.'''
    base = super(crossValidate, self)
    return base.getConfig()
def setConfig(self,
              built_fn,
              parameters: dict,
              dataGent,
              splitMethod=StratifiedKFold,
              traindata_filepath=None,
              testdata_filepath=None,
              datadir=None,
              beg=0,
              end=4,
              srate=250,
              kFold=10,
              shuffle=False,
              random_state=None,
              subs=range(1, 10),
              cropping=False,
              winLength=None,
              cpt=None,
              step=25,
              standardizing=True,
              batch_size=10,
              epochs=300,
              patience=100,
              verbose=2,
              preserve_initfile=False,
              reinit=False,
              *args,
              **kwargs):
    '''Reconfigure the validator; mirrors `__init__`.

    Forwards all base settings to the parent `setConfig`, then runs the
    same `parameters` validation/expansion performed in `__init__` so
    that `self.parameters[i]` again holds the grid for subject `i + 1`.

    Raises
    ------
    ValueError
        If a parameter name is not accepted by `built_fn`, or a
        per-subject dict references unknown subjects or misses some.
    '''
    super().setConfig(built_fn=built_fn,
                      dataGent=dataGent,
                      splitMethod=splitMethod,
                      traindata_filepath=traindata_filepath,
                      testdata_filepath=testdata_filepath,
                      datadir=datadir,
                      beg=beg,
                      end=end,
                      srate=srate,
                      kFold=kFold,
                      shuffle=shuffle,
                      random_state=random_state,
                      subs=subs,
                      cropping=cropping,
                      winLength=winLength,
                      cpt=cpt,
                      step=step,
                      standardizing=standardizing,
                      batch_size=batch_size,
                      epochs=epochs,
                      patience=patience,
                      verbose=verbose,
                      preserve_initfile=preserve_initfile,
                      reinit=reinit,
                      *args,
                      **kwargs)
    # The remainder duplicates the validation in __init__; see the
    # comments there for the full rationale.
    _subs_targeted = False
    _subs_targeted_parameters = []
    for parameter in parameters:
        # Searched names must be arguments of the model builder.
        if not parameter in self.built_fn.__code__.co_varnames:
            raise ValueError('`parameters` has unsupported parameter in'
                             ' `built_fn`.')
        # Coerce non-list, non-dict candidate iterables to lists.
        if not isinstance(parameters[parameter], list) and not isinstance(
                parameters[parameter], dict):
            parameters[parameter] = list(parameters[parameter])
        if isinstance(parameters[parameter], dict):
            # Per-subject dict must cover exactly the subjects in self.subs.
            subs = list(self.subs)
            for subject in parameters[parameter]:
                if not int(subject) in self.subs:
                    raise ValueError('`parameters` has unsolved subject'
                                     ' numbers.')
                if not isinstance(parameters[parameter][subject], list):
                    parameters[parameter][subject] = list(
                        parameters[parameter][subject])
                subs.remove(int(subject))
            if subs:
                raise ValueError('`parameters` doesn\'t include all the'
                                 ' subject numbers.')
            _subs_targeted = True
            _subs_targeted_parameters.append(parameter)
    temp = []
    if _subs_targeted:
        # One flattened grid dict per subject index (1-based).
        for subject in range(1, max(self.subs) + 1):
            items = []
            for parameter in parameters:
                if subject in self.subs:
                    if parameter in _subs_targeted_parameters:
                        # NOTE(review): str(subject) lookup assumes string
                        # keys like '1' -- TODO confirm (same as __init__).
                        items += list({
                            parameter:
                            parameters[parameter][str(subject)]
                        }.items())
                    else:
                        items += list({parameter:
                                       parameters[parameter]}.items())
            temp.append(dict(items))
    else:
        for subject in range(1, max(self.subs) + 1):
            if subject in self.subs:
                # All searched subjects share the same grid dict object.
                temp.append(parameters)
            else:
                # Placeholder for subjects that are not searched.
                temp.append([])
    # self.parameters[i] is the grid used for subject i + 1.
    self.parameters = temp
| 39.128947
| 97
| 0.456941
| 5,797
| 59,476
| 4.573918
| 0.086597
| 0.00792
| 0.006789
| 0.003734
| 0.802942
| 0.764888
| 0.742599
| 0.735018
| 0.729587
| 0.72095
| 0
| 0.016424
| 0.446163
| 59,476
| 1,520
| 98
| 39.128947
| 0.788525
| 0.174558
| 0
| 0.755814
| 0
| 0.003578
| 0.063825
| 0.008287
| 0
| 0
| 0
| 0.001974
| 0
| 1
| 0.020572
| false
| 0.008945
| 0.015206
| 0.000894
| 0.049195
| 0.029517
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9d661f3f38a65363f18add1c5424e9a412e09bb2
| 2,799
|
py
|
Python
|
src/RIOT/tests/gnrc_sock_ip/tests/01-run.py
|
ARte-team/ARte
|
19f17f57522e1b18ba390718fc94be246451837b
|
[
"MIT"
] | 2
|
2020-04-30T08:17:45.000Z
|
2020-05-23T08:46:54.000Z
|
src/RIOT/tests/gnrc_sock_ip/tests/01-run.py
|
ARte-team/ARte
|
19f17f57522e1b18ba390718fc94be246451837b
|
[
"MIT"
] | null | null | null |
src/RIOT/tests/gnrc_sock_ip/tests/01-run.py
|
ARte-team/ARte
|
19f17f57522e1b18ba390718fc94be246451837b
|
[
"MIT"
] | 1
|
2020-02-21T09:21:45.000Z
|
2020-02-21T09:21:45.000Z
|
#!/usr/bin/env python3
# Copyright (C) 2016 Kaspar Schleiser <kaspar@schleiser.de>
#
# This file is subject to the terms and conditions of the GNU Lesser
# General Public License v2.1. See the file LICENSE in the top level
# directory for more details.
import sys
from testrunner import run
def testfunc(child):
child.expect_exact(u"Calling test_sock_ip_create__EAFNOSUPPORT()")
child.expect_exact(u"Calling test_sock_ip_create__EINVAL_addr()")
child.expect_exact(u"Calling test_sock_ip_create__EINVAL_netif()")
child.expect_exact(u"Calling test_sock_ip_create__no_endpoints()")
child.expect_exact(u"Calling test_sock_ip_create__only_local()")
child.expect_exact(u"Calling test_sock_ip_create__only_local_reuse_ep()")
child.expect_exact(u"Calling test_sock_ip_create__only_remote()")
child.expect_exact(u"Calling test_sock_ip_create__full()")
child.expect_exact(u"Calling test_sock_ip_recv__EADDRNOTAVAIL()")
child.expect_exact(u"Calling test_sock_ip_recv__ENOBUFS()")
child.expect_exact(u"Calling test_sock_ip_recv__EPROTO()")
child.expect_exact(u"Calling test_sock_ip_recv__ETIMEDOUT()")
child.expect_exact(u" * Calling sock_ip_recv()")
child.expect(r" \* \(timed out with timeout \d+\)")
child.expect_exact(u"Calling test_sock_ip_recv__socketed()")
child.expect_exact(u"Calling test_sock_ip_recv__socketed_with_remote()")
child.expect_exact(u"Calling test_sock_ip_recv__unsocketed()")
child.expect_exact(u"Calling test_sock_ip_recv__unsocketed_with_remote()")
child.expect_exact(u"Calling test_sock_ip_recv__with_timeout()")
child.expect_exact(u"Calling test_sock_ip_send__EAFNOSUPPORT()")
child.expect_exact(u"Calling test_sock_ip_send__EINVAL_addr()")
child.expect_exact(u"Calling test_sock_ip_send__EINVAL_netif()")
child.expect_exact(u"Calling test_sock_ip_send__ENOTCONN()")
child.expect_exact(u"Calling test_sock_ip_send__socketed_no_local_no_netif()")
child.expect_exact(u"Calling test_sock_ip_send__socketed_no_netif()")
child.expect_exact(u"Calling test_sock_ip_send__socketed_no_local()")
child.expect_exact(u"Calling test_sock_ip_send__socketed()")
child.expect_exact(u"Calling test_sock_ip_send__socketed_other_remote()")
child.expect_exact(u"Calling test_sock_ip_send__unsocketed_no_local_no_netif()")
child.expect_exact(u"Calling test_sock_ip_send__unsocketed_no_netif()")
child.expect_exact(u"Calling test_sock_ip_send__unsocketed_no_local()")
child.expect_exact(u"Calling test_sock_ip_send__unsocketed()")
child.expect_exact(u"Calling test_sock_ip_send__no_sock_no_netif()")
child.expect_exact(u"Calling test_sock_ip_send__no_sock()")
child.expect_exact(u"ALL TESTS SUCCESSFUL")
if __name__ == "__main__":
sys.exit(run(testfunc))
| 52.811321
| 84
| 0.798142
| 438
| 2,799
| 4.550228
| 0.196347
| 0.193176
| 0.272955
| 0.290015
| 0.786252
| 0.77421
| 0.77421
| 0.77421
| 0.77421
| 0.617662
| 0
| 0.002782
| 0.101108
| 2,799
| 52
| 85
| 53.826923
| 0.789348
| 0.086102
| 0
| 0
| 0
| 0
| 0.5721
| 0.437696
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025
| false
| 0
| 0.05
| 0
| 0.075
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
9d67844b5451d2ba27cdfb64e883456bdae0fb01
| 155
|
py
|
Python
|
loldib/getratings/models/NA/na_kalista/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_kalista/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_kalista/__init__.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
from .na_kalista_top import *
from .na_kalista_jng import *
from .na_kalista_mid import *
from .na_kalista_bot import *
from .na_kalista_sup import *
| 25.833333
| 30
| 0.774194
| 25
| 155
| 4.4
| 0.36
| 0.272727
| 0.590909
| 0.690909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 155
| 5
| 31
| 31
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
9d6f51f3d68d8a6bade585f3bfc7f6c8e9e89cc1
| 56,476
|
py
|
Python
|
src/genie/libs/parser/iosxr/tests/test_show_ethernet_yang.py
|
nujo/genieparser
|
083b01efc46afc32abe1a1858729578beab50cd3
|
[
"Apache-2.0"
] | 4
|
2020-08-20T12:23:12.000Z
|
2021-06-15T14:10:02.000Z
|
src/genie/libs/parser/iosxr/tests/test_show_ethernet_yang.py
|
nujo/genieparser
|
083b01efc46afc32abe1a1858729578beab50cd3
|
[
"Apache-2.0"
] | 119
|
2020-07-10T22:37:51.000Z
|
2021-03-18T02:40:05.000Z
|
src/genie/libs/parser/iosxr/tests/test_show_ethernet_yang.py
|
nujo/genieparser
|
083b01efc46afc32abe1a1858729578beab50cd3
|
[
"Apache-2.0"
] | 2
|
2021-02-12T21:42:30.000Z
|
2021-02-12T21:47:51.000Z
|
import re
import unittest
from unittest.mock import Mock
import xml.etree.ElementTree as ET
from pyats.topology import Device
from genie.ops.base import Context
from genie.metaparser.util.exceptions import SchemaEmptyParserError
from genie.libs.parser.iosxr.show_ethernet import ShowEthernetTrunkDetail, \
ShowEthernetTags
class test_show_ethernet_tags_yang(unittest.TestCase):
device = Device(name='aDevice')
device1 = Device(name='bDevice')
empty_output = {'execute.return_value': ''}
golden_parsed_output = {'interface': {'GigabitEthernet0/0/0/0': {'sub_interface': {'GigabitEthernet0/0/0/0.501': {'vlan_id': {'2': {'inner_encapsulation_type': 'dot1q',
'inner_encapsulation_vlan_id': '5',
'mtu': '1522',
'outer_encapsulation_type': 'dot1q'}}}}}}}
class etree_holder():
def __init__(self):
self.data = ET.fromstring('''
<data>
<interfaces xmlns="http://cisco.com/ns/yang/Cisco-IOS-XR-pfi-im-cmd-oper">
<interface-xr>
<interface>
<interface-name>GigabitEthernet0/0/0/0</interface-name>
<interface-handle>GigabitEthernet0/0/0/0</interface-handle>
<interface-type>IFT_GETHERNET</interface-type>
<hardware-type-string>GigabitEthernet</hardware-type-string>
<state>im-state-up</state>
<line-state>im-state-up</line-state>
<encapsulation>ether</encapsulation>
<encapsulation-type-string>ARPA</encapsulation-type-string>
<mtu>1514</mtu>
<is-l2-transport-enabled>false</is-l2-transport-enabled>
<state-transition-count>1</state-transition-count>
<last-state-transition-time>1100222</last-state-transition-time>
<is-dampening-enabled>false</is-dampening-enabled>
<speed>1000000</speed>
<duplexity>im-attr-duplex-full</duplexity>
<media-type>im-attr-media-other</media-type>
<link-type>im-attr-link-type-force</link-type>
<in-flow-control>im-attr-flow-control-off</in-flow-control>
<out-flow-control>im-attr-flow-control-off</out-flow-control>
<mac-address>
<address>52:54:00:ff:50:0e</address>
</mac-address>
<burned-in-address>
<address>52:54:00:ff:50:0e</address>
</burned-in-address>
<carrier-delay>
<carrier-delay-up>10</carrier-delay-up>
<carrier-delay-down>0</carrier-delay-down>
</carrier-delay>
<bandwidth>1000000</bandwidth>
<max-bandwidth>1000000</max-bandwidth>
<is-l2-looped>false</is-l2-looped>
<description></description>
<data-rates>
<input-data-rate>0</input-data-rate>
<input-packet-rate>0</input-packet-rate>
<output-data-rate>0</output-data-rate>
<output-packet-rate>0</output-packet-rate>
<peak-input-data-rate>0</peak-input-data-rate>
<peak-input-packet-rate>0</peak-input-packet-rate>
<peak-output-data-rate>0</peak-output-data-rate>
<peak-output-packet-rate>0</peak-output-packet-rate>
<bandwidth>1000000</bandwidth>
<load-interval>9</load-interval>
<output-load>0</output-load>
<input-load>0</input-load>
<reliability>255</reliability>
</data-rates>
<interface-statistics>
<stats-type>full</stats-type>
<full-interface-stats>
<packets-received>0</packets-received>
<bytes-received>0</bytes-received>
<packets-sent>0</packets-sent>
<bytes-sent>0</bytes-sent>
<multicast-packets-received>0</multicast-packets-received>
<broadcast-packets-received>0</broadcast-packets-received>
<multicast-packets-sent>0</multicast-packets-sent>
<broadcast-packets-sent>0</broadcast-packets-sent>
<output-drops>0</output-drops>
<output-queue-drops>0</output-queue-drops>
<input-drops>0</input-drops>
<input-queue-drops>0</input-queue-drops>
<runt-packets-received>0</runt-packets-received>
<giant-packets-received>0</giant-packets-received>
<throttled-packets-received>0</throttled-packets-received>
<parity-packets-received>0</parity-packets-received>
<unknown-protocol-packets-received>0</unknown-protocol-packets-received>
<input-errors>0</input-errors>
<crc-errors>0</crc-errors>
<input-overruns>0</input-overruns>
<framing-errors-received>0</framing-errors-received>
<input-ignored-packets>0</input-ignored-packets>
<input-aborts>0</input-aborts>
<output-errors>0</output-errors>
<output-underruns>0</output-underruns>
<output-buffer-failures>0</output-buffer-failures>
<output-buffers-swapped-out>0</output-buffers-swapped-out>
<applique>0</applique>
<resets>0</resets>
<carrier-transitions>1</carrier-transitions>
<availability-flag>0</availability-flag>
<last-data-time>1490888108</last-data-time>
<seconds-since-last-clear-counters>0</seconds-since-last-clear-counters>
<last-discontinuity-time>1489787869</last-discontinuity-time>
<seconds-since-packet-received>4294967295</seconds-since-packet-received>
<seconds-since-packet-sent>4294967295</seconds-since-packet-sent>
</full-interface-stats>
</interface-statistics>
<if-index>0</if-index>
</interface>
<interface>
<interface-name>GigabitEthernet0/0/0/0.501</interface-name>
<interface-handle>GigabitEthernet0/0/0/0.501</interface-handle>
<interface-type>IFT_VLAN_SUBIF</interface-type>
<hardware-type-string>VLAN sub-interface(s)</hardware-type-string>
<state>im-state-up</state>
<line-state>im-state-up</line-state>
<encapsulation>dot1q</encapsulation>
<encapsulation-type-string>802.1Q</encapsulation-type-string>
<mtu>1522</mtu>
<is-l2-transport-enabled>false</is-l2-transport-enabled>
<state-transition-count>1</state-transition-count>
<last-state-transition-time>1100222</last-state-transition-time>
<is-dampening-enabled>false</is-dampening-enabled>
<speed>1000000</speed>
<mac-address>
<address>52:54:00:ff:50:0e</address>
</mac-address>
<carrier-delay>
<carrier-delay-up>0</carrier-delay-up>
<carrier-delay-down>0</carrier-delay-down>
</carrier-delay>
<bandwidth>1000000</bandwidth>
<max-bandwidth>1000000</max-bandwidth>
<is-l2-looped>false</is-l2-looped>
<parent-interface-name>GigabitEthernet0/0/0/0</parent-interface-name>
<description></description>
<encapsulation-information>
<encapsulation-type>vlan</encapsulation-type>
<dot1q-information>
<encapsulation-details>
<vlan-encapsulation>qinq</vlan-encapsulation>
<stack>
<outer-tag>2</outer-tag>
<second-tag>5</second-tag>
</stack>
</encapsulation-details>
</dot1q-information>
</encapsulation-information>
<data-rates>
<input-data-rate>0</input-data-rate>
<input-packet-rate>0</input-packet-rate>
<output-data-rate>0</output-data-rate>
<output-packet-rate>0</output-packet-rate>
<peak-input-data-rate>0</peak-input-data-rate>
<peak-input-packet-rate>0</peak-input-packet-rate>
<peak-output-data-rate>0</peak-output-data-rate>
<peak-output-packet-rate>0</peak-output-packet-rate>
<bandwidth>1000000</bandwidth>
<load-interval>9</load-interval>
<output-load>0</output-load>
<input-load>0</input-load>
<reliability>255</reliability>
</data-rates>
<interface-statistics>
<stats-type>full</stats-type>
<full-interface-stats>
<packets-received>0</packets-received>
<bytes-received>0</bytes-received>
<packets-sent>0</packets-sent>
<bytes-sent>0</bytes-sent>
<multicast-packets-received>0</multicast-packets-received>
<broadcast-packets-received>0</broadcast-packets-received>
<multicast-packets-sent>0</multicast-packets-sent>
<broadcast-packets-sent>0</broadcast-packets-sent>
<output-drops>0</output-drops>
<output-queue-drops>0</output-queue-drops>
<input-drops>0</input-drops>
<input-queue-drops>0</input-queue-drops>
<runt-packets-received>0</runt-packets-received>
<giant-packets-received>0</giant-packets-received>
<throttled-packets-received>0</throttled-packets-received>
<parity-packets-received>0</parity-packets-received>
<unknown-protocol-packets-received>0</unknown-protocol-packets-received>
<input-errors>0</input-errors>
<crc-errors>0</crc-errors>
<input-overruns>0</input-overruns>
<framing-errors-received>0</framing-errors-received>
<input-ignored-packets>0</input-ignored-packets>
<input-aborts>0</input-aborts>
<output-errors>0</output-errors>
<output-underruns>0</output-underruns>
<output-buffer-failures>0</output-buffer-failures>
<output-buffers-swapped-out>0</output-buffers-swapped-out>
<applique>0</applique>
<resets>0</resets>
<carrier-transitions>0</carrier-transitions>
<availability-flag>0</availability-flag>
<last-data-time>1490888108</last-data-time>
<seconds-since-last-clear-counters>0</seconds-since-last-clear-counters>
<last-discontinuity-time>1489787915</last-discontinuity-time>
<seconds-since-packet-received>4294967295</seconds-since-packet-received>
<seconds-since-packet-sent>4294967295</seconds-since-packet-sent>
</full-interface-stats>
</interface-statistics>
<if-index>0</if-index>
</interface>
<interface>
<interface-name>GigabitEthernet0/0/0/1</interface-name>
<interface-handle>GigabitEthernet0/0/0/1</interface-handle>
<interface-type>IFT_GETHERNET</interface-type>
<hardware-type-string>GigabitEthernet</hardware-type-string>
<state>im-state-admin-down</state>
<line-state>im-state-admin-down</line-state>
<encapsulation>ether</encapsulation>
<encapsulation-type-string>ARPA</encapsulation-type-string>
<mtu>1514</mtu>
<is-l2-transport-enabled>false</is-l2-transport-enabled>
<state-transition-count>0</state-transition-count>
<last-state-transition-time>1100377</last-state-transition-time>
<is-dampening-enabled>false</is-dampening-enabled>
<speed>1000000</speed>
<duplexity>im-attr-duplex-full</duplexity>
<media-type>im-attr-media-other</media-type>
<link-type>im-attr-link-type-force</link-type>
<in-flow-control>im-attr-flow-control-off</in-flow-control>
<out-flow-control>im-attr-flow-control-off</out-flow-control>
<mac-address>
<address>52:54:00:ff:34:03</address>
</mac-address>
<burned-in-address>
<address>52:54:00:ff:34:03</address>
</burned-in-address>
<carrier-delay>
<carrier-delay-up>10</carrier-delay-up>
<carrier-delay-down>0</carrier-delay-down>
</carrier-delay>
<bandwidth>1000000</bandwidth>
<max-bandwidth>1000000</max-bandwidth>
<is-l2-looped>false</is-l2-looped>
<description></description>
<data-rates>
<input-data-rate>0</input-data-rate>
<input-packet-rate>0</input-packet-rate>
<output-data-rate>0</output-data-rate>
<output-packet-rate>0</output-packet-rate>
<peak-input-data-rate>0</peak-input-data-rate>
<peak-input-packet-rate>0</peak-input-packet-rate>
<peak-output-data-rate>0</peak-output-data-rate>
<peak-output-packet-rate>0</peak-output-packet-rate>
<bandwidth>1000000</bandwidth>
<load-interval>9</load-interval>
<output-load>0</output-load>
<input-load>0</input-load>
<reliability>255</reliability>
</data-rates>
<interface-statistics>
<stats-type>full</stats-type>
<full-interface-stats>
<packets-received>0</packets-received>
<bytes-received>0</bytes-received>
<packets-sent>0</packets-sent>
<bytes-sent>0</bytes-sent>
<multicast-packets-received>0</multicast-packets-received>
<broadcast-packets-received>0</broadcast-packets-received>
<multicast-packets-sent>0</multicast-packets-sent>
<broadcast-packets-sent>0</broadcast-packets-sent>
<output-drops>0</output-drops>
<output-queue-drops>0</output-queue-drops>
<input-drops>0</input-drops>
<input-queue-drops>0</input-queue-drops>
<runt-packets-received>0</runt-packets-received>
<giant-packets-received>0</giant-packets-received>
<throttled-packets-received>0</throttled-packets-received>
<parity-packets-received>0</parity-packets-received>
<unknown-protocol-packets-received>0</unknown-protocol-packets-received>
<input-errors>0</input-errors>
<crc-errors>0</crc-errors>
<input-overruns>0</input-overruns>
<framing-errors-received>0</framing-errors-received>
<input-ignored-packets>0</input-ignored-packets>
<input-aborts>0</input-aborts>
<output-errors>0</output-errors>
<output-underruns>0</output-underruns>
<output-buffer-failures>0</output-buffer-failures>
<output-buffers-swapped-out>0</output-buffers-swapped-out>
<applique>0</applique>
<resets>0</resets>
<carrier-transitions>0</carrier-transitions>
<availability-flag>0</availability-flag>
<last-data-time>1490888108</last-data-time>
<seconds-since-last-clear-counters>0</seconds-since-last-clear-counters>
<last-discontinuity-time>1489787869</last-discontinuity-time>
<seconds-since-packet-received>4294967295</seconds-since-packet-received>
<seconds-since-packet-sent>4294967295</seconds-since-packet-sent>
</full-interface-stats>
</interface-statistics>
<if-index>0</if-index>
</interface>
<interface>
<interface-name>GigabitEthernet0/0/0/2</interface-name>
<interface-handle>GigabitEthernet0/0/0/2</interface-handle>
<interface-type>IFT_GETHERNET</interface-type>
<hardware-type-string>GigabitEthernet</hardware-type-string>
<state>im-state-admin-down</state>
<line-state>im-state-admin-down</line-state>
<encapsulation>ether</encapsulation>
<encapsulation-type-string>ARPA</encapsulation-type-string>
<mtu>1514</mtu>
<is-l2-transport-enabled>false</is-l2-transport-enabled>
<state-transition-count>0</state-transition-count>
<last-state-transition-time>1100377</last-state-transition-time>
<is-dampening-enabled>false</is-dampening-enabled>
<speed>1000000</speed>
<duplexity>im-attr-duplex-full</duplexity>
<media-type>im-attr-media-other</media-type>
<link-type>im-attr-link-type-force</link-type>
<in-flow-control>im-attr-flow-control-off</in-flow-control>
<out-flow-control>im-attr-flow-control-off</out-flow-control>
<mac-address>
<address>52:54:00:ff:40:4e</address>
</mac-address>
<burned-in-address>
<address>52:54:00:ff:40:4e</address>
</burned-in-address>
<carrier-delay>
<carrier-delay-up>10</carrier-delay-up>
<carrier-delay-down>0</carrier-delay-down>
</carrier-delay>
<bandwidth>1000000</bandwidth>
<max-bandwidth>1000000</max-bandwidth>
<is-l2-looped>false</is-l2-looped>
<description></description>
<data-rates>
<input-data-rate>0</input-data-rate>
<input-packet-rate>0</input-packet-rate>
<output-data-rate>0</output-data-rate>
<output-packet-rate>0</output-packet-rate>
<peak-input-data-rate>0</peak-input-data-rate>
<peak-input-packet-rate>0</peak-input-packet-rate>
<peak-output-data-rate>0</peak-output-data-rate>
<peak-output-packet-rate>0</peak-output-packet-rate>
<bandwidth>1000000</bandwidth>
<load-interval>9</load-interval>
<output-load>0</output-load>
<input-load>0</input-load>
<reliability>255</reliability>
</data-rates>
<interface-statistics>
<stats-type>full</stats-type>
<full-interface-stats>
<packets-received>0</packets-received>
<bytes-received>0</bytes-received>
<packets-sent>0</packets-sent>
<bytes-sent>0</bytes-sent>
<multicast-packets-received>0</multicast-packets-received>
<broadcast-packets-received>0</broadcast-packets-received>
<multicast-packets-sent>0</multicast-packets-sent>
<broadcast-packets-sent>0</broadcast-packets-sent>
<output-drops>0</output-drops>
<output-queue-drops>0</output-queue-drops>
<input-drops>0</input-drops>
<input-queue-drops>0</input-queue-drops>
<runt-packets-received>0</runt-packets-received>
<giant-packets-received>0</giant-packets-received>
<throttled-packets-received>0</throttled-packets-received>
<parity-packets-received>0</parity-packets-received>
<unknown-protocol-packets-received>0</unknown-protocol-packets-received>
<input-errors>0</input-errors>
<crc-errors>0</crc-errors>
<input-overruns>0</input-overruns>
<framing-errors-received>0</framing-errors-received>
<input-ignored-packets>0</input-ignored-packets>
<input-aborts>0</input-aborts>
<output-errors>0</output-errors>
<output-underruns>0</output-underruns>
<output-buffer-failures>0</output-buffer-failures>
<output-buffers-swapped-out>0</output-buffers-swapped-out>
<applique>0</applique>
<resets>0</resets>
<carrier-transitions>0</carrier-transitions>
<availability-flag>0</availability-flag>
<last-data-time>1490888108</last-data-time>
<seconds-since-last-clear-counters>0</seconds-since-last-clear-counters>
<last-discontinuity-time>1489787869</last-discontinuity-time>
<seconds-since-packet-received>4294967295</seconds-since-packet-received>
<seconds-since-packet-sent>4294967295</seconds-since-packet-sent>
</full-interface-stats>
</interface-statistics>
<if-index>0</if-index>
</interface>
<interface>
<interface-name>GigabitEthernet0/0/0/3</interface-name>
<interface-handle>GigabitEthernet0/0/0/3</interface-handle>
<interface-type>IFT_GETHERNET</interface-type>
<hardware-type-string>GigabitEthernet</hardware-type-string>
<state>im-state-admin-down</state>
<line-state>im-state-admin-down</line-state>
<encapsulation>ether</encapsulation>
<encapsulation-type-string>ARPA</encapsulation-type-string>
<mtu>1514</mtu>
<is-l2-transport-enabled>false</is-l2-transport-enabled>
<state-transition-count>0</state-transition-count>
<last-state-transition-time>1100377</last-state-transition-time>
<is-dampening-enabled>false</is-dampening-enabled>
<speed>1000000</speed>
<duplexity>im-attr-duplex-full</duplexity>
<media-type>im-attr-media-other</media-type>
<link-type>im-attr-link-type-force</link-type>
<in-flow-control>im-attr-flow-control-off</in-flow-control>
<out-flow-control>im-attr-flow-control-off</out-flow-control>
<mac-address>
<address>52:54:00:ff:f9:5e</address>
</mac-address>
<burned-in-address>
<address>52:54:00:ff:f9:5e</address>
</burned-in-address>
<carrier-delay>
<carrier-delay-up>10</carrier-delay-up>
<carrier-delay-down>0</carrier-delay-down>
</carrier-delay>
<bandwidth>1000000</bandwidth>
<max-bandwidth>1000000</max-bandwidth>
<is-l2-looped>false</is-l2-looped>
<description></description>
<data-rates>
<input-data-rate>0</input-data-rate>
<input-packet-rate>0</input-packet-rate>
<output-data-rate>0</output-data-rate>
<output-packet-rate>0</output-packet-rate>
<peak-input-data-rate>0</peak-input-data-rate>
<peak-input-packet-rate>0</peak-input-packet-rate>
<peak-output-data-rate>0</peak-output-data-rate>
<peak-output-packet-rate>0</peak-output-packet-rate>
<bandwidth>1000000</bandwidth>
<load-interval>9</load-interval>
<output-load>0</output-load>
<input-load>0</input-load>
<reliability>255</reliability>
</data-rates>
<interface-statistics>
<stats-type>full</stats-type>
<full-interface-stats>
<packets-received>0</packets-received>
<bytes-received>0</bytes-received>
<packets-sent>0</packets-sent>
<bytes-sent>0</bytes-sent>
<multicast-packets-received>0</multicast-packets-received>
<broadcast-packets-received>0</broadcast-packets-received>
<multicast-packets-sent>0</multicast-packets-sent>
<broadcast-packets-sent>0</broadcast-packets-sent>
<output-drops>0</output-drops>
<output-queue-drops>0</output-queue-drops>
<input-drops>0</input-drops>
<input-queue-drops>0</input-queue-drops>
<runt-packets-received>0</runt-packets-received>
<giant-packets-received>0</giant-packets-received>
<throttled-packets-received>0</throttled-packets-received>
<parity-packets-received>0</parity-packets-received>
<unknown-protocol-packets-received>0</unknown-protocol-packets-received>
<input-errors>0</input-errors>
<crc-errors>0</crc-errors>
<input-overruns>0</input-overruns>
<framing-errors-received>0</framing-errors-received>
<input-ignored-packets>0</input-ignored-packets>
<input-aborts>0</input-aborts>
<output-errors>0</output-errors>
<output-underruns>0</output-underruns>
<output-buffer-failures>0</output-buffer-failures>
<output-buffers-swapped-out>0</output-buffers-swapped-out>
<applique>0</applique>
<resets>0</resets>
<carrier-transitions>0</carrier-transitions>
<availability-flag>0</availability-flag>
<last-data-time>1490888108</last-data-time>
<seconds-since-last-clear-counters>0</seconds-since-last-clear-counters>
<last-discontinuity-time>1489787869</last-discontinuity-time>
<seconds-since-packet-received>4294967295</seconds-since-packet-received>
<seconds-since-packet-sent>4294967295</seconds-since-packet-sent>
</full-interface-stats>
</interface-statistics>
<if-index>0</if-index>
</interface>
<interface>
<interface-name>GigabitEthernet0/0/0/4</interface-name>
<interface-handle>GigabitEthernet0/0/0/4</interface-handle>
<interface-type>IFT_GETHERNET</interface-type>
<hardware-type-string>GigabitEthernet</hardware-type-string>
<state>im-state-admin-down</state>
<line-state>im-state-admin-down</line-state>
<encapsulation>ether</encapsulation>
<encapsulation-type-string>ARPA</encapsulation-type-string>
<mtu>1514</mtu>
<is-l2-transport-enabled>false</is-l2-transport-enabled>
<state-transition-count>0</state-transition-count>
<last-state-transition-time>1100377</last-state-transition-time>
<is-dampening-enabled>false</is-dampening-enabled>
<speed>1000000</speed>
<duplexity>im-attr-duplex-full</duplexity>
<media-type>im-attr-media-other</media-type>
<link-type>im-attr-link-type-force</link-type>
<in-flow-control>im-attr-flow-control-off</in-flow-control>
<out-flow-control>im-attr-flow-control-off</out-flow-control>
<mac-address>
<address>52:54:00:ff:23:6b</address>
</mac-address>
<burned-in-address>
<address>52:54:00:ff:23:6b</address>
</burned-in-address>
<carrier-delay>
<carrier-delay-up>10</carrier-delay-up>
<carrier-delay-down>0</carrier-delay-down>
</carrier-delay>
<bandwidth>1000000</bandwidth>
<max-bandwidth>1000000</max-bandwidth>
<is-l2-looped>false</is-l2-looped>
<description></description>
<data-rates>
<input-data-rate>0</input-data-rate>
<input-packet-rate>0</input-packet-rate>
<output-data-rate>0</output-data-rate>
<output-packet-rate>0</output-packet-rate>
<peak-input-data-rate>0</peak-input-data-rate>
<peak-input-packet-rate>0</peak-input-packet-rate>
<peak-output-data-rate>0</peak-output-data-rate>
<peak-output-packet-rate>0</peak-output-packet-rate>
<bandwidth>1000000</bandwidth>
<load-interval>9</load-interval>
<output-load>0</output-load>
<input-load>0</input-load>
<reliability>255</reliability>
</data-rates>
<interface-statistics>
<stats-type>full</stats-type>
<full-interface-stats>
<packets-received>0</packets-received>
<bytes-received>0</bytes-received>
<packets-sent>0</packets-sent>
<bytes-sent>0</bytes-sent>
<multicast-packets-received>0</multicast-packets-received>
<broadcast-packets-received>0</broadcast-packets-received>
<multicast-packets-sent>0</multicast-packets-sent>
<broadcast-packets-sent>0</broadcast-packets-sent>
<output-drops>0</output-drops>
<output-queue-drops>0</output-queue-drops>
<input-drops>0</input-drops>
<input-queue-drops>0</input-queue-drops>
<runt-packets-received>0</runt-packets-received>
<giant-packets-received>0</giant-packets-received>
<throttled-packets-received>0</throttled-packets-received>
<parity-packets-received>0</parity-packets-received>
<unknown-protocol-packets-received>0</unknown-protocol-packets-received>
<input-errors>0</input-errors>
<crc-errors>0</crc-errors>
<input-overruns>0</input-overruns>
<framing-errors-received>0</framing-errors-received>
<input-ignored-packets>0</input-ignored-packets>
<input-aborts>0</input-aborts>
<output-errors>0</output-errors>
<output-underruns>0</output-underruns>
<output-buffer-failures>0</output-buffer-failures>
<output-buffers-swapped-out>0</output-buffers-swapped-out>
<applique>0</applique>
<resets>0</resets>
<carrier-transitions>0</carrier-transitions>
<availability-flag>0</availability-flag>
<last-data-time>1490888108</last-data-time>
<seconds-since-last-clear-counters>0</seconds-since-last-clear-counters>
<last-discontinuity-time>1489787869</last-discontinuity-time>
<seconds-since-packet-received>4294967295</seconds-since-packet-received>
<seconds-since-packet-sent>4294967295</seconds-since-packet-sent>
</full-interface-stats>
</interface-statistics>
<if-index>0</if-index>
</interface>
<interface>
<interface-name>GigabitEthernet0/0/0/5</interface-name>
<interface-handle>GigabitEthernet0/0/0/5</interface-handle>
<interface-type>IFT_GETHERNET</interface-type>
<hardware-type-string>GigabitEthernet</hardware-type-string>
<state>im-state-admin-down</state>
<line-state>im-state-admin-down</line-state>
<encapsulation>ether</encapsulation>
<encapsulation-type-string>ARPA</encapsulation-type-string>
<mtu>1514</mtu>
<is-l2-transport-enabled>false</is-l2-transport-enabled>
<state-transition-count>0</state-transition-count>
<last-state-transition-time>1100377</last-state-transition-time>
<is-dampening-enabled>false</is-dampening-enabled>
<speed>1000000</speed>
<duplexity>im-attr-duplex-full</duplexity>
<media-type>im-attr-media-other</media-type>
<link-type>im-attr-link-type-force</link-type>
<in-flow-control>im-attr-flow-control-off</in-flow-control>
<out-flow-control>im-attr-flow-control-off</out-flow-control>
<mac-address>
<address>52:54:00:ff:f5:54</address>
</mac-address>
<burned-in-address>
<address>52:54:00:ff:f5:54</address>
</burned-in-address>
<carrier-delay>
<carrier-delay-up>10</carrier-delay-up>
<carrier-delay-down>0</carrier-delay-down>
</carrier-delay>
<bandwidth>1000000</bandwidth>
<max-bandwidth>1000000</max-bandwidth>
<is-l2-looped>false</is-l2-looped>
<description></description>
<data-rates>
<input-data-rate>0</input-data-rate>
<input-packet-rate>0</input-packet-rate>
<output-data-rate>0</output-data-rate>
<output-packet-rate>0</output-packet-rate>
<peak-input-data-rate>0</peak-input-data-rate>
<peak-input-packet-rate>0</peak-input-packet-rate>
<peak-output-data-rate>0</peak-output-data-rate>
<peak-output-packet-rate>0</peak-output-packet-rate>
<bandwidth>1000000</bandwidth>
<load-interval>9</load-interval>
<output-load>0</output-load>
<input-load>0</input-load>
<reliability>255</reliability>
</data-rates>
<interface-statistics>
<stats-type>full</stats-type>
<full-interface-stats>
<packets-received>0</packets-received>
<bytes-received>0</bytes-received>
<packets-sent>0</packets-sent>
<bytes-sent>0</bytes-sent>
<multicast-packets-received>0</multicast-packets-received>
<broadcast-packets-received>0</broadcast-packets-received>
<multicast-packets-sent>0</multicast-packets-sent>
<broadcast-packets-sent>0</broadcast-packets-sent>
<output-drops>0</output-drops>
<output-queue-drops>0</output-queue-drops>
<input-drops>0</input-drops>
<input-queue-drops>0</input-queue-drops>
<runt-packets-received>0</runt-packets-received>
<giant-packets-received>0</giant-packets-received>
<throttled-packets-received>0</throttled-packets-received>
<parity-packets-received>0</parity-packets-received>
<unknown-protocol-packets-received>0</unknown-protocol-packets-received>
<input-errors>0</input-errors>
<crc-errors>0</crc-errors>
<input-overruns>0</input-overruns>
<framing-errors-received>0</framing-errors-received>
<input-ignored-packets>0</input-ignored-packets>
<input-aborts>0</input-aborts>
<output-errors>0</output-errors>
<output-underruns>0</output-underruns>
<output-buffer-failures>0</output-buffer-failures>
<output-buffers-swapped-out>0</output-buffers-swapped-out>
<applique>0</applique>
<resets>0</resets>
<carrier-transitions>0</carrier-transitions>
<availability-flag>0</availability-flag>
<last-data-time>1490888108</last-data-time>
<seconds-since-last-clear-counters>0</seconds-since-last-clear-counters>
<last-discontinuity-time>1489787869</last-discontinuity-time>
<seconds-since-packet-received>4294967295</seconds-since-packet-received>
<seconds-since-packet-sent>4294967295</seconds-since-packet-sent>
</full-interface-stats>
</interface-statistics>
<if-index>0</if-index>
</interface>
<interface>
<interface-name>GigabitEthernet0/0/0/6</interface-name>
<interface-handle>GigabitEthernet0/0/0/6</interface-handle>
<interface-type>IFT_GETHERNET</interface-type>
<hardware-type-string>GigabitEthernet</hardware-type-string>
<state>im-state-admin-down</state>
<line-state>im-state-admin-down</line-state>
<encapsulation>ether</encapsulation>
<encapsulation-type-string>ARPA</encapsulation-type-string>
<mtu>1514</mtu>
<is-l2-transport-enabled>false</is-l2-transport-enabled>
<state-transition-count>0</state-transition-count>
<last-state-transition-time>1100377</last-state-transition-time>
<is-dampening-enabled>false</is-dampening-enabled>
<speed>1000000</speed>
<duplexity>im-attr-duplex-full</duplexity>
<media-type>im-attr-media-other</media-type>
<link-type>im-attr-link-type-force</link-type>
<in-flow-control>im-attr-flow-control-off</in-flow-control>
<out-flow-control>im-attr-flow-control-off</out-flow-control>
<mac-address>
<address>52:54:00:ff:d2:b1</address>
</mac-address>
<burned-in-address>
<address>52:54:00:ff:d2:b1</address>
</burned-in-address>
<carrier-delay>
<carrier-delay-up>10</carrier-delay-up>
<carrier-delay-down>0</carrier-delay-down>
</carrier-delay>
<bandwidth>1000000</bandwidth>
<max-bandwidth>1000000</max-bandwidth>
<is-l2-looped>false</is-l2-looped>
<description></description>
<data-rates>
<input-data-rate>0</input-data-rate>
<input-packet-rate>0</input-packet-rate>
<output-data-rate>0</output-data-rate>
<output-packet-rate>0</output-packet-rate>
<peak-input-data-rate>0</peak-input-data-rate>
<peak-input-packet-rate>0</peak-input-packet-rate>
<peak-output-data-rate>0</peak-output-data-rate>
<peak-output-packet-rate>0</peak-output-packet-rate>
<bandwidth>1000000</bandwidth>
<load-interval>9</load-interval>
<output-load>0</output-load>
<input-load>0</input-load>
<reliability>255</reliability>
</data-rates>
<interface-statistics>
<stats-type>full</stats-type>
<full-interface-stats>
<packets-received>0</packets-received>
<bytes-received>0</bytes-received>
<packets-sent>0</packets-sent>
<bytes-sent>0</bytes-sent>
<multicast-packets-received>0</multicast-packets-received>
<broadcast-packets-received>0</broadcast-packets-received>
<multicast-packets-sent>0</multicast-packets-sent>
<broadcast-packets-sent>0</broadcast-packets-sent>
<output-drops>0</output-drops>
<output-queue-drops>0</output-queue-drops>
<input-drops>0</input-drops>
<input-queue-drops>0</input-queue-drops>
<runt-packets-received>0</runt-packets-received>
<giant-packets-received>0</giant-packets-received>
<throttled-packets-received>0</throttled-packets-received>
<parity-packets-received>0</parity-packets-received>
<unknown-protocol-packets-received>0</unknown-protocol-packets-received>
<input-errors>0</input-errors>
<crc-errors>0</crc-errors>
<input-overruns>0</input-overruns>
<framing-errors-received>0</framing-errors-received>
<input-ignored-packets>0</input-ignored-packets>
<input-aborts>0</input-aborts>
<output-errors>0</output-errors>
<output-underruns>0</output-underruns>
<output-buffer-failures>0</output-buffer-failures>
<output-buffers-swapped-out>0</output-buffers-swapped-out>
<applique>0</applique>
<resets>0</resets>
<carrier-transitions>0</carrier-transitions>
<availability-flag>0</availability-flag>
<last-data-time>1490888108</last-data-time>
<seconds-since-last-clear-counters>0</seconds-since-last-clear-counters>
<last-discontinuity-time>1489787869</last-discontinuity-time>
<seconds-since-packet-received>4294967295</seconds-since-packet-received>
<seconds-since-packet-sent>4294967295</seconds-since-packet-sent>
</full-interface-stats>
</interface-statistics>
<if-index>0</if-index>
</interface>
<interface>
<interface-name>MgmtEth0/0/CPU0/0</interface-name>
<interface-handle>MgmtEth0/0/CPU0/0</interface-handle>
<interface-type>IFT_ETHERNET</interface-type>
<hardware-type-string>Management Ethernet</hardware-type-string>
<state>im-state-up</state>
<line-state>im-state-up</line-state>
<encapsulation>ether</encapsulation>
<encapsulation-type-string>ARPA</encapsulation-type-string>
<mtu>1514</mtu>
<is-l2-transport-enabled>false</is-l2-transport-enabled>
<state-transition-count>1</state-transition-count>
<last-state-transition-time>1100222</last-state-transition-time>
<is-dampening-enabled>false</is-dampening-enabled>
<speed>1000000</speed>
<duplexity>im-attr-duplex-unknown</duplexity>
<media-type>im-attr-media-other</media-type>
<link-type>im-attr-link-type-auto</link-type>
<in-flow-control>im-attr-flow-control-off</in-flow-control>
<out-flow-control>im-attr-flow-control-off</out-flow-control>
<mac-address>
<address>52:54:00:ff:99:42</address>
</mac-address>
<burned-in-address>
<address>52:54:00:ff:99:42</address>
</burned-in-address>
<carrier-delay>
<carrier-delay-up>10</carrier-delay-up>
<carrier-delay-down>0</carrier-delay-down>
</carrier-delay>
<bandwidth>1000000</bandwidth>
<max-bandwidth>1000000</max-bandwidth>
<is-l2-looped>false</is-l2-looped>
<description></description>
<arp-information>
<arp-timeout>14400</arp-timeout>
<arp-type-name>ARPA</arp-type-name>
<arp-is-learning-disabled>false</arp-is-learning-disabled>
</arp-information>
<ip-information>
<ip-address>10.85.112.123</ip-address>
<subnet-mask-length>25</subnet-mask-length>
</ip-information>
<data-rates>
<input-data-rate>192</input-data-rate>
<input-packet-rate>392</input-packet-rate>
<output-data-rate>70</output-data-rate>
<output-packet-rate>105</output-packet-rate>
<peak-input-data-rate>0</peak-input-data-rate>
<peak-input-packet-rate>0</peak-input-packet-rate>
<peak-output-data-rate>0</peak-output-data-rate>
<peak-output-packet-rate>0</peak-output-packet-rate>
<bandwidth>1000000</bandwidth>
<load-interval>9</load-interval>
<output-load>0</output-load>
<input-load>0</input-load>
<reliability>255</reliability>
</data-rates>
<interface-statistics>
<stats-type>full</stats-type>
<full-interface-stats>
<packets-received>228836760</packets-received>
<bytes-received>13447429857</bytes-received>
<packets-sent>56486840</packets-sent>
<bytes-sent>4095136965</bytes-sent>
<multicast-packets-received>1042005</multicast-packets-received>
<broadcast-packets-received>174752</broadcast-packets-received>
<multicast-packets-sent>0</multicast-packets-sent>
<broadcast-packets-sent>21</broadcast-packets-sent>
<output-drops>0</output-drops>
<output-queue-drops>0</output-queue-drops>
<input-drops>0</input-drops>
<input-queue-drops>0</input-queue-drops>
<runt-packets-received>0</runt-packets-received>
<giant-packets-received>0</giant-packets-received>
<throttled-packets-received>0</throttled-packets-received>
<parity-packets-received>0</parity-packets-received>
<unknown-protocol-packets-received>0</unknown-protocol-packets-received>
<input-errors>0</input-errors>
<crc-errors>0</crc-errors>
<input-overruns>0</input-overruns>
<framing-errors-received>0</framing-errors-received>
<input-ignored-packets>0</input-ignored-packets>
<input-aborts>0</input-aborts>
<output-errors>0</output-errors>
<output-underruns>0</output-underruns>
<output-buffer-failures>0</output-buffer-failures>
<output-buffers-swapped-out>0</output-buffers-swapped-out>
<applique>0</applique>
<resets>0</resets>
<carrier-transitions>1</carrier-transitions>
<availability-flag>0</availability-flag>
<last-data-time>1490888108</last-data-time>
<seconds-since-last-clear-counters>0</seconds-since-last-clear-counters>
<last-discontinuity-time>1489787864</last-discontinuity-time>
<seconds-since-packet-received>0</seconds-since-packet-received>
<seconds-since-packet-sent>0</seconds-since-packet-sent>
</full-interface-stats>
</interface-statistics>
<if-index>0</if-index>
</interface>
<interface>
<interface-name>Null0</interface-name>
<interface-handle>Null0</interface-handle>
<interface-type>IFT_NULL</interface-type>
<hardware-type-string>Null interface</hardware-type-string>
<state>im-state-up</state>
<line-state>im-state-up</line-state>
<encapsulation>null</encapsulation>
<encapsulation-type-string>Null</encapsulation-type-string>
<mtu>1500</mtu>
<is-l2-transport-enabled>false</is-l2-transport-enabled>
<state-transition-count>1</state-transition-count>
<last-state-transition-time>1100254</last-state-transition-time>
<is-dampening-enabled>false</is-dampening-enabled>
<bandwidth>0</bandwidth>
<max-bandwidth>0</max-bandwidth>
<is-l2-looped>false</is-l2-looped>
<description></description>
<data-rates>
<input-data-rate>0</input-data-rate>
<input-packet-rate>0</input-packet-rate>
<output-data-rate>0</output-data-rate>
<output-packet-rate>0</output-packet-rate>
<peak-input-data-rate>0</peak-input-data-rate>
<peak-input-packet-rate>0</peak-input-packet-rate>
<peak-output-data-rate>0</peak-output-data-rate>
<peak-output-packet-rate>0</peak-output-packet-rate>
<bandwidth>0</bandwidth>
<load-interval>9</load-interval>
<output-load>0</output-load>
<input-load>0</input-load>
<reliability>255</reliability>
</data-rates>
<interface-statistics>
<stats-type>full</stats-type>
<full-interface-stats>
<packets-received>0</packets-received>
<bytes-received>0</bytes-received>
<packets-sent>0</packets-sent>
<bytes-sent>0</bytes-sent>
<multicast-packets-received>0</multicast-packets-received>
<broadcast-packets-received>0</broadcast-packets-received>
<multicast-packets-sent>0</multicast-packets-sent>
<broadcast-packets-sent>0</broadcast-packets-sent>
<output-drops>0</output-drops>
<output-queue-drops>0</output-queue-drops>
<input-drops>0</input-drops>
<input-queue-drops>0</input-queue-drops>
<runt-packets-received>0</runt-packets-received>
<giant-packets-received>0</giant-packets-received>
<throttled-packets-received>0</throttled-packets-received>
<parity-packets-received>0</parity-packets-received>
<unknown-protocol-packets-received>0</unknown-protocol-packets-received>
<input-errors>0</input-errors>
<crc-errors>0</crc-errors>
<input-overruns>0</input-overruns>
<framing-errors-received>0</framing-errors-received>
<input-ignored-packets>0</input-ignored-packets>
<input-aborts>0</input-aborts>
<output-errors>0</output-errors>
<output-underruns>0</output-underruns>
<output-buffer-failures>0</output-buffer-failures>
<output-buffers-swapped-out>0</output-buffers-swapped-out>
<applique>0</applique>
<resets>0</resets>
<carrier-transitions>0</carrier-transitions>
<availability-flag>0</availability-flag>
<last-data-time>1490888108</last-data-time>
<seconds-since-last-clear-counters>0</seconds-since-last-clear-counters>
<last-discontinuity-time>1489787884</last-discontinuity-time>
<seconds-since-packet-received>4294967295</seconds-since-packet-received>
<seconds-since-packet-sent>4294967295</seconds-since-packet-sent>
</full-interface-stats>
</interface-statistics>
<if-index>0</if-index>
</interface>
</interface-xr>
</interfaces>
</data>
''')
golden_output = {'get.return_value': etree_holder()}
def test_golden(self):
self.device = Mock(**self.golden_output)
intf_obj = ShowEthernetTags(device=self.device)
intf_obj.context = Context.yang.value.split()
parsed_output = intf_obj.parse()
self.assertEqual(parsed_output,self.golden_parsed_output)
empty_parsed_output = {'interface': {'GigabitEthernet0/0/0/0': {'sub_interface': {'GigabitEthernet0/0/0/0.501': {}}}}}
class empty_etree_holder():
def __init__(self):
self.data = ET.fromstring('''
<data>
<interfaces xmlns="http://cisco.com/ns/yang/Cisco-IOS-XR-pfi-im-cmd-oper">
<interface-xr>
<interface>
<interface-name>GigabitEthernet0/0/0/0.501</interface-name>
<interface-handle>GigabitEthernet0/0/0/0.501</interface-handle>
<interface-type>IFT_VLAN_SUBIF</interface-type>
<hardware-type-string>VLAN sub-interface(s)</hardware-type-string>
<state>im-state-up</state>
<line-state>im-state-up</line-state>
<encapsulation>dot1q</encapsulation>
<encapsulation-type-string>802.1Q</encapsulation-type-string>
<mtu>1522</mtu>
<is-l2-transport-enabled>false</is-l2-transport-enabled>
<state-transition-count>1</state-transition-count>
<last-state-transition-time>1100222</last-state-transition-time>
<is-dampening-enabled>false</is-dampening-enabled>
<speed>1000000</speed>
<mac-address>
<address>52:54:00:ff:50:0e</address>
</mac-address>
<carrier-delay>
<carrier-delay-up>0</carrier-delay-up>
<carrier-delay-down>0</carrier-delay-down>
</carrier-delay>
<bandwidth>1000000</bandwidth>
<max-bandwidth>1000000</max-bandwidth>
<is-l2-looped>false</is-l2-looped>
<parent-interface-name>GigabitEthernet0/0/0/0</parent-interface-name>
<description></description>
<encapsulation-information>
</encapsulation-information>
<data-rates>
<input-data-rate>0</input-data-rate>
<input-packet-rate>0</input-packet-rate>
<output-data-rate>0</output-data-rate>
<output-packet-rate>0</output-packet-rate>
<peak-input-data-rate>0</peak-input-data-rate>
<peak-input-packet-rate>0</peak-input-packet-rate>
<peak-output-data-rate>0</peak-output-data-rate>
<peak-output-packet-rate>0</peak-output-packet-rate>
<bandwidth>1000000</bandwidth>
<load-interval>9</load-interval>
<output-load>0</output-load>
<input-load>0</input-load>
<reliability>255</reliability>
</data-rates>
<interface-statistics>
<stats-type>full</stats-type>
<full-interface-stats>
<packets-received>0</packets-received>
<bytes-received>0</bytes-received>
<packets-sent>0</packets-sent>
<bytes-sent>0</bytes-sent>
<multicast-packets-received>0</multicast-packets-received>
<broadcast-packets-received>0</broadcast-packets-received>
<multicast-packets-sent>0</multicast-packets-sent>
<broadcast-packets-sent>0</broadcast-packets-sent>
<output-drops>0</output-drops>
<output-queue-drops>0</output-queue-drops>
<input-drops>0</input-drops>
<input-queue-drops>0</input-queue-drops>
<runt-packets-received>0</runt-packets-received>
<giant-packets-received>0</giant-packets-received>
<throttled-packets-received>0</throttled-packets-received>
<parity-packets-received>0</parity-packets-received>
<unknown-protocol-packets-received>0</unknown-protocol-packets-received>
<input-errors>0</input-errors>
<crc-errors>0</crc-errors>
<input-overruns>0</input-overruns>
<framing-errors-received>0</framing-errors-received>
<input-ignored-packets>0</input-ignored-packets>
<input-aborts>0</input-aborts>
<output-errors>0</output-errors>
<output-underruns>0</output-underruns>
<output-buffer-failures>0</output-buffer-failures>
<output-buffers-swapped-out>0</output-buffers-swapped-out>
<applique>0</applique>
<resets>0</resets>
<carrier-transitions>0</carrier-transitions>
<availability-flag>0</availability-flag>
<last-data-time>1490888108</last-data-time>
<seconds-since-last-clear-counters>0</seconds-since-last-clear-counters>
<last-discontinuity-time>1489787915</last-discontinuity-time>
<seconds-since-packet-received>4294967295</seconds-since-packet-received>
<seconds-since-packet-sent>4294967295</seconds-since-packet-sent>
</full-interface-stats>
</interface-statistics>
<if-index>0</if-index>
</interface>
</interface-xr>
</interfaces>
</data>
''')
empty_output = {'get.return_value': empty_etree_holder()}
def test_empty(self):
self.device1 = Mock(**self.empty_output)
intf_obj = ShowEthernetTags(device=self.device1)
intf_obj.context = Context.yang.value.split()
parsed_output = intf_obj.parse()
self.assertEqual(parsed_output,self.empty_parsed_output)
if __name__ == '__main__':
unittest.main()
| 52.58473
| 172
| 0.575961
| 6,055
| 56,476
| 5.359207
| 0.042444
| 0.081356
| 0.041911
| 0.013559
| 0.954299
| 0.948351
| 0.939784
| 0.929831
| 0.926903
| 0.925054
| 0
| 0.044547
| 0.281748
| 56,476
| 1,074
| 173
| 52.58473
| 0.755424
| 0
| 0
| 0.910293
| 0
| 0.029273
| 0.966092
| 0.655453
| 0
| 0
| 0
| 0
| 0.001889
| 1
| 0.003777
| false
| 0
| 0.007554
| 0
| 0.020774
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
9da2a40aaf92cd389d0f1554057abd6bf6bed7b7
| 1,284
|
py
|
Python
|
two_sum/two_sum_test.py
|
kevinzen/learning
|
148129a1ec48e86e74c6ed244ba50ab682ebf00b
|
[
"MIT"
] | null | null | null |
two_sum/two_sum_test.py
|
kevinzen/learning
|
148129a1ec48e86e74c6ed244ba50ab682ebf00b
|
[
"MIT"
] | null | null | null |
two_sum/two_sum_test.py
|
kevinzen/learning
|
148129a1ec48e86e74c6ed244ba50ab682ebf00b
|
[
"MIT"
] | null | null | null |
import unittest
from two_sum.solution import Solution
class MyTestCase(unittest.TestCase):
def test_two_sum(self):
s = Solution()
nums = [2,7,11,15]
target = 9
result = s.twoSum(nums, target)
self.assertEqual(result, [0,1])
nums = [-1,-2,-3,-4,-5]
target = -8
result = s.twoSum(nums, target)
self.assertEqual(result, [2,4])
def test_two_sum_two_pass_hash(self):
s = Solution()
nums = [2,7,11,15]
target = 9
result = s.twoSumTwoPassHash(nums, target)
self.assertEqual(result, [0,1])
nums = [-1,-2,-3,-4,-5]
target = -8
result = s.twoSumTwoPassHash(nums, target)
self.assertEqual(result, [2,4])
def test_two_sum_one_pass_hash(self):
s = Solution()
# nums = [2,7,11,15]
# target = 9
# result = s.twoSumOnePassHash(nums, target)
# self.assertEqual(result, [0,1])
#
#
# nums = [-1,-2,-3,-4,-5]
# target = -8
# result = s.twoSumOnePassHash(nums, target)
# self.assertEqual(result, [2,4])
#
nums = [3,3]
target = 6
result = s.twoSumOnePassHash(nums, target)
self.assertEqual(result, [0,1])
| 22.137931
| 52
| 0.528816
| 159
| 1,284
| 4.188679
| 0.220126
| 0.073574
| 0.147147
| 0.262763
| 0.836336
| 0.836336
| 0.836336
| 0.833333
| 0.692192
| 0.692192
| 0
| 0.06474
| 0.326324
| 1,284
| 57
| 53
| 22.526316
| 0.705202
| 0.167445
| 0
| 0.689655
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172414
| 1
| 0.103448
| false
| 0.172414
| 0.068966
| 0
| 0.206897
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
d1cc16d682e4eca40ad13070671c8d9cf599a162
| 102
|
py
|
Python
|
oops/#040_garbageCollection.py
|
krishankansal/PythonPrograms
|
6d4d989068195b8c8dd9d71cf4f920fef1177cf2
|
[
"MIT"
] | null | null | null |
oops/#040_garbageCollection.py
|
krishankansal/PythonPrograms
|
6d4d989068195b8c8dd9d71cf4f920fef1177cf2
|
[
"MIT"
] | null | null | null |
oops/#040_garbageCollection.py
|
krishankansal/PythonPrograms
|
6d4d989068195b8c8dd9d71cf4f920fef1177cf2
|
[
"MIT"
] | null | null | null |
import gc
print(gc.isenabled())
gc.disable()
print(gc.isenabled())
gc.enable()
print(gc.isenabled())
| 12.75
| 21
| 0.715686
| 15
| 102
| 4.866667
| 0.4
| 0.287671
| 0.657534
| 0.493151
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.078431
| 102
| 7
| 22
| 14.571429
| 0.776596
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.166667
| 0
| 0.166667
| 0.5
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
ae06d179d5b9be423275a44eeee9ffa91a442613
| 22
|
py
|
Python
|
randomstate/prng/mt19937/__init__.py
|
bashtage/ng-numpy-randomstate
|
b397db9cb8688b291fc40071ab043009dfa05a85
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 43
|
2016-02-11T03:38:16.000Z
|
2022-02-03T10:00:15.000Z
|
randomstate/prng/mt19937/__init__.py
|
bashtage/pcg-python
|
b397db9cb8688b291fc40071ab043009dfa05a85
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 31
|
2015-12-26T19:47:36.000Z
|
2018-12-10T15:55:46.000Z
|
randomstate/prng/mt19937/__init__.py
|
bashtage/ng-numpy-randomstate
|
b397db9cb8688b291fc40071ab043009dfa05a85
|
[
"Apache-2.0",
"BSD-3-Clause"
] | 11
|
2016-04-28T02:00:38.000Z
|
2020-08-07T10:33:10.000Z
|
from .mt19937 import *
| 22
| 22
| 0.772727
| 3
| 22
| 5.666667
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.263158
| 0.136364
| 22
| 1
| 22
| 22
| 0.631579
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ae479698d6297559d1027e1551aa18bc9d3b6779
| 25,046
|
py
|
Python
|
code/deep-high-resolution-net.pytorch/lib/core/function.py
|
SomaKishimoto/AwA-Pose
|
b9877d538af25d07a6e2f9fa0b5aa2fdf544db99
|
[
"MIT"
] | 12
|
2021-09-01T16:44:29.000Z
|
2022-03-25T03:36:47.000Z
|
code/deep-high-resolution-net.pytorch/lib/core/function.py
|
SomaKishimoto/AwA-Pose
|
b9877d538af25d07a6e2f9fa0b5aa2fdf544db99
|
[
"MIT"
] | 1
|
2021-09-01T15:07:20.000Z
|
2021-09-01T16:44:09.000Z
|
code/deep-high-resolution-net.pytorch/lib/core/function.py
|
SomaKishimoto/AwA-Pose
|
b9877d538af25d07a6e2f9fa0b5aa2fdf544db99
|
[
"MIT"
] | 1
|
2021-09-08T07:34:20.000Z
|
2021-09-08T07:34:20.000Z
|
# ------------------------------------------------------------------------------
# Copyright (c) Microsoft
# Licensed under the MIT License.
# Written by Bin Xiao (Bin.Xiao@microsoft.com)
# ------------------------------------------------------------------------------
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import time
import logging
import os
import numpy as np
import torch
from core.evaluate import accuracy, accuracy_bbox
from core.inference import get_final_preds
from utils.transforms import flip_back
from utils.vis import save_debug_images, save_debug_images_w_bbox
logger = logging.getLogger(__name__)
def train(config, train_loader, model, criterion, optimizer, epoch,
          output_dir, tb_log_dir, writer_dict):
    """Run one training epoch.

    Args:
        config: experiment config; reads ``config.PRINT_FREQ`` here.
        train_loader: yields ``(input, target, target_weight, meta)`` batches;
            ``meta['bbox']`` supplies per-sample boxes for the PCK threshold.
        model: network; called as ``model(input)``; may return a single
            heatmap tensor or a list of intermediate outputs.
        criterion: loss callable taking ``(output, target, target_weight)``.
        optimizer: stepped once per batch.
        epoch: current epoch index (logging only).
        output_dir: directory prefix for debug images.
        tb_log_dir: unused in this function (kept for interface parity).
        writer_dict: dict holding 'writer' (TensorBoard-style writer) and
            'train_global_steps'; the step counter is incremented in place.
    """
    batch_time = AverageMeter()
    data_time = AverageMeter()
    losses = AverageMeter()
    acc = AverageMeter()

    # switch to train mode
    model.train()

    end = time.time()
    for i, (input, target, target_weight, meta) in enumerate(train_loader):
        # measure data loading time
        data_time.update(time.time() - end)

        # compute output (forward pass happens before targets move to GPU)
        outputs = model(input)

        target = target.cuda(non_blocking=True)
        target_weight = target_weight.cuda(non_blocking=True)

        if isinstance(outputs, list):
            # deep supervision: sum the loss over every intermediate output
            loss = criterion(outputs[0], target, target_weight)
            for output in outputs[1:]:
                loss += criterion(output, target, target_weight)
        else:
            output = outputs
            loss = criterion(output, target, target_weight)

        # loss = criterion(output, target, target_weight)

        # compute gradient and do update step
        optimizer.zero_grad()
        loss.backward()
        optimizer.step()

        # measure accuracy and record loss
        losses.update(loss.item(), input.size(0))

        #_, avg_acc, cnt, pred = accuracy(output.detach().cpu().numpy(), target.detach().cpu().numpy())
        # bbox appears to be (batch, 2, 2) corner pairs [[x0, y0], [x1, y1]]
        # given this indexing — TODO(review): confirm against the dataset code.
        bbox = meta['bbox'].numpy()
        bbox_w = bbox[:, 1,0] - bbox[:, 0,0]
        bbox_h = bbox[:, 1,1] - bbox[:, 0,1]
        diagonal = np.sqrt(bbox_w * bbox_w + bbox_h * bbox_h)
        # PCK threshold is hard-coded to 0.001 * bbox diagonal here, unlike
        # the validate functions which use the ``thresh`` parameter.
        _, avg_acc, cnt, pred = accuracy_bbox(output.detach().cpu().numpy(),
                                              target.detach().cpu().numpy(), scale = None, thr=0.001*diagonal)
        acc.update(avg_acc, cnt)

        # measure elapsed time
        batch_time.update(time.time() - end)
        end = time.time()

        if i % config.PRINT_FREQ == 0:
            msg = 'Epoch: [{0}][{1}/{2}]\t' \
                  'Time {batch_time.val:.3f}s ({batch_time.avg:.3f}s)\t' \
                  'Speed {speed:.1f} samples/s\t' \
                  'Data {data_time.val:.3f}s ({data_time.avg:.3f}s)\t' \
                  'Loss {loss.val:.5f} ({loss.avg:.5f})\t' \
                  'Accuracy {acc.val:.3f} ({acc.avg:.3f})'.format(
                      epoch, i, len(train_loader), batch_time=batch_time,
                      speed=input.size(0)/batch_time.val,
                      data_time=data_time, loss=losses, acc=acc)
            logger.info(msg)

            writer = writer_dict['writer']
            global_steps = writer_dict['train_global_steps']
            writer.add_scalar('train_loss', losses.val, global_steps)
            writer.add_scalar('train_acc', acc.val, global_steps)
            writer_dict['train_global_steps'] = global_steps + 1

            # pred is in heatmap coordinates; *4 rescales to input-image
            # coordinates (heatmap is 1/4 of the input resolution).
            prefix = '{}_{}'.format(os.path.join(output_dir, 'train'), i)
            save_debug_images(config, input, meta, target, pred*4, output,
                              prefix)
def validate_old(config, val_loader, val_dataset, model, criterion, output_dir,
                 tb_log_dir, writer_dict=None, thresh = 0.0005, save_pickle=True):
    """Evaluate the model on ``val_loader`` and report bbox-normalized PCK.

    NOTE(review): with ``crnt_animal_id`` hard-coded to 8 below, only batches
    whose first image path contains ``animal_list[7]`` (i.e. 'deer') are
    evaluated; every other batch is skipped.

    Args:
        config: experiment config (MODEL.NUM_JOINTS, TEST.FLIP_TEST, ...).
        val_loader: yields ``(input, target, target_weight, meta)`` batches.
        val_dataset: dataset object; only ``len()`` and ``flip_pairs`` are used
            on the active code paths.
        model, criterion: network and loss, as in ``train``.
        output_dir: prefix for debug images (dead code path here).
        tb_log_dir: unused.
        writer_dict: only read inside a disabled (``if False``) branch.
        thresh: PCK threshold as a fraction of the bbox diagonal.
        save_pickle: currently unused (pickle saving is commented out).

    Returns:
        float: average PCK accuracy over the evaluated batches.
    """
    batch_time = AverageMeter()
    losses = AverageMeter()
    acc = AverageMeter()

    # switch to evaluate mode
    model.eval()

    num_samples = len(val_dataset)
    all_preds = np.zeros(
        (num_samples, config.MODEL.NUM_JOINTS, 3),
        dtype=np.float32
    )
    all_boxes = np.zeros((num_samples, 6))
    all_orig_boxes = np.zeros((num_samples, 2,2))
    image_path = []
    filenames = []
    imgnums = []
    idx = 0
    ind_accs = []
    animal_list = [
        'antelope',
        'bobcat',
        'buffalo',
        'chihuahua',
        'collie',
        'cow',
        'dalmatian',
        'deer',
        'elephant',
        'fox',
        'german+shepherd',
        'giant+panda',
        'giraffe',
        'grizzly+bear',
        'hippopotamus',
        'horse',
        'leopard',
        'lion',
        'moose',
        'otter',
        'ox',
        'persian+cat',
        'pig',
        'polar+bear',
        'rabbit',
        'raccoon',
        'rhinoceros',
        'sheep',
        'siamese+cat',
        'skunk',
        'squirrel',
        'tiger',
        'weasel',
        'wolf',
        'zebra']
    # animal_ids is computed but never used; crnt_animal_id = 8 pins the
    # filter below to 'deer' (animal_list[7]).
    animal_ids = list(range(8,36))
    crnt_animal_id = 8
    with torch.no_grad():
        end = time.time()
        for i, (input, target, target_weight, meta) in enumerate(val_loader):
            # compute output
            if crnt_animal_id > 0 and animal_list[crnt_animal_id - 1] not in meta['image'][0]:
                continue
            outputs = model(input)
            if isinstance(outputs, list):
                output = outputs[-1]
            else:
                output = outputs

            if config.TEST.FLIP_TEST:
                # this part is ugly, because pytorch has not supported negative index
                # input_flipped = model(input[:, :, :, ::-1])
                input_flipped = np.flip(input.cpu().numpy(), 3).copy()
                input_flipped = torch.from_numpy(input_flipped).cuda()
                outputs_flipped = model(input_flipped)

                if isinstance(outputs_flipped, list):
                    output_flipped = outputs_flipped[-1]
                else:
                    output_flipped = outputs_flipped

                output_flipped = flip_back(output_flipped.cpu().numpy(),
                                           val_dataset.flip_pairs)
                output_flipped = torch.from_numpy(output_flipped.copy()).cuda()

                # feature is not aligned, shift flipped heatmap for higher accuracy
                if config.TEST.SHIFT_HEATMAP:
                    output_flipped[:, :, :, 1:] = \
                        output_flipped.clone()[:, :, :, 0:-1]

                # average original and flipped predictions
                output = (output + output_flipped) * 0.5

            target = target.cuda(non_blocking=True)
            target_weight = target_weight.cuda(non_blocking=True)

            loss = criterion(output, target, target_weight)

            c = meta['center'].numpy()
            s = meta['scale'].numpy()
            score = meta['score'].numpy()
            bbox = meta['bbox'].numpy()
            #import ipdb; ipdb.set_trace()
            #exit(0)

            num_images = input.size(0)
            # measure accuracy and record loss
            losses.update(loss.item(), num_images)
            #w_h_original = (s[:, [1, 0]] * 200.0) / 1.25
            #diagonal = np.sqrt(s[:, 0] * s[:, 0] + s[:, 1] * s[:, 1])
            # bbox appears to be (batch, 2, 2) corner pairs — TODO confirm.
            bbox_w = bbox[:, 1,0] - bbox[:, 0,0]
            bbox_h = bbox[:, 1,1] - bbox[:, 0,1]
            diagonal = np.sqrt(bbox_w * bbox_w + bbox_h * bbox_h)
            ind_acc, avg_acc, cnt, pred = accuracy_bbox(output.cpu().numpy(),
                                                        target.cpu().numpy(), scale = s[:, [1, 0]], thr=thresh*diagonal) # swapping (w,h) to (h,w)
            # ind_acc[0] is skipped: per accuracy_bbox convention the first
            # entry is presumably the aggregate — TODO(review): confirm.
            ind_accs.append(ind_acc[1:])
            #print(avg_acc)
            acc.update(avg_acc, cnt)

            # measure elapsed time
            batch_time.update(time.time() - end)
            end = time.time()

            preds, maxvals = get_final_preds(
                config, output.clone().cpu().numpy(), c, s)

            all_preds[idx:idx + num_images, :, 0:2] = preds[:, :, 0:2]
            all_preds[idx:idx + num_images, :, 2:3] = maxvals
            # double check this all_boxes parts
            all_boxes[idx:idx + num_images, 0:2] = c[:, 0:2]
            all_boxes[idx:idx + num_images, 2:4] = s[:, 0:2]
            all_boxes[idx:idx + num_images, 4] = np.prod(s*200, 1)
            all_boxes[idx:idx + num_images, 5] = score
            image_path.extend(meta['image'])

            #saving pickles
            #if save_pickle:
            #    save_pred_pickle(config, input, meta, target, pred*4, output, prefix)

            idx += num_images

            # Debug-image dump is disabled (condition hard-coded to False).
            if False:#i % config.PRINT_FREQ == 0:
                msg = 'Test: [{0}/{1}]\t' \
                      'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t' \
                      'Loss {loss.val:.4f} ({loss.avg:.4f})\t' \
                      'Accuracy {acc.val:.3f} ({acc.avg:.3f})'.format(
                          i, len(val_loader), batch_time=batch_time,
                          loss=losses, acc=acc)
                logger.info(msg)

                prefix = '{}_{}'.format(
                    os.path.join(output_dir, 'val'), i
                )
                #save_debug_images(config, input, meta, target, pred*4, output, prefix)
                save_debug_images_w_bbox(config, input, meta, target, pred*4, output, prefix)
        #import ipdb; ipdb.set_trace()
        #exit(0)

        # Full COCO-style evaluation is disabled; perf_indicator is fixed.
        if False:
            name_values, perf_indicator = val_dataset.evaluate(
                config, all_preds, output_dir, all_boxes, image_path,
                filenames, imgnums
            )

            model_name = config.MODEL.NAME
            if isinstance(name_values, list):
                for name_value in name_values:
                    _print_name_value(name_value, model_name)
            else:
                _print_name_value(name_values, model_name)

            if writer_dict:
                writer = writer_dict['writer']
                global_steps = writer_dict['valid_global_steps']
                writer.add_scalar(
                    'valid_loss',
                    losses.avg,
                    global_steps
                )
                writer.add_scalar(
                    'valid_acc',
                    acc.avg,
                    global_steps
                )
                if isinstance(name_values, list):
                    for name_value in name_values:
                        writer.add_scalars(
                            'valid',
                            dict(name_value),
                            global_steps
                        )
                else:
                    writer.add_scalars(
                        'valid',
                        dict(name_values),
                        global_steps
                    )
                writer_dict['valid_global_steps'] = global_steps + 1
        else:
            perf_indicator = 100

    #return perf_indicator
    print('Average PCK @ '+ str(thresh) +'= ', acc.avg)

    # NOTE(review): ``ind_acc`` below is the loop variable leaked from the
    # last evaluated batch; if no batch matched the animal filter this
    # raises NameError.
    ind_acc = [0]*len(ind_acc[1:])
    ind_acc_count = [0]*len(ind_acc)
    for ind_batch in ind_accs:
        for i, val in enumerate(ind_batch):
            if val >= 0:
                # negative values presumably mark keypoints absent from the
                # batch, so they are excluded — TODO confirm in accuracy_bbox.
                ind_acc_count[i] += 1
                ind_acc[i] += val
    for i in range(len(ind_acc)):
        #import ipdb; ipdb.set_trace()
        #exit(0)
        if ind_acc_count[i] == 0:
            ind_acc[i] = -1
        else:
            ind_acc[i] = ind_acc[i] / float(ind_acc_count[i])
    # import ipdb; ipdb.set_trace()
    # exit(0)
    print("Independent keypoint accuracy: ",ind_acc)
    with open('Individual_Keypoint_Accuracy.txt', 'w') as f:
        for item in ind_acc:
            f.write("%s\n" % item)
    return acc.avg
def validate(config, val_loader, val_dataset, model, criterion, output_dir,
             tb_log_dir, writer_dict=None, thresh = 0.0005, save_pickle=True):
    """Per-animal evaluation: re-runs the whole PCK evaluation once for each
    animal class, filtering batches by image-filename prefix, and writes one
    per-animal keypoint-accuracy text file.

    NOTE(review): ``animal_list`` ends with 'all' but ``animal_ids`` only
    covers range(1, 36), so index 35 ('all') is never selected. Debug images
    are dumped for batches whose accuracy falls below 0.35.

    Args / returns: same interface as ``validate_old``; returns the average
    accuracy of the last evaluated animal.
    """
    animal_list = [
        'antelope',
        'bobcat',
        'buffalo',
        'chihuahua',
        'collie',
        'cow',
        'dalmatian',
        'deer',
        'elephant',
        'fox',
        'german+shepherd',
        'giant+panda',
        'giraffe',
        'grizzly+bear',
        'hippopotamus',
        'horse',
        'leopard',
        'lion',
        'moose',
        'otter',
        'ox',
        'persian+cat',
        'pig',
        'polar+bear',
        'rabbit',
        'raccoon',
        'rhinoceros',
        'sheep',
        'siamese+cat',
        'skunk',
        'squirrel',
        'tiger',
        'weasel',
        'wolf',
        'zebra',
        'all']
    animal_ids = list(range(1,36))
    #crnt_animal_id = 8
    for crnt_animal_id in animal_ids:
        # Fresh meters and accumulators per animal.
        batch_time = AverageMeter()
        losses = AverageMeter()
        acc = AverageMeter()

        # switch to evaluate mode
        model.eval()

        num_samples = len(val_dataset)
        all_preds = np.zeros(
            (num_samples, config.MODEL.NUM_JOINTS, 3),
            dtype=np.float32
        )
        all_boxes = np.zeros((num_samples, 6))
        all_orig_boxes = np.zeros((num_samples, 2,2))
        image_path = []
        filenames = []
        imgnums = []
        idx = 0
        ind_accs = []
        with torch.no_grad():
            end = time.time()
            for i, (input, target, target_weight, meta) in enumerate(val_loader):
                # compute output
                # import ipdb; ipdb.set_trace()
                # exit(0)
                # Keep only batches whose first image's basename starts with
                # the current animal's name.
                if crnt_animal_id > 0 and not meta['image'][0].split('/')[-1].startswith(animal_list[crnt_animal_id - 1]):
                    continue
                outputs = model(input)
                if isinstance(outputs, list):
                    output = outputs[-1]
                else:
                    output = outputs

                if config.TEST.FLIP_TEST:
                    # this part is ugly, because pytorch has not supported negative index
                    # input_flipped = model(input[:, :, :, ::-1])
                    input_flipped = np.flip(input.cpu().numpy(), 3).copy()
                    input_flipped = torch.from_numpy(input_flipped).cuda()
                    outputs_flipped = model(input_flipped)

                    if isinstance(outputs_flipped, list):
                        output_flipped = outputs_flipped[-1]
                    else:
                        output_flipped = outputs_flipped

                    output_flipped = flip_back(output_flipped.cpu().numpy(),
                                               val_dataset.flip_pairs)
                    output_flipped = torch.from_numpy(output_flipped.copy()).cuda()

                    # feature is not aligned, shift flipped heatmap for higher accuracy
                    if config.TEST.SHIFT_HEATMAP:
                        output_flipped[:, :, :, 1:] = \
                            output_flipped.clone()[:, :, :, 0:-1]

                    output = (output + output_flipped) * 0.5

                target = target.cuda(non_blocking=True)
                target_weight = target_weight.cuda(non_blocking=True)

                loss = criterion(output, target, target_weight)

                c = meta['center'].numpy()
                s = meta['scale'].numpy()
                score = meta['score'].numpy()
                bbox = meta['bbox'].numpy()

                num_images = input.size(0)
                # measure accuracy and record loss
                losses.update(loss.item(), num_images)
                #w_h_original = (s[:, [1, 0]] * 200.0) / 1.25
                #diagonal = np.sqrt(s[:, 0] * s[:, 0] + s[:, 1] * s[:, 1])
                # bbox appears to be (batch, 2, 2) corner pairs — TODO confirm.
                bbox_w = bbox[:, 1,0] - bbox[:, 0,0]
                bbox_h = bbox[:, 1,1] - bbox[:, 0,1]
                diagonal = np.sqrt(bbox_w * bbox_w + bbox_h * bbox_h)
                ind_acc, avg_acc, cnt, pred = accuracy_bbox(output.cpu().numpy(),
                                                            target.cpu().numpy(), scale = s[:, [1, 0]], thr=thresh*diagonal) # swapping (w,h) to (h,w)
                ind_accs.append(ind_acc[1:])
                #print(avg_acc)
                acc.update(avg_acc, cnt)

                # measure elapsed time
                batch_time.update(time.time() - end)
                end = time.time()

                preds, maxvals = get_final_preds(
                    config, output.clone().cpu().numpy(), c, s)

                all_preds[idx:idx + num_images, :, 0:2] = preds[:, :, 0:2]
                all_preds[idx:idx + num_images, :, 2:3] = maxvals
                # double check this all_boxes parts
                all_boxes[idx:idx + num_images, 0:2] = c[:, 0:2]
                all_boxes[idx:idx + num_images, 2:4] = s[:, 0:2]
                all_boxes[idx:idx + num_images, 4] = np.prod(s*200, 1)
                all_boxes[idx:idx + num_images, 5] = score
                image_path.extend(meta['image'])

                #saving pickles
                #if save_pickle:
                #    save_pred_pickle(config, input, meta, target, pred*4, output, prefix)

                idx += num_images

                # Dump debug images only for poorly predicted batches.
                if avg_acc < 0.35:#i % config.PRINT_FREQ == 0:
                    msg = 'Test: [{0}/{1}]\t' \
                          'Time {batch_time.val:.3f} ({batch_time.avg:.3f})\t' \
                          'Loss {loss.val:.4f} ({loss.avg:.4f})\t' \
                          'Accuracy {acc.val:.3f} ({acc.avg:.3f})'.format(
                              i, len(val_loader), batch_time=batch_time,
                              loss=losses, acc=acc)
                    logger.info(msg)

                    prefix = '{}_{}'.format(
                        os.path.join(output_dir, 'bad', animal_list[crnt_animal_id - 1]), i
                    )
                    #save_debug_images(config, input, meta, target, pred*4, output, prefix)
                    # output heatmap size becomes 4 times smaller than the input image
                    save_debug_images_w_bbox(config, input, meta, target, pred*4, output, prefix)
            # import ipdb; ipdb.set_trace()
            # exit(0)

            # Full evaluation is disabled; perf_indicator is fixed.
            if False:
                name_values, perf_indicator = val_dataset.evaluate(
                    config, all_preds, output_dir, all_boxes, image_path,
                    filenames, imgnums
                )

                model_name = config.MODEL.NAME
                if isinstance(name_values, list):
                    for name_value in name_values:
                        _print_name_value(name_value, model_name)
                else:
                    _print_name_value(name_values, model_name)

                if writer_dict:
                    writer = writer_dict['writer']
                    global_steps = writer_dict['valid_global_steps']
                    writer.add_scalar(
                        'valid_loss',
                        losses.avg,
                        global_steps
                    )
                    writer.add_scalar(
                        'valid_acc',
                        acc.avg,
                        global_steps
                    )
                    if isinstance(name_values, list):
                        for name_value in name_values:
                            writer.add_scalars(
                                'valid',
                                dict(name_value),
                                global_steps
                            )
                    else:
                        writer.add_scalars(
                            'valid',
                            dict(name_values),
                            global_steps
                        )
                    writer_dict['valid_global_steps'] = global_steps + 1
            else:
                perf_indicator = 100

        #return perf_indicator
        print('Average PCK @ '+ str(thresh) +'= ', acc.avg)

        # NOTE(review): ``ind_acc`` is the loop variable leaked from the last
        # matching batch; NameError if no batch matched this animal.
        ind_acc = [0]*len(ind_acc[1:])
        ind_acc_count = [0]*len(ind_acc)
        for ind_batch in ind_accs:
            for i, val in enumerate(ind_batch):
                if val >= 0:
                    ind_acc_count[i] += 1
                    ind_acc[i] += val
        for i in range(len(ind_acc)):
            #import ipdb; ipdb.set_trace()
            #exit(0)
            if ind_acc_count[i] == 0:
                ind_acc[i] = -1
            else:
                ind_acc[i] = ind_acc[i] / float(ind_acc_count[i])
        # import ipdb; ipdb.set_trace()
        # exit(0)
        print("Independent keypoint accuracy of " + animal_list[crnt_animal_id - 1] +": ",ind_acc)
        with open('Individual_Keypoint_Accuracy_'+ animal_list[crnt_animal_id - 1] +'.txt', 'w') as f:
            for item in ind_acc:
                f.write("%s\n" % item)
    # NOTE(review): placed outside the animal loop so all animals run before
    # returning; the dump this was recovered from lost indentation — confirm.
    return acc.avg
def generate_predited_keypoints_and_vis(config, img, model, output_dir,
                                        tb_log_dir):
    """Run the model and dump prediction visualisations.

    NOTE(review): this function appears to be an unfinished copy of
    ``validate``. It references many names that are neither parameters nor
    defined locally (``val_dataset``, ``val_loader``, ``criterion``,
    ``thresh``, ``losses``, ``acc``, ``batch_time``, ``prefix``), and the
    ``img`` parameter is never used — calling it as-is raises NameError.
    The body is documented but left untouched pending clarification of the
    intended behavior.
    """
    # switch to evaluate mode
    model.eval()

    num_samples = len(val_dataset)  # NOTE(review): val_dataset undefined here
    all_preds = np.zeros(
        (num_samples, config.MODEL.NUM_JOINTS, 3),
        dtype=np.float32
    )
    all_boxes = np.zeros((num_samples, 6))
    all_orig_boxes = np.zeros((num_samples, 2,2))
    image_path = []
    filenames = []
    imgnums = []
    idx = 0
    ind_accs = []
    with torch.no_grad():
        end = time.time()
        # NOTE(review): val_loader undefined — probably meant to be built
        # from ``img``.
        for i, (input, target, target_weight, meta) in enumerate(val_loader):
            # compute output
            outputs = model(input)
            if isinstance(outputs, list):
                output = outputs[-1]
            else:
                output = outputs

            target = target.cuda(non_blocking=True)
            target_weight = target_weight.cuda(non_blocking=True)

            loss = criterion(output, target, target_weight)  # NOTE(review): criterion undefined

            c = meta['center'].numpy()
            s = meta['scale'].numpy()
            score = meta['score'].numpy()
            bbox = meta['bbox'].numpy()

            num_images = input.size(0)
            # measure accuracy and record loss
            losses.update(loss.item(), num_images)  # NOTE(review): losses undefined
            #w_h_original = (s[:, [1, 0]] * 200.0) / 1.25
            #diagonal = np.sqrt(s[:, 0] * s[:, 0] + s[:, 1] * s[:, 1])
            bbox_w = bbox[:, 1,0] - bbox[:, 0,0]
            bbox_h = bbox[:, 1,1] - bbox[:, 0,1]
            diagonal = np.sqrt(bbox_w * bbox_w + bbox_h * bbox_h)
            # NOTE(review): thresh undefined (no such parameter here).
            ind_acc, avg_acc, cnt, pred = accuracy_bbox(output.cpu().numpy(),
                                                        target.cpu().numpy(), scale = s[:, [1, 0]], thr=thresh*diagonal) # swapping (w,h) to (h,w)
            ind_accs.append(ind_acc[1:])
            #print(avg_acc)
            acc.update(avg_acc, cnt)  # NOTE(review): acc undefined

            # measure elapsed time
            batch_time.update(time.time() - end)  # NOTE(review): batch_time undefined
            end = time.time()

            preds, maxvals = get_final_preds(
                config, output.clone().cpu().numpy(), c, s)

            all_preds[idx:idx + num_images, :, 0:2] = preds[:, :, 0:2]
            all_preds[idx:idx + num_images, :, 2:3] = maxvals
            # double check this all_boxes parts
            all_boxes[idx:idx + num_images, 0:2] = c[:, 0:2]
            all_boxes[idx:idx + num_images, 2:4] = s[:, 0:2]
            all_boxes[idx:idx + num_images, 4] = np.prod(s*200, 1)
            all_boxes[idx:idx + num_images, 5] = score
            image_path.extend(meta['image'])

            #saving pickles
            #if save_pickle:
            #    save_pred_pickle(config, input, meta, target, pred*4, output, prefix)

            idx += num_images

            # NOTE(review): prefix undefined — never assigned in this function.
            save_debug_images_w_bbox(config, input, meta, target, pred*4, output, prefix)
            #import ipdb; ipdb.set_trace()
            #exit(0)
# markdown format output
def _print_name_value(name_value, full_arch_name):
    """Log *name_value* (a metric-name -> value mapping) as a two-row
    markdown table: a header row of metric names and a value row labelled
    with the (possibly truncated) architecture name."""
    column_count = len(name_value)

    header_cells = ' '.join('| {}'.format(key) for key in name_value.keys())
    logger.info('| Arch ' + header_cells + ' |')
    logger.info('|---' * (column_count + 1) + '|')

    # Truncate long architecture names so the table stays readable.
    arch_label = full_arch_name
    if len(arch_label) > 15:
        arch_label = arch_label[:8] + '...'
    value_cells = ' '.join('| {:.3f}'.format(v) for v in name_value.values())
    logger.info('| ' + arch_label + ' ' + value_cells + ' |')
class AverageMeter(object):
    """Tracks the most recent value and the running average of a series.

    Attributes:
        val: last value passed to :meth:`update`.
        sum: weighted sum of all values seen since the last reset.
        count: total weight seen since the last reset.
        avg: ``sum / count`` (0 when nothing has been recorded).
    """

    def __init__(self):
        self.reset()

    def reset(self):
        """Clear all accumulated statistics back to zero."""
        self.val = 0
        self.avg = 0
        self.sum = 0
        self.count = 0

    def update(self, val, n=1):
        """Record *val* observed with weight *n* and refresh the average."""
        self.val = val
        self.sum = self.sum + val * n
        self.count = self.count + n
        # Guard against a zero count (e.g. update(x, 0) right after reset).
        if self.count != 0:
            self.avg = self.sum / self.count
        else:
            self.avg = 0
| 34.546207
| 139
| 0.488381
| 2,766
| 25,046
| 4.214027
| 0.109544
| 0.017502
| 0.02162
| 0.023164
| 0.861616
| 0.842999
| 0.810741
| 0.789894
| 0.769132
| 0.762354
| 0
| 0.019991
| 0.386848
| 25,046
| 724
| 140
| 34.593923
| 0.739012
| 0.113751
| 0
| 0.79249
| 0
| 0
| 0.069426
| 0.008096
| 0
| 0
| 0
| 0
| 0
| 1
| 0.01581
| false
| 0
| 0.023715
| 0
| 0.045455
| 0.019763
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ee200662df24422db2e54f177688bb972e4ac43a
| 37,191
|
py
|
Python
|
leiaapi/generated/api/annotation_api.py
|
labinnovationdocapost/leia-api-python-sdk
|
6001dce68362d4e836b57e52d4da17710f25ed12
|
[
"MIT"
] | null | null | null |
leiaapi/generated/api/annotation_api.py
|
labinnovationdocapost/leia-api-python-sdk
|
6001dce68362d4e836b57e52d4da17710f25ed12
|
[
"MIT"
] | null | null | null |
leiaapi/generated/api/annotation_api.py
|
labinnovationdocapost/leia-api-python-sdk
|
6001dce68362d4e836b57e52d4da17710f25ed12
|
[
"MIT"
] | null | null | null |
# coding: utf-8
"""
LEIA RESTful API for AI
Leia API # noqa: E501
OpenAPI spec version: 1.0.0
Contact: contact@leia.io
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from leiaapi.generated.api_client import ApiClient
class AnnotationApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_annotation(self, body, token, annotation_type, document_id, **kwargs): # noqa: E501
"""Creates an annotation # noqa: E501
Creates an annotation # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_annotation(body, token, annotation_type, document_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param object body: The prediction that should be associated to document in this annotation, in free form json (required)
:param str token: The login token obtained via GET /login/{api_key} (required)
:param AnnotationTypes annotation_type: The type of the annotation (required)
:param str document_id: The id of the document to annotate (required)
:param str name: The name of the annotation (for information purposes only)
:param list[str] tags: The tags of the annotation
:return: Annotation
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_annotation_with_http_info(body, token, annotation_type, document_id, **kwargs) # noqa: E501
else:
(data) = self.create_annotation_with_http_info(body, token, annotation_type, document_id, **kwargs) # noqa: E501
return data
    def create_annotation_with_http_info(self, body, token, annotation_type, document_id, **kwargs):  # noqa: E501
        """Creates an annotation  # noqa: E501

        Creates an annotation  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.create_annotation_with_http_info(body, token, annotation_type, document_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param object body: The prediction that should be associated to document in this annotation, in free form json (required)
        :param str token: The login token obtained via GET /login/{api_key} (required)
        :param AnnotationTypes annotation_type: The type of the annotation (required)
        :param str document_id: The id of the document to annotate (required)
        :param str name: The name of the annotation (for information purposes only)
        :param list[str] tags: The tags of the annotation
        :return: Annotation
            If the method is called asynchronously,
            returns the request thread.
        """
        all_params = ['body', 'token', 'annotation_type', 'document_id', 'name', 'tags']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() snapshots the named arguments plus ``kwargs`` so every
        # parameter can be validated and marshalled uniformly.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_annotation" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'body' is set
        if ('body' not in params or
                params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `create_annotation`")  # noqa: E501
        # verify the required parameter 'token' is set
        if ('token' not in params or
                params['token'] is None):
            raise ValueError("Missing the required parameter `token` when calling `create_annotation`")  # noqa: E501
        # verify the required parameter 'annotation_type' is set
        if ('annotation_type' not in params or
                params['annotation_type'] is None):
            raise ValueError("Missing the required parameter `annotation_type` when calling `create_annotation`")  # noqa: E501
        # verify the required parameter 'document_id' is set
        if ('document_id' not in params or
                params['document_id'] is None):
            raise ValueError("Missing the required parameter `document_id` when calling `create_annotation`")  # noqa: E501

        collection_formats = {}

        # document_id is interpolated into the URL path.
        path_params = {}
        if 'document_id' in params:
            path_params['document_id'] = params['document_id']  # noqa: E501

        query_params = []
        if 'annotation_type' in params:
            query_params.append(('annotation_type', params['annotation_type']))  # noqa: E501
        if 'name' in params:
            query_params.append(('name', params['name']))  # noqa: E501
        if 'tags' in params:
            query_params.append(('tags', params['tags']))  # noqa: E501
            # 'multi' serializes the tags list as repeated query parameters.
            collection_formats['tags'] = 'multi'  # noqa: E501

        # The auth token travels in a request header, not the query string.
        header_params = {}
        if 'token' in params:
            header_params['token'] = params['token']  # noqa: E501

        form_params = []
        local_var_files = {}

        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/annotation/{document_id}', 'POST',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Annotation',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def delete_annotation(self, token, annotation_id, **kwargs): # noqa: E501
"""Deletes an annotation # noqa: E501
Deletes an annotation # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_annotation(token, annotation_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str token: The login token obtained via GET /login/{api_key} (required)
:param str annotation_id: The id of the annotation (for information purposes only) (required)
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_annotation_with_http_info(token, annotation_id, **kwargs) # noqa: E501
else:
(data) = self.delete_annotation_with_http_info(token, annotation_id, **kwargs) # noqa: E501
return data
    def delete_annotation_with_http_info(self, token, annotation_id, **kwargs):  # noqa: E501
        """Deletes an annotation  # noqa: E501

        Deletes an annotation  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.delete_annotation_with_http_info(token, annotation_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str token: The login token obtained via GET /login/{api_key} (required)
        :param str annotation_id: The id of the annotation (for information purposes only) (required)
        :return: None
            If the method is called asynchronously,
            returns the request thread.
        """
        all_params = ['token', 'annotation_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() snapshots named args plus kwargs for uniform validation.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method delete_annotation" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'token' is set
        if ('token' not in params or
                params['token'] is None):
            raise ValueError("Missing the required parameter `token` when calling `delete_annotation`")  # noqa: E501
        # verify the required parameter 'annotation_id' is set
        if ('annotation_id' not in params or
                params['annotation_id'] is None):
            raise ValueError("Missing the required parameter `annotation_id` when calling `delete_annotation`")  # noqa: E501

        collection_formats = {}

        # annotation_id is interpolated into the URL path.
        path_params = {}
        if 'annotation_id' in params:
            path_params['annotation_id'] = params['annotation_id']  # noqa: E501

        query_params = []

        # The auth token travels in a request header.
        header_params = {}
        if 'token' in params:
            header_params['token'] = params['token']  # noqa: E501

        form_params = []
        local_var_files = {}

        body_params = None
        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/annotation/{annotation_id}', 'DELETE',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type=None,  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_annotation(self, token, annotation_id, **kwargs): # noqa: E501
"""Retrieves an annotation # noqa: E501
Retrieves an annotation # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_annotation(token, annotation_id, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str token: The login token obtained via GET /login/{api_key} (required)
:param str annotation_id: The id of the annotation (for information purposes only) (required)
:return: Annotation
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_annotation_with_http_info(token, annotation_id, **kwargs) # noqa: E501
else:
(data) = self.get_annotation_with_http_info(token, annotation_id, **kwargs) # noqa: E501
return data
    def get_annotation_with_http_info(self, token, annotation_id, **kwargs):  # noqa: E501
        """Retrieves an annotation  # noqa: E501

        Retrieves an annotation  # noqa: E501
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please pass async_req=True
        >>> thread = api.get_annotation_with_http_info(token, annotation_id, async_req=True)
        >>> result = thread.get()

        :param async_req bool
        :param str token: The login token obtained via GET /login/{api_key} (required)
        :param str annotation_id: The id of the annotation (for information purposes only) (required)
        :return: Annotation
            If the method is called asynchronously,
            returns the request thread.
        """
        all_params = ['token', 'annotation_id']  # noqa: E501
        all_params.append('async_req')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')

        # locals() snapshots named args plus kwargs for uniform validation.
        params = locals()
        for key, val in six.iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_annotation" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'token' is set
        if ('token' not in params or
                params['token'] is None):
            raise ValueError("Missing the required parameter `token` when calling `get_annotation`")  # noqa: E501
        # verify the required parameter 'annotation_id' is set
        if ('annotation_id' not in params or
                params['annotation_id'] is None):
            raise ValueError("Missing the required parameter `annotation_id` when calling `get_annotation`")  # noqa: E501

        collection_formats = {}

        # annotation_id is interpolated into the URL path.
        path_params = {}
        if 'annotation_id' in params:
            path_params['annotation_id'] = params['annotation_id']  # noqa: E501

        query_params = []

        # The auth token travels in a request header.
        header_params = {}
        if 'token' in params:
            header_params['token'] = params['token']  # noqa: E501

        form_params = []
        local_var_files = {}

        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.select_header_accept(
            ['application/json'])  # noqa: E501

        # Authentication setting
        auth_settings = []  # noqa: E501

        return self.api_client.call_api(
            '/annotation/{annotation_id}', 'GET',
            path_params,
            query_params,
            header_params,
            body=body_params,
            post_params=form_params,
            files=local_var_files,
            response_type='Annotation',  # noqa: E501
            auth_settings=auth_settings,
            async_req=params.get('async_req'),
            _return_http_data_only=params.get('_return_http_data_only'),
            _preload_content=params.get('_preload_content', True),
            _request_timeout=params.get('_request_timeout'),
            collection_formats=collection_formats)
def get_annotations(self, token, **kwargs):  # noqa: E501
    """Retrieves annotations (paginated)  # noqa: E501

    Convenience wrapper around ``get_annotations_with_http_info`` that
    returns only the deserialized data (synchronous call) or the request
    thread (``async_req=True``).

    :param async_req bool
    :param str token: The login token obtained via GET /login/{api_key} (required)
    :param str annotation_id: If specified, filters the annotations id
    :param AnnotationTypes annotation_type: If specified, filters the annotations by type
    :param str name: If specified, filters the annotations by name
    :param list[str] tags: If specified, filters the annotations by tag
    :param str document_id: If specified, filters the annotations attached to a given document
    :param datetime created_after: If specified, keeps only annotations created after given UTC timestamp (ISO 8601 format : yyyy-MM-ddThh:mm:ss)
    :param datetime created_before: If specified, keeps only annotations created before given UTC timestamp (ISO 8601 format : yyyy-MM-ddThh:mm:ss)
    :param int offset: Number of the first annotation to send (pagination)
    :param int limit: Maximum number of annotation to send (pagination)
    :return: list[Annotation]
             If the method is called asynchronously,
             returns the request thread.
    """
    # Callers of this wrapper always want the bare payload, never the
    # (data, status, headers) tuple.
    kwargs['_return_http_data_only'] = True
    # Both the sync branch (deserialized data) and the async branch
    # (request thread) are exactly what *_with_http_info returns, so one
    # unconditional delegation preserves the original behaviour.
    return self.get_annotations_with_http_info(token, **kwargs)  # noqa: E501
def get_annotations_with_http_info(self, token, **kwargs):  # noqa: E501
    """Retrieves annotations (paginated)  # noqa: E501

    Builds and issues ``GET /annotation`` with the optional filters below
    and returns the full HTTP response information.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.get_annotations_with_http_info(token, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str token: The login token obtained via GET /login/{api_key} (required)
    :param str annotation_id: If specified, filters the annotations id
    :param AnnotationTypes annotation_type: If specified, filters the annotations by type
    :param str name: If specified, filters the annotations by name
    :param list[str] tags: If specified, filters the annotations by tag
    :param str document_id: If specified, filters the annotations attached to a given document
    :param datetime created_after: If specified, keeps only annotations created after given UTC timestamp (ISO 8601 format : yyyy-MM-ddThh:mm:ss)
    :param datetime created_before: If specified, keeps only annotations created before given UTC timestamp (ISO 8601 format : yyyy-MM-ddThh:mm:ss)
    :param int offset: Number of the first annotation to send (pagination)
    :param int limit: Maximum number of annotation to send (pagination)
    :return: list[Annotation]
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied
    :raises ValueError: if `token` is missing or None
    """
    # Names that become query parameters, in the order they must be sent.
    query_param_names = ['annotation_id', 'annotation_type', 'name', 'tags',
                         'document_id', 'created_after', 'created_before',
                         'offset', 'limit']
    all_params = ['token'] + query_param_names
    all_params.extend(['async_req', '_return_http_data_only',
                       '_preload_content', '_request_timeout'])

    # Build `params` explicitly instead of via locals(): locals() silently
    # absorbs any helper variable later added to this scope, corrupting
    # the validation below. Plain dict.items() also removes the need for
    # the Python-2-only six.iteritems helper.
    params = {'token': token}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method get_annotations" % key
            )
        params[key] = val
    # verify the required parameter 'token' is set
    if params.get('token') is None:
        raise ValueError("Missing the required parameter `token` when calling `get_annotations`")  # noqa: E501

    collection_formats = {}
    path_params = {}

    # Preserve the original query-parameter ordering.
    query_params = [(q, params[q]) for q in query_param_names if q in params]
    if 'tags' in params:
        collection_formats['tags'] = 'multi'  # serialize list as repeated query params

    header_params = {}
    if 'token' in params:
        header_params['token'] = params['token']  # noqa: E501

    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/annotation', 'GET',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='list[Annotation]',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def tag_annotation(self, token, annotation_id, tag, **kwargs):  # noqa: E501
    """Tags an annotation  # noqa: E501

    Convenience wrapper around ``tag_annotation_with_http_info`` that
    returns only the deserialized data (synchronous call) or the request
    thread (``async_req=True``).

    :param async_req bool
    :param str token: The login token obtained via GET /login/{api_key} (required)
    :param str annotation_id: The id of the annotation (required)
    :param str tag: The tag to add to the annotation (required)
    :return: Annotation
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the deserialized payload is wanted, never (data, status, headers).
    kwargs['_return_http_data_only'] = True
    # Sync and async branches alike return *_with_http_info's result as-is,
    # so a single unconditional delegation is behaviour-identical.
    return self.tag_annotation_with_http_info(token, annotation_id, tag, **kwargs)  # noqa: E501
def tag_annotation_with_http_info(self, token, annotation_id, tag, **kwargs):  # noqa: E501
    """Tags an annotation  # noqa: E501

    Issues ``POST /annotation/{annotation_id}/tag/{tag}`` and returns the
    full HTTP response information.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.tag_annotation_with_http_info(token, annotation_id, tag, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str token: The login token obtained via GET /login/{api_key} (required)
    :param str annotation_id: The id of the annotation (required)
    :param str tag: The tag to add to the annotation (required)
    :return: Annotation
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied
    :raises ValueError: if a required parameter is missing or None
    """
    all_params = ['token', 'annotation_id', 'tag',
                  'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']

    # Explicit dict instead of the fragile locals() pattern; dict.items()
    # instead of the Python-2-only six.iteritems.
    params = {'token': token, 'annotation_id': annotation_id, 'tag': tag}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method tag_annotation" % key
            )
        params[key] = val
    # verify the required parameters are set (error text kept identical
    # to the previous per-parameter checks)
    for required in ('token', 'annotation_id', 'tag'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `tag_annotation`" % required)  # noqa: E501

    collection_formats = {}
    path_params = {'annotation_id': params['annotation_id'],
                   'tag': params['tag']}
    query_params = []
    header_params = {'token': params['token']}
    form_params = []
    local_var_files = {}
    body_params = None
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/annotation/{annotation_id}/tag/{tag}', 'POST',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Annotation',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def untag_annotation(self, token, annotation_id, tag, **kwargs):  # noqa: E501
    """Untags an annotation  # noqa: E501

    Convenience wrapper around ``untag_annotation_with_http_info`` that
    returns only the deserialized data (synchronous call) or the request
    thread (``async_req=True``).

    :param async_req bool
    :param str token: The login token obtained via GET /login/{api_key} (required)
    :param str annotation_id: The id of the annotation (required)
    :param str tag: The tag to delete from the annotation (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the deserialized payload is wanted, never (data, status, headers).
    kwargs['_return_http_data_only'] = True
    # Sync and async branches alike return *_with_http_info's result as-is,
    # so a single unconditional delegation is behaviour-identical.
    return self.untag_annotation_with_http_info(token, annotation_id, tag, **kwargs)  # noqa: E501
def untag_annotation_with_http_info(self, token, annotation_id, tag, **kwargs):  # noqa: E501
    """Untags an annotation  # noqa: E501

    Issues ``DELETE /annotation/{annotation_id}/tag/{tag}`` and returns the
    full HTTP response information.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.untag_annotation_with_http_info(token, annotation_id, tag, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param str token: The login token obtained via GET /login/{api_key} (required)
    :param str annotation_id: The id of the annotation (required)
    :param str tag: The tag to delete from the annotation (required)
    :return: None
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied
    :raises ValueError: if a required parameter is missing or None
    """
    all_params = ['token', 'annotation_id', 'tag',
                  'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']

    # Explicit dict instead of the fragile locals() pattern; dict.items()
    # instead of the Python-2-only six.iteritems.
    params = {'token': token, 'annotation_id': annotation_id, 'tag': tag}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method untag_annotation" % key
            )
        params[key] = val
    # verify the required parameters are set (error text kept identical
    # to the previous per-parameter checks)
    for required in ('token', 'annotation_id', 'tag'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `untag_annotation`" % required)  # noqa: E501

    collection_formats = {}
    path_params = {'annotation_id': params['annotation_id'],
                   'tag': params['tag']}
    query_params = []
    header_params = {'token': params['token']}
    form_params = []
    local_var_files = {}
    # No body and no Accept header: this endpoint returns no payload.
    body_params = None
    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/annotation/{annotation_id}/tag/{tag}', 'DELETE',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type=None,  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
def update_annotation(self, body, token, annotation_id, **kwargs):  # noqa: E501
    """Updates an annotation  # noqa: E501

    Convenience wrapper around ``update_annotation_with_http_info`` that
    returns only the deserialized data (synchronous call) or the request
    thread (``async_req=True``).

    :param async_req bool
    :param object body: The new prediction that should be associated to document in this annotation, in free form json (required)
    :param str token: The login token obtained via GET /login/{api_key} (required)
    :param str annotation_id: The id of the annotation to modify (required)
    :param str name: The new name of the annotation (won't change if not set)
    :return: Annotation
             If the method is called asynchronously,
             returns the request thread.
    """
    # Only the deserialized payload is wanted, never (data, status, headers).
    kwargs['_return_http_data_only'] = True
    # Sync and async branches alike return *_with_http_info's result as-is,
    # so a single unconditional delegation is behaviour-identical.
    return self.update_annotation_with_http_info(body, token, annotation_id, **kwargs)  # noqa: E501
def update_annotation_with_http_info(self, body, token, annotation_id, **kwargs):  # noqa: E501
    """Updates an annotation  # noqa: E501

    Issues ``PATCH /annotation/{annotation_id}`` with the new prediction as
    JSON body and returns the full HTTP response information.

    This method makes a synchronous HTTP request by default. To make an
    asynchronous HTTP request, please pass async_req=True
    >>> thread = api.update_annotation_with_http_info(body, token, annotation_id, async_req=True)
    >>> result = thread.get()

    :param async_req bool
    :param object body: The new prediction that should be associated to document in this annotation, in free form json (required)
    :param str token: The login token obtained via GET /login/{api_key} (required)
    :param str annotation_id: The id of the annotation to modify (required)
    :param str name: The new name of the annotation (won't change if not set)
    :return: Annotation
             If the method is called asynchronously,
             returns the request thread.
    :raises TypeError: if an unexpected keyword argument is supplied
    :raises ValueError: if a required parameter is missing or None
    """
    all_params = ['body', 'token', 'annotation_id', 'name',
                  'async_req', '_return_http_data_only',
                  '_preload_content', '_request_timeout']

    # Explicit dict instead of the fragile locals() pattern; dict.items()
    # instead of the Python-2-only six.iteritems.
    params = {'body': body, 'token': token, 'annotation_id': annotation_id}
    for key, val in kwargs.items():
        if key not in all_params:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method update_annotation" % key
            )
        params[key] = val
    # verify the required parameters are set (error text kept identical
    # to the previous per-parameter checks)
    for required in ('body', 'token', 'annotation_id'):
        if params.get(required) is None:
            raise ValueError("Missing the required parameter `%s` when calling `update_annotation`" % required)  # noqa: E501

    collection_formats = {}
    path_params = {'annotation_id': params['annotation_id']}
    # `name` is the only optional query parameter.
    query_params = []
    if 'name' in params:
        query_params.append(('name', params['name']))  # noqa: E501
    header_params = {'token': params['token']}
    form_params = []
    local_var_files = {}
    body_params = params['body']
    # HTTP header `Accept`
    header_params['Accept'] = self.api_client.select_header_accept(
        ['application/json'])  # noqa: E501
    # HTTP header `Content-Type`
    header_params['Content-Type'] = self.api_client.select_header_content_type(  # noqa: E501
        ['application/json'])  # noqa: E501
    # Authentication setting
    auth_settings = []  # noqa: E501

    return self.api_client.call_api(
        '/annotation/{annotation_id}', 'PATCH',
        path_params,
        query_params,
        header_params,
        body=body_params,
        post_params=form_params,
        files=local_var_files,
        response_type='Annotation',  # noqa: E501
        auth_settings=auth_settings,
        async_req=params.get('async_req'),
        _return_http_data_only=params.get('_return_http_data_only'),
        _preload_content=params.get('_preload_content', True),
        _request_timeout=params.get('_request_timeout'),
        collection_formats=collection_formats)
| 44.433692
| 165
| 0.622274
| 4,383
| 37,191
| 5.082592
| 0.048141
| 0.048481
| 0.037168
| 0.022624
| 0.963191
| 0.954886
| 0.94124
| 0.932621
| 0.925484
| 0.918257
| 0
| 0.016225
| 0.287381
| 37,191
| 836
| 166
| 44.486842
| 0.824322
| 0.368745
| 0
| 0.790749
| 1
| 0
| 0.214421
| 0.029713
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03304
| false
| 0
| 0.008811
| 0
| 0.090308
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ee2991b5072eb86974c01494cfe15dbadbb80933
| 70
|
py
|
Python
|
apps/profiles/test/test_admin.py
|
ecoo-app/ecoo-backend
|
ffe54abcd2e8c1a18ef2fa992c45a10f8232a4a0
|
[
"MIT"
] | 1
|
2021-03-31T18:25:44.000Z
|
2021-03-31T18:25:44.000Z
|
apps/profiles/test/test_admin.py
|
ecoo-app/ecoo-backend
|
ffe54abcd2e8c1a18ef2fa992c45a10f8232a4a0
|
[
"MIT"
] | null | null | null |
apps/profiles/test/test_admin.py
|
ecoo-app/ecoo-backend
|
ffe54abcd2e8c1a18ef2fa992c45a10f8232a4a0
|
[
"MIT"
] | 1
|
2021-01-14T09:27:42.000Z
|
2021-01-14T09:27:42.000Z
|
# TODO:FIXME: add tests for the admin pages and for the admin actions
| 35
| 69
| 0.771429
| 13
| 70
| 4.153846
| 0.769231
| 0.222222
| 0.407407
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.185714
| 70
| 1
| 70
| 70
| 0.947368
| 0.957143
| 0
| null | 0
| null | 0
| 0
| null | 0
| 0
| 1
| null | 1
| null | true
| 0
| 0
| null | null | null | 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c9da5abc3fe858a3c3a1ca9ab636077eeea8be8f
| 102
|
py
|
Python
|
server/commands/__init__.py
|
griseduardo/Facial-Recognition-Database-Management-System
|
5d451791cb131164930cc4f886ec6300d88a8c67
|
[
"MIT"
] | 6
|
2021-09-13T13:45:49.000Z
|
2021-12-20T15:36:10.000Z
|
server/commands/__init__.py
|
griseduardo/Facial-Recognition-Database-Management-System
|
5d451791cb131164930cc4f886ec6300d88a8c67
|
[
"MIT"
] | 31
|
2021-09-11T05:52:56.000Z
|
2021-11-07T14:35:41.000Z
|
server/commands/__init__.py
|
griseduardo/Facial-Recognition-Database-Management-System
|
5d451791cb131164930cc4f886ec6300d88a8c67
|
[
"MIT"
] | 2
|
2021-09-13T04:08:05.000Z
|
2021-09-26T04:06:53.000Z
|
from .build import cli as build_cli
from .data import cli as data_cli
from .dev import cli as dev_cli
| 25.5
| 35
| 0.794118
| 21
| 102
| 3.714286
| 0.333333
| 0.346154
| 0.423077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.176471
| 102
| 3
| 36
| 34
| 0.928571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c9de76fbaeb4d23eb423c491d7aee3cfbb73f205
| 151
|
py
|
Python
|
garagepi/framework/usecase/__init__.py
|
constructorfleet/GaragePi-Assistant
|
e648e853472adfb3e0a97d8ffbcdf9453f666d6a
|
[
"MIT"
] | null | null | null |
garagepi/framework/usecase/__init__.py
|
constructorfleet/GaragePi-Assistant
|
e648e853472adfb3e0a97d8ffbcdf9453f666d6a
|
[
"MIT"
] | null | null | null |
garagepi/framework/usecase/__init__.py
|
constructorfleet/GaragePi-Assistant
|
e648e853472adfb3e0a97d8ffbcdf9453f666d6a
|
[
"MIT"
] | null | null | null |
"""Use case frameworks."""
from garagepi.framework.usecase.UseCase import UseCase
from garagepi.framework.usecase.CommandUseCase import CommandUseCase
| 37.75
| 68
| 0.84106
| 17
| 151
| 7.470588
| 0.529412
| 0.188976
| 0.330709
| 0.440945
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.072848
| 151
| 4
| 68
| 37.75
| 0.907143
| 0.13245
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4e78e3258c37091c21d4e1273e18766e76802d5a
| 7,179
|
py
|
Python
|
tests/resources/test_conferences.py
|
vaibhav-plivo/plivo-python
|
e4ae7559ba4647ac0e1af523c94d49a3fd6a24ca
|
[
"MIT"
] | null | null | null |
tests/resources/test_conferences.py
|
vaibhav-plivo/plivo-python
|
e4ae7559ba4647ac0e1af523c94d49a3fd6a24ca
|
[
"MIT"
] | 10
|
2020-10-19T06:47:45.000Z
|
2021-06-25T15:41:34.000Z
|
tests/resources/test_conferences.py
|
vaibhav-plivo/plivo-python
|
e4ae7559ba4647ac0e1af523c94d49a3fd6a24ca
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
from tests.decorators import with_response
from .. import PlivoResourceTestCase
conference_name = 'My Conf Room'
member_id = 'Test Member ID'
class ConferenceTest(PlivoResourceTestCase):
    """Tests for conference-level endpoints of the client.

    NOTE(review): each test runs under @with_response(<status>[,
    method_name]) — the decorator is defined in tests.decorators and
    presumably stubs the HTTP transport with a canned fixture response
    while recording the outgoing request on ``self.client.current_request``
    (that attribute is what every assertion below inspects); confirm
    against the decorator's implementation.
    """

    @with_response(200)
    def test_get(self):
        # Fetching one conference must produce GET on the
        # /Conference/{conference_name} URL and map the payload onto the
        # returned resource (its conference_name round-trips).
        conference = self.client.conferences.get(conference_name)
        self.assertResponseMatches(conference)
        self.assertEqual(self.client.current_request.method, 'GET')
        self.assertUrlEqual(
            self.get_url('Conference', conference_name),
            self.client.current_request.url)
        self.assertEqual(conference.conference_name, conference_name)

    @with_response(204)
    def test_delete(self):
        # Deleting one conference: DELETE on /Conference/{conference_name}.
        self.client.conferences.delete(conference_name)
        self.assertEqual(self.client.current_request.method, 'DELETE')
        self.assertUrlEqual(
            self.get_url('Conference', conference_name),
            self.client.current_request.url)

    @with_response(204)
    def test_delete_all(self):
        # Deleting every conference: DELETE on the collection URL.
        self.client.conferences.delete_all()
        self.assertEqual(self.client.current_request.method, 'DELETE')
        self.assertUrlEqual(
            self.get_url('Conference'), self.client.current_request.url)

    @with_response(204, method_name='delete')
    def test_hangup(self):
        # hangup() issues the same DELETE as delete() (fixture reused via
        # method_name='delete').
        self.client.conferences.hangup(conference_name)
        self.assertEqual(self.client.current_request.method, 'DELETE')
        self.assertUrlEqual(
            self.get_url('Conference', conference_name),
            self.client.current_request.url)

    @with_response(204, method_name='delete_all')
    def test_hangup_all(self):
        # hangup_all() issues the same DELETE as delete_all().
        self.client.conferences.hangup_all()
        self.assertEqual(self.client.current_request.method, 'DELETE')
        self.assertUrlEqual(
            self.get_url('Conference'), self.client.current_request.url)

    @with_response(200)
    def test_list(self):
        # Listing conferences: GET on the collection URL.
        self.client.conferences.list()
        self.assertEqual(self.client.current_request.method, 'GET')
        self.assertUrlEqual(
            self.get_url('Conference'), self.client.current_request.url)

    @with_response(202)
    def test_record_create(self):
        # Starting a recording: POST on /Conference/{name}/Record with the
        # recording options passed through.
        self.client.conferences.record(
            conference_name=conference_name,
            file_format='mp3',
            transcription_url='http://example.transcription.url')
        self.assertEqual(self.client.current_request.method, 'POST')
        self.assertUrlEqual(
            self.get_url('Conference', conference_name, 'Record'),
            self.client.current_request.url)

    @with_response(204)
    def test_record_delete(self):
        # Stopping a recording: DELETE on /Conference/{name}/Record.
        self.client.conferences.record_stop(conference_name=conference_name)
        self.assertEqual(self.client.current_request.method, 'DELETE')
        self.assertUrlEqual(
            self.get_url('Conference', conference_name, 'Record'),
            self.client.current_request.url)
class ConferenceMemberTest(PlivoResourceTestCase):
    """Tests for per-member conference endpoints.

    Same pattern as the conference-level tests: @with_response (defined in
    tests.decorators — see there for exact semantics) supplies a canned
    response and records the outgoing request on
    ``self.client.current_request``; each test asserts the HTTP method and
    the /Conference/{name}/Member/{member_id}/<Action> URL produced.
    """

    @with_response(202)
    def test_deaf_create(self):
        # Deafen a member: POST on .../Member/{member_id}/Deaf.
        self.client.conferences.member_deaf(
            conference_name=conference_name, member_id=member_id)
        self.assertEqual(self.client.current_request.method, 'POST')
        self.assertUrlEqual(
            self.get_url('Conference', conference_name, 'Member', member_id,
                         'Deaf'), self.client.current_request.url)

    @with_response(202)
    def test_speak_create(self):
        # Speak text to a member: POST on .../Speak with the text payload.
        self.client.conferences.member_speak(
            conference_name=conference_name,
            member_id=member_id,
            text='Hello World!')
        self.assertEqual(self.client.current_request.method, 'POST')
        self.assertUrlEqual(
            self.get_url('Conference', conference_name, 'Member', member_id,
                         'Speak'), self.client.current_request.url)

    @with_response(202)
    def test_play_create(self):
        # Play a sound to a member: POST on .../Play with the sound URL.
        self.client.conferences.member_play(
            conference_name=conference_name,
            member_id=member_id,
            url='http://url.to.sound')
        self.assertEqual(self.client.current_request.method, 'POST')
        self.assertUrlEqual(
            self.get_url('Conference', conference_name, 'Member', member_id,
                         'Play'), self.client.current_request.url)

    @with_response(202)
    def test_mute_create(self):
        # Mute a member: POST on .../Mute.
        self.client.conferences.member_mute(
            conference_name=conference_name, member_id=member_id)
        self.assertEqual(self.client.current_request.method, 'POST')
        self.assertUrlEqual(
            self.get_url('Conference', conference_name, 'Member', member_id,
                         'Mute'), self.client.current_request.url)

    @with_response(204)
    def test_mute_delete(self):
        # Unmute: the *_stop variant maps onto DELETE of the same URL.
        self.client.conferences.member_mute_stop(
            conference_name=conference_name, member_id=member_id)
        self.assertEqual(self.client.current_request.method, 'DELETE')
        self.assertUrlEqual(
            self.get_url('Conference', conference_name, 'Member', member_id,
                         'Mute'), self.client.current_request.url)

    @with_response(204)
    def test_deaf_delete(self):
        # Undeafen: DELETE on .../Deaf.
        self.client.conferences.member_deaf_stop(
            conference_name=conference_name, member_id=member_id)
        self.assertEqual(self.client.current_request.method, 'DELETE')
        self.assertUrlEqual(
            self.get_url('Conference', conference_name, 'Member', member_id,
                         'Deaf'), self.client.current_request.url)

    @with_response(204)
    def test_speak_delete(self):
        # Stop speaking: DELETE on .../Speak.
        self.client.conferences.member_speak_stop(
            conference_name=conference_name, member_id=member_id)
        self.assertEqual(self.client.current_request.method, 'DELETE')
        self.assertUrlEqual(
            self.get_url('Conference', conference_name, 'Member', member_id,
                         'Speak'), self.client.current_request.url)

    @with_response(204)
    def test_play_delete(self):
        # Stop playback: DELETE on .../Play.
        self.client.conferences.member_play_stop(
            conference_name=conference_name, member_id=member_id)
        self.assertEqual(self.client.current_request.method, 'DELETE')
        self.assertUrlEqual(
            self.get_url('Conference', conference_name, 'Member', member_id,
                         'Play'), self.client.current_request.url)

    @with_response(202)
    def test_kick_create(self):
        # Kick a member: POST on .../Kick.
        self.client.conferences.member_kick(
            conference_name=conference_name, member_id=member_id)
        self.assertEqual(self.client.current_request.method, 'POST')
        self.assertUrlEqual(
            self.get_url('Conference', conference_name, 'Member', member_id,
                         'Kick'), self.client.current_request.url)

    @with_response(204)
    def test_delete(self):
        # Hang up a member: DELETE on the member URL itself (no action
        # segment).
        self.client.conferences.member_hangup(
            conference_name=conference_name, member_id=member_id)
        self.assertEqual(self.client.current_request.method, 'DELETE')
        self.assertUrlEqual(
            self.get_url('Conference', conference_name, 'Member', member_id),
            self.client.current_request.url)
| 41.738372
| 77
| 0.670846
| 798
| 7,179
| 5.781955
| 0.075188
| 0.117035
| 0.13264
| 0.187256
| 0.871695
| 0.811443
| 0.730603
| 0.730603
| 0.71153
| 0.71153
| 0
| 0.010036
| 0.222733
| 7,179
| 171
| 78
| 41.982456
| 0.816846
| 0.002925
| 0
| 0.662162
| 0
| 0
| 0.068195
| 0
| 0
| 0
| 0
| 0
| 0.256757
| 1
| 0.121622
| false
| 0
| 0.013514
| 0
| 0.148649
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
14e8002279e7eff499c15d81774dddde982a7e1d
| 197
|
py
|
Python
|
malcolm/modules/ADPandABlocks/parts/pandablocksdriverpart.py
|
MattTaylorDLS/pymalcolm
|
995a8e4729bd745f8f617969111cc5a34ce1ac14
|
[
"Apache-2.0"
] | null | null | null |
malcolm/modules/ADPandABlocks/parts/pandablocksdriverpart.py
|
MattTaylorDLS/pymalcolm
|
995a8e4729bd745f8f617969111cc5a34ce1ac14
|
[
"Apache-2.0"
] | null | null | null |
malcolm/modules/ADPandABlocks/parts/pandablocksdriverpart.py
|
MattTaylorDLS/pymalcolm
|
995a8e4729bd745f8f617969111cc5a34ce1ac14
|
[
"Apache-2.0"
] | null | null | null |
from malcolm.modules.ADCore.parts import DetectorDriverPart
from .pandablockschildpart import PandABlocksChildPart
class PandABlocksDriverPart(DetectorDriverPart, PandABlocksChildPart):
pass
| 28.142857
| 70
| 0.86802
| 16
| 197
| 10.6875
| 0.6875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.091371
| 197
| 6
| 71
| 32.833333
| 0.955307
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
119483e8216d43088b2518421d5224f9f634eb40
| 578
|
py
|
Python
|
September 2020/05-Functions-Advanced/Exercises/05-Odd-or-Even.py
|
eclipse-ib/Software-University-Professional-Advanced-Module
|
636385f9e5521840f680644824d725d074b93c9a
|
[
"MIT"
] | null | null | null |
September 2020/05-Functions-Advanced/Exercises/05-Odd-or-Even.py
|
eclipse-ib/Software-University-Professional-Advanced-Module
|
636385f9e5521840f680644824d725d074b93c9a
|
[
"MIT"
] | null | null | null |
September 2020/05-Functions-Advanced/Exercises/05-Odd-or-Even.py
|
eclipse-ib/Software-University-Professional-Advanced-Module
|
636385f9e5521840f680644824d725d074b93c9a
|
[
"MIT"
] | null | null | null |
command = input()
list_of_numbers = [int(i) for i in input().split()]
result = 0
if command == "Odd":
# odd_numbers = list(filter(lambda x: x % 2 != 0, list_of_numbers))
# print(sum(odd_numbers) * len(list_of_numbers))
result = sum(list(filter(lambda x: x % 2 != 0, list_of_numbers))) * len(list_of_numbers)
elif command == "Even":
# even_numbers = list(filter(lambda x: x % 2 == 0, list_of_numbers))
# print(sum(even_numbers) * len(list_of_numbers))
result = sum(list(filter(lambda x: x % 2 == 0, list_of_numbers))) * len(list_of_numbers)
print(result)
| 44.461538
| 92
| 0.66436
| 94
| 578
| 3.851064
| 0.244681
| 0.149171
| 0.323204
| 0.187845
| 0.712707
| 0.712707
| 0.712707
| 0.712707
| 0.712707
| 0.712707
| 0
| 0.018828
| 0.17301
| 578
| 13
| 93
| 44.461538
| 0.738494
| 0.392734
| 0
| 0
| 0
| 0
| 0.020173
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.125
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
119c46ba47e1978b9fbbb88052f786b12f8e915f
| 116
|
py
|
Python
|
bin/ud/__init__.py
|
cedar101/spaCy
|
66e22098a8bb77cbe527b1a4a3c69ec1cfb56f95
|
[
"MIT"
] | 12
|
2019-03-20T20:43:47.000Z
|
2020-04-13T11:10:52.000Z
|
bin/ud/__init__.py
|
cedar101/spaCy
|
66e22098a8bb77cbe527b1a4a3c69ec1cfb56f95
|
[
"MIT"
] | 13
|
2018-06-05T11:54:40.000Z
|
2019-07-02T11:33:14.000Z
|
bin/ud/__init__.py
|
cedar101/spaCy
|
66e22098a8bb77cbe527b1a4a3c69ec1cfb56f95
|
[
"MIT"
] | 4
|
2019-06-07T13:02:33.000Z
|
2021-07-07T07:34:35.000Z
|
from .conll17_ud_eval import main as ud_evaluate # noqa: F401
from .ud_train import main as ud_train # noqa: F401
| 38.666667
| 62
| 0.775862
| 21
| 116
| 4.047619
| 0.52381
| 0.235294
| 0.282353
| 0.329412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 0.172414
| 116
| 2
| 63
| 58
| 0.802083
| 0.181034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
eec89fa86b9b016472648fc6b5ee6e398aad42b4
| 38,847
|
py
|
Python
|
tests/test_kmeans.py
|
joezuntz/TreeCorr
|
70fbb88047fa55a5fb28c2ee8aca45459bb1fe40
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
tests/test_kmeans.py
|
joezuntz/TreeCorr
|
70fbb88047fa55a5fb28c2ee8aca45459bb1fe40
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
tests/test_kmeans.py
|
joezuntz/TreeCorr
|
70fbb88047fa55a5fb28c2ee8aca45459bb1fe40
|
[
"BSD-2-Clause-FreeBSD"
] | null | null | null |
# Copyright (c) 2003-2019 by Mike Jarvis
#
# TreeCorr is free software: redistribution and use in source and binary forms,
# with or without modification, are permitted provided that the following
# conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions, and the disclaimer given in the accompanying LICENSE
# file.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions, and the disclaimer given in the documentation
# and/or other materials provided with the distribution.
from __future__ import print_function
import numpy as np
import os
import time
import coord
import warnings
import treecorr
from test_helper import get_from_wiki, CaptureLog, assert_raises, profile, timer
@timer
def test_dessv():
    """Run kmeans patch-finding on the DES SV catalog and sanity-check the results.

    Checks patch labels, centers (against a direct weighted mean), total/rms
    inertia, and patch sizes for the standard algorithm, the alternate
    algorithm (alt=True), and a field built with min_top=10.
    Skipped when fitsio is not installed (needed to read the FITS catalog).
    """
    try:
        import fitsio
    except ImportError:
        print('Skipping dessv test, since fitsio is not installed')
        return
    #treecorr.set_omp_threads(1);
    get_from_wiki('des_sv.fits')
    file_name = os.path.join('data','des_sv.fits')
    cat = treecorr.Catalog(file_name, ra_col='ra', dec_col='dec', ra_units='deg', dec_units='deg')
    # Use an odd number to make sure we force some of the shuffle bits in InitializeCenters
    # to happen.
    npatch = 43
    field = cat.getNField(max_top=5)
    t0 = time.time()
    patches, cen = field.run_kmeans(npatch)
    t1 = time.time()
    print('patches = ',np.unique(patches))
    assert len(patches) == cat.ntot
    assert min(patches) == 0
    assert max(patches) == npatch-1
    # Check the returned center to a direct calculation.
    xyz = np.array([cat.x, cat.y, cat.z]).T
    direct_cen = np.array([xyz[patches==i].mean(axis=0) for i in range(npatch)])
    # Project the mean positions back onto the unit sphere.
    direct_cen /= np.sqrt(np.sum(direct_cen**2,axis=1)[:,np.newaxis])
    np.testing.assert_allclose(cen, direct_cen, atol=1.e-3)
    # KMeans minimizes the total inertia.
    # Check this value and the rms size, which should also be quite small.
    inertia = np.array([np.sum((xyz[patches==i] - cen[i])**2) for i in range(npatch)])
    sizes = np.array([np.mean((xyz[patches==i] - cen[i])**2) for i in range(npatch)])**0.5
    sizes *= 180. / np.pi * 60. # convert to arcmin
    counts = np.array([np.sum(patches==i) for i in range(npatch)])
    print('With standard algorithm:')
    print('time = ',t1-t0)
    print('total inertia = ',np.sum(inertia))
    print('mean inertia = ',np.mean(inertia))
    print('rms inertia = ',np.std(inertia))
    print('mean size = ',np.mean(sizes))
    print('rms size = ',np.std(sizes))
    assert np.sum(inertia) < 200. # This is specific to this particular field and npatch.
    assert np.std(inertia) < 0.3 * np.mean(inertia) # rms is usually < 0.2 * mean
    assert np.std(sizes) < 0.1 * np.mean(sizes) # sizes have even less spread usually.
    # Should all have similar number of points. Nothing is required here though.
    print('mean counts = ',np.mean(counts))
    print('min counts = ',np.min(counts))
    print('max counts = ',np.max(counts))
    # Check the alternate algorithm. rms inertia should be lower.
    t0 = time.time()
    patches, cen = field.run_kmeans(npatch, alt=True)
    t1 = time.time()
    assert len(patches) == cat.ntot
    assert min(patches) == 0
    assert max(patches) == npatch-1
    inertia = np.array([np.sum((xyz[patches==i] - cen[i])**2) for i in range(npatch)])
    sizes = np.array([np.mean((xyz[patches==i] - cen[i])**2) for i in range(npatch)])**0.5
    sizes *= 180. / np.pi * 60. # convert to arcmin
    counts = np.array([np.sum(patches==i) for i in range(npatch)])
    print('With alternate algorithm:')
    print('time = ',t1-t0)
    print('total inertia = ',np.sum(inertia))
    print('mean inertia = ',np.mean(inertia))
    print('rms inertia = ',np.std(inertia))
    print('mean size = ',np.mean(sizes))
    print('rms size = ',np.std(sizes))
    assert np.sum(inertia) < 200. # Total shouldn't increase much. (And often decreases.)
    assert np.std(inertia) < 0.15 * np.mean(inertia) # rms should be even smaller here.
    assert np.std(sizes) < 0.1 * np.mean(sizes) # This is only a little bit smaller.
    # This doesn't keep the counts as equal as the standard algorithm.
    print('mean counts = ',np.mean(counts))
    print('min counts = ',np.min(counts))
    print('max counts = ',np.max(counts))
    # Finally, use a field with lots of top level cells to check the other branch in
    # InitializeCenters.
    field = cat.getNField(min_top=10)
    t0 = time.time()
    patches, cen = field.run_kmeans(npatch)
    t1 = time.time()
    assert len(patches) == cat.ntot
    assert min(patches) == 0
    assert max(patches) == npatch-1
    inertia = np.array([np.sum((xyz[patches==i] - cen[i])**2) for i in range(npatch)])
    sizes = np.array([np.mean((xyz[patches==i] - cen[i])**2) for i in range(npatch)])**0.5
    sizes *= 180. / np.pi * 60. # convert to arcmin
    counts = np.array([np.sum(patches==i) for i in range(npatch)])
    # This doesn't give as good an initialization, so these are a bit worse usually.
    print('With min_top=10:')
    print('time = ',t1-t0)
    print('total inertia = ',np.sum(inertia))
    print('mean inertia = ',np.mean(inertia))
    print('rms inertia = ',np.std(inertia))
    print('mean size = ',np.mean(sizes))
    print('rms size = ',np.std(sizes))
    assert np.sum(inertia) < 210.
    assert np.std(inertia) < 0.4 * np.mean(inertia) # I've seen over 0.3 x mean here.
    assert np.std(sizes) < 0.15 * np.mean(sizes)
    print('mean counts = ',np.mean(counts))
    print('min counts = ',np.min(counts))
    print('max counts = ',np.max(counts))
@timer
def test_radec():
    """Run kmeans on randomly generated (ra, dec) points with weights.

    Same checks as test_dessv, but uses a synthetic catalog so it runs even
    without fitsio, and exercises the weighted code path.
    """
    # Very similar to the above, but with a random set of points, so it will run even
    # if the user doesn't have fitsio installed.
    # In addition, we add weights to make sure that works.
    ngal = 100000
    s = 10.
    rng = np.random.RandomState(8675309)
    x = rng.normal(0,s, (ngal,) )
    y = rng.normal(0,s, (ngal,) ) + 100 # Put everything at large y, so smallish angle on sky
    z = rng.normal(0,s, (ngal,) )
    w = rng.random_sample(ngal)
    ra, dec = coord.CelestialCoord.xyz_to_radec(x,y,z)
    print('minra = ',np.min(ra) * coord.radians / coord.degrees)
    print('maxra = ',np.max(ra) * coord.radians / coord.degrees)
    print('mindec = ',np.min(dec) * coord.radians / coord.degrees)
    print('maxdec = ',np.max(dec) * coord.radians / coord.degrees)
    cat = treecorr.Catalog(ra=ra, dec=dec, ra_units='rad', dec_units='rad', w=w)
    npatch = 111
    field = cat.getNField()
    t0 = time.time()
    p, cen = field.run_kmeans(npatch)
    t1 = time.time()
    print('patches = ',np.unique(p))
    assert len(p) == cat.ntot
    assert min(p) == 0
    assert max(p) == npatch-1
    # Check the returned center to a direct calculation.
    xyz = np.array([cat.x, cat.y, cat.z]).T
    direct_cen = np.array([np.average(xyz[p==i], axis=0, weights=w[p==i]) for i in range(npatch)])
    # Project the weighted mean positions back onto the unit sphere.
    direct_cen /= np.sqrt(np.sum(direct_cen**2,axis=1)[:,np.newaxis])
    np.testing.assert_allclose(cen, direct_cen, atol=2.e-3)
    inertia = np.array([np.sum(w[p==i][:,None] * (xyz[p==i] - cen[i])**2) for i in range(npatch)])
    counts = np.array([np.sum(w[p==i]) for i in range(npatch)])
    print('With standard algorithm:')
    print('time = ',t1-t0)
    print('inertia = ',inertia)
    print('counts = ',counts)
    print('total inertia = ',np.sum(inertia))
    print('mean inertia = ',np.mean(inertia))
    print('rms inertia = ',np.std(inertia))
    assert np.sum(inertia) < 200. # This is specific to this particular field and npatch.
    assert np.std(inertia) < 0.3 * np.mean(inertia) # rms is usually small compared to the mean
    # With weights, these aren't actually all that similar. The range is more than a
    # factor of 10. I think because it varies whether high weight points happen to be near the
    # edges or middles of patches, so the total weight varies when you target having the
    # inertias be relatively similar.
    print('mean counts = ',np.mean(counts))
    print('min counts = ',np.min(counts))
    print('max counts = ',np.max(counts))
    # Check the alternate algorithm. rms inertia should be lower.
    t0 = time.time()
    p, cen = field.run_kmeans(npatch, alt=True)
    t1 = time.time()
    assert len(p) == cat.ntot
    assert min(p) == 0
    assert max(p) == npatch-1
    inertia = np.array([np.sum(w[p==i][:,None] * (xyz[p==i] - cen[i])**2) for i in range(npatch)])
    counts = np.array([np.sum(w[p==i]) for i in range(npatch)])
    print('With alternate algorithm:')
    print('time = ',t1-t0)
    print('total inertia = ',np.sum(inertia))
    print('mean inertia = ',np.mean(inertia))
    print('rms inertia = ',np.std(inertia))
    assert np.sum(inertia) < 200. # Total shouldn't increase much. (And often decreases.)
    assert np.std(inertia) < 0.15 * np.mean(inertia) # rms should be even smaller here.
    print('mean counts = ',np.mean(counts))
    print('min counts = ',np.min(counts))
    print('max counts = ',np.max(counts))
    # Finally, use a field with lots of top level cells to check the other branch in
    # InitializeCenters.
    field = cat.getNField(min_top=10)
    t0 = time.time()
    p, cen = field.run_kmeans(npatch)
    t1 = time.time()
    assert len(p) == cat.ntot
    assert min(p) == 0
    assert max(p) == npatch-1
    inertia = np.array([np.sum(w[p==i][:,None] * (xyz[p==i] - cen[i])**2) for i in range(npatch)])
    counts = np.array([np.sum(w[p==i]) for i in range(npatch)])
    # This doesn't give as good an initialization, so these are a bit worse usually.
    print('With min_top=10:')
    print('time = ',t1-t0)
    print('total inertia = ',np.sum(inertia))
    print('mean inertia = ',np.mean(inertia))
    print('rms inertia = ',np.std(inertia))
    assert np.sum(inertia) < 210.
    assert np.std(inertia) < 0.4 * np.mean(inertia) # I've seen over 0.3 x mean here.
    print('mean counts = ',np.mean(counts))
    print('min counts = ',np.min(counts))
    print('max counts = ',np.max(counts))
@timer
def test_3d():
    """Run kmeans on weighted 3-d (x, y, z) positions.

    Also verifies that the same points expressed as (ra, dec, r) give
    equivalent-quality patches, and checks the alt=True and min_top=10 paths.
    """
    # Like the above, but using x,y,z positions.
    ngal = 100000
    s = 1.
    rng = np.random.RandomState(8675309)
    x = rng.normal(0,s, (ngal,) )
    y = rng.normal(0,s, (ngal,) )
    z = rng.normal(0,s, (ngal,) )
    w = rng.random_sample(ngal) + 1
    cat = treecorr.Catalog(x=x, y=y, z=z, w=w)
    npatch = 111
    field = cat.getNField()
    t0 = time.time()
    p, cen = field.run_kmeans(npatch)
    t1 = time.time()
    print('patches = ',np.unique(p))
    assert len(p) == cat.ntot
    assert min(p) == 0
    assert max(p) == npatch-1
    xyz = np.array([x, y, z]).T
    inertia = np.array([np.sum(w[p==i][:,None] * (xyz[p==i] - cen[i])**2) for i in range(npatch)])
    counts = np.array([np.sum(w[p==i]) for i in range(npatch)])
    print('With standard algorithm:')
    print('time = ',t1-t0)
    print('inertia = ',inertia)
    print('counts = ',counts)
    print('total inertia = ',np.sum(inertia))
    print('mean inertia = ',np.mean(inertia))
    print('rms inertia = ',np.std(inertia))
    assert np.sum(inertia) < 33000.
    assert np.std(inertia) < 0.3 * np.mean(inertia) # rms is usually small compared to the mean
    print('mean counts = ',np.mean(counts))
    print('min counts = ',np.min(counts))
    print('max counts = ',np.max(counts))
    # Should be the same thing with ra, dec, r
    ra, dec = coord.CelestialCoord.xyz_to_radec(x,y,z)
    r = (x**2 + y**2 + z**2)**0.5
    cat2 = treecorr.Catalog(ra=ra, dec=dec, ra_units='rad', dec_units='rad', r=r, w=w)
    field = cat2.getNField()
    t0 = time.time()
    p2, cen = field.run_kmeans(npatch)
    t1 = time.time()
    inertia = np.array([np.sum(w[p2==i][:,None] * (xyz[p2==i] - cen[i])**2) for i in range(npatch)])
    counts = np.array([np.sum(w[p2==i]) for i in range(npatch)])
    print('time = ',t1-t0)
    print('total inertia = ',np.sum(inertia))
    print('mean inertia = ',np.mean(inertia))
    print('rms inertia = ',np.std(inertia))
    assert np.sum(inertia) < 33000.
    assert np.std(inertia) < 0.3 * np.mean(inertia) # rms is usually small compared to the mean
    print('mean counts = ',np.mean(counts))
    print('min counts = ',np.min(counts))
    print('max counts = ',np.max(counts))
    # Check the alternate algorithm. rms inertia should be lower.
    t0 = time.time()
    p, cen = field.run_kmeans(npatch, alt=True)
    t1 = time.time()
    assert len(p) == cat.ntot
    assert min(p) == 0
    assert max(p) == npatch-1
    inertia = np.array([np.sum(w[p==i][:,None] * (xyz[p==i] - cen[i])**2) for i in range(npatch)])
    counts = np.array([np.sum(w[p==i]) for i in range(npatch)])
    print('With alternate algorithm:')
    print('time = ',t1-t0)
    print('total inertia = ',np.sum(inertia))
    print('mean inertia = ',np.mean(inertia))
    print('rms inertia = ',np.std(inertia))
    assert np.sum(inertia) < 33000.
    assert np.std(inertia) < 0.1 * np.mean(inertia) # rms should be even smaller here.
    print('mean counts = ',np.mean(counts))
    print('min counts = ',np.min(counts))
    print('max counts = ',np.max(counts))
    # Finally, use a field with lots of top level cells to check the other branch in
    # InitializeCenters.
    field = cat.getNField(min_top=10)
    t0 = time.time()
    p, cen = field.run_kmeans(npatch)
    t1 = time.time()
    assert len(p) == cat.ntot
    assert min(p) == 0
    assert max(p) == npatch-1
    inertia = np.array([np.sum(w[p==i][:,None] * (xyz[p==i] - cen[i])**2) for i in range(npatch)])
    counts = np.array([np.sum(w[p==i]) for i in range(npatch)])
    # This doesn't give as good an initialization, so these are a bit worse usually.
    print('With min_top=10:')
    print('time = ',t1-t0)
    print('total inertia = ',np.sum(inertia))
    print('mean inertia = ',np.mean(inertia))
    print('rms inertia = ',np.std(inertia))
    assert np.sum(inertia) < 33000.
    assert np.std(inertia) < 0.4 * np.mean(inertia) # I've seen over 0.3 x mean here.
    print('mean counts = ',np.mean(counts))
    print('min counts = ',np.min(counts))
    print('max counts = ',np.max(counts))
@timer
def test_2d():
    """Run kmeans on weighted 2-d (x, y) positions.

    Also checks that kmeans works on non-N field types (GField, KField),
    plus the alt=True and min_top=10 code paths.
    """
    # Like the above, but using x,y positions.
    # An additional check here is that this works with other fields besides NField, even though
    # in practice NField will almost always be the kind of Field used.
    ngal = 100000
    s = 1.
    rng = np.random.RandomState(8675309)
    x = rng.normal(0,s, (ngal,) )
    y = rng.normal(0,s, (ngal,) )
    w = rng.random_sample(ngal) + 1
    g1 = rng.normal(0,s, (ngal,) )
    g2 = rng.normal(0,s, (ngal,) )
    k = rng.normal(0,s, (ngal,) )
    cat = treecorr.Catalog(x=x, y=y, w=w, g1=g1, g2=g2, k=k)
    npatch = 111
    field = cat.getGField()
    t0 = time.time()
    p, cen = field.run_kmeans(npatch)
    t1 = time.time()
    print('patches = ',np.unique(p))
    assert len(p) == cat.ntot
    assert min(p) == 0
    assert max(p) == npatch-1
    xy = np.array([x, y]).T
    inertia = np.array([np.sum(w[p==i][:,None] * (xy[p==i] - cen[i])**2) for i in range(npatch)])
    counts = np.array([np.sum(w[p==i]) for i in range(npatch)])
    print('With standard algorithm:')
    print('time = ',t1-t0)
    print('inertia = ',inertia)
    print('counts = ',counts)
    print('total inertia = ',np.sum(inertia))
    print('mean inertia = ',np.mean(inertia))
    print('rms inertia = ',np.std(inertia))
    assert np.sum(inertia) < 5300.
    assert np.std(inertia) < 0.3 * np.mean(inertia) # rms is usually small compared to the mean
    print('mean counts = ',np.mean(counts))
    print('min counts = ',np.min(counts))
    print('max counts = ',np.max(counts))
    # Check the alternate algorithm. rms inertia should be lower.
    t0 = time.time()
    p, cen = field.run_kmeans(npatch, alt=True)
    t1 = time.time()
    assert len(p) == cat.ntot
    assert min(p) == 0
    assert max(p) == npatch-1
    inertia = np.array([np.sum(w[p==i][:,None] * (xy[p==i] - cen[i])**2) for i in range(npatch)])
    counts = np.array([np.sum(w[p==i]) for i in range(npatch)])
    print('With alternate algorithm:')
    print('time = ',t1-t0)
    print('total inertia = ',np.sum(inertia))
    print('mean inertia = ',np.mean(inertia))
    print('rms inertia = ',np.std(inertia))
    assert np.sum(inertia) < 5300.
    assert np.std(inertia) < 0.1 * np.mean(inertia) # rms should be even smaller here.
    print('mean counts = ',np.mean(counts))
    print('min counts = ',np.min(counts))
    print('max counts = ',np.max(counts))
    # Finally, use a field with lots of top level cells to check the other branch in
    # InitializeCenters.  (Using a KField this time, for the field-type coverage.)
    field = cat.getKField(min_top=10)
    t0 = time.time()
    p, cen = field.run_kmeans(npatch)
    t1 = time.time()
    assert len(p) == cat.ntot
    assert min(p) == 0
    assert max(p) == npatch-1
    inertia = np.array([np.sum(w[p==i][:,None] * (xy[p==i] - cen[i])**2) for i in range(npatch)])
    counts = np.array([np.sum(w[p==i]) for i in range(npatch)])
    # This doesn't give as good an initialization, so these are a bit worse usually.
    print('With min_top=10:')
    print('time = ',t1-t0)
    print('total inertia = ',np.sum(inertia))
    print('mean inertia = ',np.mean(inertia))
    print('rms inertia = ',np.std(inertia))
    assert np.sum(inertia) < 5300.
    assert np.std(inertia) < 0.4 * np.mean(inertia) # I've seen over 0.3 x mean here.
    print('mean counts = ',np.mean(counts))
    print('min counts = ',np.min(counts))
    print('max counts = ',np.max(counts))
@timer
def test_init_random():
    """Test kmeans with init='random' in 3-d, 2-d, and spherical geometries.

    For each geometry: initialize centers only (skipping refinement), check
    the patch assignment invariants, then verify that a full run_kmeans
    reduces the total inertia below the initialization-only value.
    Also checks the error cases for invalid init/npatch arguments.
    """
    # Test the init=random option
    ngal = 100000
    s = 1.
    rng = np.random.RandomState(8675309)
    x = rng.normal(0,s, (ngal,) )
    y = rng.normal(0,s, (ngal,) )
    z = rng.normal(0,s, (ngal,) )
    cat = treecorr.Catalog(x=x, y=y, z=z)
    xyz = np.array([x, y, z]).T
    # Skip the refine_centers step.
    print('3d with init=random')
    npatch = 10
    field = cat.getNField()
    cen1 = field.kmeans_initialize_centers(npatch, 'random')
    assert cen1.shape == (npatch, 3)
    p1 = field.kmeans_assign_patches(cen1)
    print('patches = ',np.unique(p1))
    assert len(p1) == cat.ntot
    assert min(p1) == 0
    assert max(p1) == npatch-1
    inertia1 = np.array([np.sum((xyz[p1==i] - cen1[i])**2) for i in range(npatch)])
    counts1 = np.array([np.sum(p1==i) for i in range(npatch)])
    print('counts = ',counts1)
    print('rms counts = ',np.std(counts1))
    print('total inertia = ',np.sum(inertia1))
    # Now run the normal way
    # Use higher max_iter, since random isn't a great initialization.
    p2, cen2 = field.run_kmeans(npatch, init='random', max_iter=1000)
    inertia2 = np.array([np.sum((xyz[p2==i] - cen2[i])**2) for i in range(npatch)])
    counts2 = np.array([np.sum(p2==i) for i in range(npatch)])
    print('rms counts => ',np.std(counts2))
    print('total inertia => ',np.sum(inertia2))
    assert np.sum(inertia2) < np.sum(inertia1)
    # Use a field with lots of top level cells
    print('3d with init=random, min_top=10')
    field = cat.getNField(min_top=10)
    cen1 = field.kmeans_initialize_centers(npatch, 'random')
    assert cen1.shape == (npatch, 3)
    p1 = field.kmeans_assign_patches(cen1)
    print('patches = ',np.unique(p1))
    assert len(p1) == cat.ntot
    assert min(p1) == 0
    assert max(p1) == npatch-1
    inertia1 = np.array([np.sum((xyz[p1==i] - cen1[i])**2) for i in range(npatch)])
    counts1 = np.array([np.sum(p1==i) for i in range(npatch)])
    print('counts = ',counts1)
    print('rms counts = ',np.std(counts1))
    print('total inertia = ',np.sum(inertia1))
    # Now run the normal way
    p2, cen2 = field.run_kmeans(npatch, init='random', max_iter=1000)
    inertia2 = np.array([np.sum((xyz[p2==i] - cen2[i])**2) for i in range(npatch)])
    counts2 = np.array([np.sum(p2==i) for i in range(npatch)])
    print('rms counts => ',np.std(counts2))
    print('total inertia => ',np.sum(inertia2))
    assert np.sum(inertia2) < np.sum(inertia1)
    # Repeat in 2d
    print('2d with init=random')
    cat = treecorr.Catalog(x=x, y=y)
    xy = np.array([x, y]).T
    field = cat.getNField()
    cen1 = field.kmeans_initialize_centers(npatch, 'random')
    assert cen1.shape == (npatch, 2)
    p1 = field.kmeans_assign_patches(cen1)
    print('patches = ',np.unique(p1))
    assert len(p1) == cat.ntot
    assert min(p1) == 0
    assert max(p1) == npatch-1
    inertia1 = np.array([np.sum((xy[p1==i] - cen1[i])**2) for i in range(npatch)])
    counts1 = np.array([np.sum(p1==i) for i in range(npatch)])
    print('counts = ',counts1)
    print('rms counts = ',np.std(counts1))
    print('total inertia = ',np.sum(inertia1))
    # Now run the normal way
    p2, cen2 = field.run_kmeans(npatch, init='random', max_iter=1000)
    inertia2 = np.array([np.sum((xy[p2==i] - cen2[i])**2) for i in range(npatch)])
    counts2 = np.array([np.sum(p2==i) for i in range(npatch)])
    print('rms counts => ',np.std(counts2))
    print('total inertia => ',np.sum(inertia2))
    assert np.sum(inertia2) < np.sum(inertia1)
    # Repeat in spherical
    print('spher with init=random')
    ra, dec = coord.CelestialCoord.xyz_to_radec(x,y,z)
    cat = treecorr.Catalog(ra=ra, dec=dec, ra_units='rad', dec_units='rad')
    xyz = np.array([cat.x, cat.y, cat.z]).T
    field = cat.getNField()
    cen1 = field.kmeans_initialize_centers(npatch, 'random')
    assert cen1.shape == (npatch, 3)
    p1 = field.kmeans_assign_patches(cen1)
    print('patches = ',np.unique(p1))
    assert len(p1) == cat.ntot
    assert min(p1) == 0
    assert max(p1) == npatch-1
    inertia1 = np.array([np.sum((xyz[p1==i] - cen1[i])**2) for i in range(npatch)])
    counts1 = np.array([np.sum(p1==i) for i in range(npatch)])
    print('counts = ',counts1)
    print('rms counts = ',np.std(counts1))
    print('total inertia = ',np.sum(inertia1))
    # Now run the normal way
    p2, cen2 = field.run_kmeans(npatch, init='random', max_iter=1000)
    inertia2 = np.array([np.sum((xyz[p2==i] - cen2[i])**2) for i in range(npatch)])
    counts2 = np.array([np.sum(p2==i) for i in range(npatch)])
    print('rms counts => ',np.std(counts2))
    print('total inertia => ',np.sum(inertia2))
    assert np.sum(inertia2) < np.sum(inertia1)
    # Invalid init names and invalid npatch values should raise ValueError.
    with assert_raises(ValueError):
        field.run_kmeans(npatch, init='invalid')
    with assert_raises(ValueError):
        field.kmeans_initialize_centers(npatch, init='invalid')
    with assert_raises(ValueError):
        field.kmeans_initialize_centers(npatch=ngal*2, init='random')
    with assert_raises(ValueError):
        field.kmeans_initialize_centers(npatch=ngal+1, init='random')
    with assert_raises(ValueError):
        field.kmeans_initialize_centers(npatch=0, init='random')
    with assert_raises(ValueError):
        field.kmeans_initialize_centers(npatch=-100, init='random')
    # Should be valid to give npatch = 1, although not particularly useful.
    cen_1 = field.kmeans_initialize_centers(npatch=1, init='random')
    p_1 = field.kmeans_assign_patches(cen_1)
    np.testing.assert_equal(p_1, np.zeros(ngal))
    # If same number of patches as galaxies, each galaxy gets a patch.
    # (This is stupid of course, but check that it doesn't fail.)
    # Do this with fewer points though, since it's not particularly fast with N=10^5.
    n = 100
    cat = treecorr.Catalog(ra=ra[:n], dec=dec[:n], ra_units='rad', dec_units='rad')
    field = cat.getNField()
    cen_n = field.kmeans_initialize_centers(npatch=n, init='random')
    p_n = field.kmeans_assign_patches(cen_n)
    np.testing.assert_equal(sorted(p_n), list(range(n)))
@timer
def test_init_kmpp():
    """Test kmeans with init='kmeans++' in 3-d, 2-d, and spherical geometries.

    Mirrors test_init_random, but exercises the kmeans++ seeding: initialize
    centers only, check assignment invariants, then verify that a full
    run_kmeans reduces the total inertia.  Also checks invalid-argument errors.
    """
    # Test the init=kmeans++ option
    ngal = 100000
    s = 1.
    rng = np.random.RandomState(8675309)
    x = rng.normal(0,s, (ngal,) )
    y = rng.normal(0,s, (ngal,) )
    z = rng.normal(0,s, (ngal,) )
    cat = treecorr.Catalog(x=x, y=y, z=z)
    xyz = np.array([x, y, z]).T
    # Skip the refine_centers step.
    print('3d with init=kmeans++')
    npatch = 10
    field = cat.getNField()
    cen1 = field.kmeans_initialize_centers(npatch, 'kmeans++')
    assert cen1.shape == (npatch, 3)
    p1 = field.kmeans_assign_patches(cen1)
    print('patches = ',np.unique(p1))
    assert len(p1) == cat.ntot
    assert min(p1) == 0
    assert max(p1) == npatch-1
    inertia1 = np.array([np.sum((xyz[p1==i] - cen1[i])**2) for i in range(npatch)])
    counts1 = np.array([np.sum(p1==i) for i in range(npatch)])
    print('counts = ',counts1)
    print('rms counts = ',np.std(counts1))
    print('total inertia = ',np.sum(inertia1))
    # Now run the normal way
    # Use higher max_iter, since random isn't a great initialization.
    p2, cen2 = field.run_kmeans(npatch, init='kmeans++', max_iter=1000)
    inertia2 = np.array([np.sum((xyz[p2==i] - cen2[i])**2) for i in range(npatch)])
    counts2 = np.array([np.sum(p2==i) for i in range(npatch)])
    print('rms counts => ',np.std(counts2))
    print('total inertia => ',np.sum(inertia2))
    assert np.sum(inertia2) < np.sum(inertia1)
    # Use a field with lots of top level cells
    print('3d with init=kmeans++, min_top=10')
    field = cat.getNField(min_top=10)
    cen1 = field.kmeans_initialize_centers(npatch, 'kmeans++')
    assert cen1.shape == (npatch, 3)
    p1 = field.kmeans_assign_patches(cen1)
    print('patches = ',np.unique(p1))
    assert len(p1) == cat.ntot
    assert min(p1) == 0
    assert max(p1) == npatch-1
    inertia1 = np.array([np.sum((xyz[p1==i] - cen1[i])**2) for i in range(npatch)])
    counts1 = np.array([np.sum(p1==i) for i in range(npatch)])
    print('counts = ',counts1)
    print('rms counts = ',np.std(counts1))
    print('total inertia = ',np.sum(inertia1))
    # Now run the normal way
    p2, cen2 = field.run_kmeans(npatch, init='kmeans++', max_iter=1000)
    inertia2 = np.array([np.sum((xyz[p2==i] - cen2[i])**2) for i in range(npatch)])
    counts2 = np.array([np.sum(p2==i) for i in range(npatch)])
    print('rms counts => ',np.std(counts2))
    print('total inertia => ',np.sum(inertia2))
    assert np.sum(inertia2) < np.sum(inertia1)
    # Repeat in 2d
    print('2d with init=kmeans++')
    cat = treecorr.Catalog(x=x, y=y)
    xy = np.array([x, y]).T
    field = cat.getNField()
    cen1 = field.kmeans_initialize_centers(npatch, 'kmeans++')
    assert cen1.shape == (npatch, 2)
    p1 = field.kmeans_assign_patches(cen1)
    print('patches = ',np.unique(p1))
    assert len(p1) == cat.ntot
    assert min(p1) == 0
    assert max(p1) == npatch-1
    inertia1 = np.array([np.sum((xy[p1==i] - cen1[i])**2) for i in range(npatch)])
    counts1 = np.array([np.sum(p1==i) for i in range(npatch)])
    print('counts = ',counts1)
    print('rms counts = ',np.std(counts1))
    print('total inertia = ',np.sum(inertia1))
    # Now run the normal way
    p2, cen2 = field.run_kmeans(npatch, init='kmeans++', max_iter=1000)
    inertia2 = np.array([np.sum((xy[p2==i] - cen2[i])**2) for i in range(npatch)])
    counts2 = np.array([np.sum(p2==i) for i in range(npatch)])
    print('rms counts => ',np.std(counts2))
    print('total inertia => ',np.sum(inertia2))
    assert np.sum(inertia2) < np.sum(inertia1)
    # Repeat in spherical
    print('spher with init=kmeans++')
    ra, dec = coord.CelestialCoord.xyz_to_radec(x,y,z)
    cat = treecorr.Catalog(ra=ra, dec=dec, ra_units='rad', dec_units='rad')
    xyz = np.array([cat.x, cat.y, cat.z]).T
    field = cat.getNField()
    cen1 = field.kmeans_initialize_centers(npatch, 'kmeans++')
    assert cen1.shape == (npatch, 3)
    p1 = field.kmeans_assign_patches(cen1)
    print('patches = ',np.unique(p1))
    assert len(p1) == cat.ntot
    assert min(p1) == 0
    assert max(p1) == npatch-1
    inertia1 = np.array([np.sum((xyz[p1==i] - cen1[i])**2) for i in range(npatch)])
    counts1 = np.array([np.sum(p1==i) for i in range(npatch)])
    print('counts = ',counts1)
    print('rms counts = ',np.std(counts1))
    print('total inertia = ',np.sum(inertia1))
    # Now run the normal way
    p2, cen2 = field.run_kmeans(npatch, init='kmeans++', max_iter=1000)
    inertia2 = np.array([np.sum((xyz[p2==i] - cen2[i])**2) for i in range(npatch)])
    counts2 = np.array([np.sum(p2==i) for i in range(npatch)])
    print('rms counts => ',np.std(counts2))
    print('total inertia => ',np.sum(inertia2))
    assert np.sum(inertia2) < np.sum(inertia1)
    # Invalid npatch values should raise ValueError.
    with assert_raises(ValueError):
        field.kmeans_initialize_centers(npatch=ngal*2, init='kmeans++')
    with assert_raises(ValueError):
        field.kmeans_initialize_centers(npatch=ngal+1, init='kmeans++')
    with assert_raises(ValueError):
        field.kmeans_initialize_centers(npatch=0, init='kmeans++')
    with assert_raises(ValueError):
        field.kmeans_initialize_centers(npatch=-100, init='kmeans++')
    # Should be valid to give npatch = 1, although not particularly useful.
    cen_1 = field.kmeans_initialize_centers(npatch=1, init='kmeans++')
    p_1 = field.kmeans_assign_patches(cen_1)
    np.testing.assert_equal(p_1, np.zeros(ngal))
    # If same number of patches as galaxies, each galaxy gets a patch.
    # (This is stupid of course, but check that it doesn't fail.)
    # Do this with fewer points though, since it's not particularly fast with N=10^5.
    n = 100
    cat = treecorr.Catalog(ra=ra[:n], dec=dec[:n], ra_units='rad', dec_units='rad')
    field = cat.getNField()
    cen_n = field.kmeans_initialize_centers(npatch=n, init='kmeans++')
    p_n = field.kmeans_assign_patches(cen_n)
    np.testing.assert_equal(sorted(p_n), list(range(n)))
@timer
def test_zero_weight():
    """Check that w=0 objects are still assigned to kmeans patches.

    Regression test: there used to be a bug where w=0 objects were not
    assigned to any patch.  Verifies that the zero-weight points land in
    the same set of patches as the weighted points.
    """
    # Based on test_ra_dec, but where many galaxies have w=0.
    ngal = 10000
    s = 10.
    rng = np.random.RandomState(8675309)
    x = rng.normal(0,s, (ngal,) )
    y = rng.normal(0,s, (ngal,) ) + 100 # Put everything at large y, so smallish angle on sky
    z = rng.normal(0,s, (ngal,) )
    w = np.zeros(ngal)
    # Give 10% of the objects unit weight.  Use the seeded rng (not the global
    # np.random state) so the test is reproducible run to run.
    w[rng.choice(range(ngal), ngal//10, replace=False)] = 1.0
    ra, dec = coord.CelestialCoord.xyz_to_radec(x,y,z)
    print('minra = ',np.min(ra) * coord.radians / coord.degrees)
    print('maxra = ',np.max(ra) * coord.radians / coord.degrees)
    print('mindec = ',np.min(dec) * coord.radians / coord.degrees)
    print('maxdec = ',np.max(dec) * coord.radians / coord.degrees)
    cat = treecorr.Catalog(ra=ra, dec=dec, ra_units='rad', dec_units='rad', w=w,
                           keep_zero_weight=True)
    treecorr.set_omp_threads(1)
    npatch = 16
    field = cat.getNField()
    t0 = time.time()
    p, c = field.run_kmeans(npatch)
    t1 = time.time()
    print('patches = ',np.unique(p))
    assert len(p) == cat.ntot
    assert min(p) == 0
    assert max(p) == npatch-1
    print('w>0 patches = ',np.unique(p[w>0]))
    print('w==0 patches = ',np.unique(p[w==0]))
    # Zero-weight points must be distributed over the same patches.
    assert set(p[w>0]) == set(p[w==0])
@timer
def test_catalog_sphere():
    """Run kmeans via the Catalog API (npatch argument) on spherical coords.

    Same checks as test_radec, but patches come from cat.patch and
    cat.patch_centers rather than from calling run_kmeans directly.
    Also checks reusing patch_centers on an (ra, dec, r) catalog.
    """
    # This follows the same path as test_radec, but using the Catalog API to run kmeans.
    ngal = 100000
    s = 10.
    rng = np.random.RandomState(8675309)
    x = rng.normal(0,s, (ngal,) )
    y = rng.normal(0,s, (ngal,) ) + 100 # Put everything at large y, so smallish angle on sky
    z = rng.normal(0,s, (ngal,) )
    w = rng.random_sample(ngal)
    ra, dec, r = coord.CelestialCoord.xyz_to_radec(x,y,z, return_r=True)
    print('minra = ',np.min(ra) * coord.radians / coord.degrees)
    print('maxra = ',np.max(ra) * coord.radians / coord.degrees)
    print('mindec = ',np.min(dec) * coord.radians / coord.degrees)
    print('maxdec = ',np.max(dec) * coord.radians / coord.degrees)
    npatch = 111
    cat = treecorr.Catalog(ra=ra, dec=dec, ra_units='rad', dec_units='rad', w=w, npatch=npatch)
    t0 = time.time()
    p = cat.patch
    cen = cat.patch_centers
    t1 = time.time()
    print('patches = ',np.unique(p))
    assert len(p) == cat.ntot
    assert min(p) == 0
    assert max(p) == npatch-1
    # Check the returned center to a direct calculation.
    xyz = np.array([cat.x, cat.y, cat.z]).T
    direct_cen = np.array([np.average(xyz[p==i], axis=0, weights=w[p==i]) for i in range(npatch)])
    # Project the weighted mean positions back onto the unit sphere.
    direct_cen /= np.sqrt(np.sum(direct_cen**2,axis=1)[:,np.newaxis])
    np.testing.assert_allclose(cen, direct_cen, atol=2.e-3)
    inertia = np.array([np.sum(w[p==i][:,None] * (xyz[p==i] - cen[i])**2) for i in range(npatch)])
    counts = np.array([np.sum(w[p==i]) for i in range(npatch)])
    print('With standard algorithm:')
    print('time = ',t1-t0)
    print('inertia = ',inertia)
    print('counts = ',counts)
    print('total inertia = ',np.sum(inertia))
    print('mean inertia = ',np.mean(inertia))
    print('rms inertia = ',np.std(inertia))
    assert np.sum(inertia) < 200. # This is specific to this particular field and npatch.
    assert np.std(inertia) < 0.3 * np.mean(inertia) # rms is usually small compared to the mean
    # With weights, these aren't actually all that similar. The range is more than a
    # factor of 10. I think because it varies whether high weight points happen to be near the
    # edges or middles of patches, so the total weight varies when you target having the
    # inertias be relatively similar.
    print('mean counts = ',np.mean(counts))
    print('min counts = ',np.min(counts))
    print('max counts = ',np.max(counts))
    # Check the alternate algorithm. rms inertia should be lower.
    cat2 = treecorr.Catalog(ra=ra, dec=dec, ra_units='rad', dec_units='rad', w=w,
                            npatch=npatch, kmeans_alt=True)
    t0 = time.time()
    p = cat2.patch
    cen = cat2.patch_centers
    t1 = time.time()
    assert len(p) == cat2.ntot
    assert min(p) == 0
    assert max(p) == npatch-1
    inertia = np.array([np.sum(w[p==i][:,None] * (xyz[p==i] - cen[i])**2) for i in range(npatch)])
    counts = np.array([np.sum(w[p==i]) for i in range(npatch)])
    print('With alternate algorithm:')
    print('time = ',t1-t0)
    print('total inertia = ',np.sum(inertia))
    print('mean inertia = ',np.mean(inertia))
    print('rms inertia = ',np.std(inertia))
    assert np.sum(inertia) < 200. # Total shouldn't increase much. (And often decreases.)
    assert np.std(inertia) < 0.15 * np.mean(inertia) # rms should be even smaller here.
    print('mean counts = ',np.mean(counts))
    print('min counts = ',np.min(counts))
    print('max counts = ',np.max(counts))
    # Check using patch_centers from (ra,dec) -> (ra,dec,r)
    cat3 = treecorr.Catalog(ra=ra, dec=dec, r=r, ra_units='rad', dec_units='rad', w=w,
                            patch_centers=cat2.patch_centers)
    np.testing.assert_array_equal(cat2.patch, cat3.patch)
    np.testing.assert_array_equal(cat2.patch_centers, cat3.patch_centers)
@timer
def test_catalog_3d():
    """Check kmeans patch assignment for a Catalog built with ra, dec, AND r.

    Patches should be computed from the angular positions only (points
    projected to the unit sphere), so the resulting centers must be
    interchangeable with those of a pure (ra, dec) catalog.
    The inertia/count thresholds below are regression values specific to
    this seeded field and npatch=111.
    """
    # With ra, dec, r, the Catalog API should only do patches using RA, Dec.
    ngal = 100000
    s = 10.
    rng = np.random.RandomState(8675309)
    x = rng.normal(0,s, (ngal,) )
    y = rng.normal(0,s, (ngal,) ) + 100 # Put everything at large y, so smallish angle on sky
    z = rng.normal(0,s, (ngal,) )
    w = rng.random_sample(ngal)
    ra, dec, r = coord.CelestialCoord.xyz_to_radec(x,y,z, return_r=True)
    print('minra = ',np.min(ra) * coord.radians / coord.degrees)
    print('maxra = ',np.max(ra) * coord.radians / coord.degrees)
    print('mindec = ',np.min(dec) * coord.radians / coord.degrees)
    print('maxdec = ',np.max(dec) * coord.radians / coord.degrees)
    npatch = 111
    cat = treecorr.Catalog(ra=ra, dec=dec, r=r, ra_units='rad', dec_units='rad', w=w,
                           npatch=npatch)
    t0 = time.time()
    p = cat.patch
    cen = cat.patch_centers
    t1 = time.time()
    print('patches = ',np.unique(p))
    assert len(p) == cat.ntot
    assert min(p) == 0
    assert max(p) == npatch-1
    # Check the returned center to a direct calculation.
    # Dividing out r projects the 3-d positions back onto the unit sphere,
    # confirming patch centers come from angular position only.
    xyz = np.array([cat.x/cat.r, cat.y/cat.r, cat.z/cat.r]).T
    print('cen = ',cen)
    print('xyz = ',xyz)
    direct_cen = np.array([np.average(xyz[p==i], axis=0, weights=w[p==i]) for i in range(npatch)])
    direct_cen /= np.sqrt(np.sum(direct_cen**2,axis=1)[:,np.newaxis])
    np.testing.assert_allclose(cen, direct_cen, atol=2.e-3)
    # Weighted moment of inertia per patch, about the patch center.
    inertia = np.array([np.sum(w[p==i][:,None] * (xyz[p==i] - cen[i])**2) for i in range(npatch)])
    counts = np.array([np.sum(w[p==i]) for i in range(npatch)])
    print('With standard algorithm:')
    print('time = ',t1-t0)
    print('inertia = ',inertia)
    print('counts = ',counts)
    print('total inertia = ',np.sum(inertia))
    print('mean inertia = ',np.mean(inertia))
    print('rms inertia = ',np.std(inertia))
    assert np.sum(inertia) < 200.  # This is specific to this particular field and npatch.
    assert np.std(inertia) < 0.3 * np.mean(inertia)  # rms is usually small mean
    # With weights, these aren't actually all that similar.  The range is more than a
    # factor of 10.  I think because it varies whether high weight points happen to be near the
    # edges or middles of patches, so the total weight varies when you target having the
    # inertias be relatively similar.
    print('mean counts = ',np.mean(counts))
    print('min counts = ',np.min(counts))
    print('max counts = ',np.max(counts))
    # Check the alternate algorithm.  rms inertia should be lower.
    cat2 = treecorr.Catalog(ra=ra, dec=dec, r=r, ra_units='rad', dec_units='rad', w=w,
                            npatch=npatch, kmeans_alt=True)
    t0 = time.time()
    p = cat2.patch
    cen = cat2.patch_centers
    t1 = time.time()
    assert len(p) == cat2.ntot
    assert min(p) == 0
    assert max(p) == npatch-1
    inertia = np.array([np.sum(w[p==i][:,None] * (xyz[p==i] - cen[i])**2) for i in range(npatch)])
    counts = np.array([np.sum(w[p==i]) for i in range(npatch)])
    print('With alternate algorithm:')
    print('time = ',t1-t0)
    print('total inertia = ',np.sum(inertia))
    print('mean inertia = ',np.mean(inertia))
    print('rms inertia = ',np.std(inertia))
    assert np.sum(inertia) < 200.  # Total shouldn't increase much. (And often decreases.)
    assert np.std(inertia) < 0.15 * np.mean(inertia)  # rms should be even smaller here.
    print('mean counts = ',np.mean(counts))
    print('min counts = ',np.min(counts))
    print('max counts = ',np.max(counts))
    # Check using patch_centers from (ra,dec,r) -> (ra,dec)
    # i.e. the 3-d catalog's centers reproduce identical patches on a 2-d catalog.
    cat3 = treecorr.Catalog(ra=ra, dec=dec, ra_units='rad', dec_units='rad', w=w,
                            patch_centers=cat2.patch_centers)
    np.testing.assert_array_equal(cat2.patch, cat3.patch)
    np.testing.assert_array_equal(cat2.patch_centers, cat3.patch_centers)
if __name__ == '__main__':
    # Run the full kmeans/patch test suite when executed as a script.
    test_dessv()
    test_radec()
    test_3d()
    test_2d()
    test_init_random()
    test_init_kmpp()
    test_zero_weight()
    test_catalog_sphere()
    test_catalog_3d()
| 40.381497
| 100
| 0.62965
| 6,105
| 38,847
| 3.957576
| 0.062899
| 0.028145
| 0.018128
| 0.033235
| 0.910062
| 0.905633
| 0.904184
| 0.899673
| 0.897479
| 0.891726
| 0
| 0.028805
| 0.204649
| 38,847
| 961
| 101
| 40.423517
| 0.75318
| 0.163256
| 0
| 0.866756
| 0
| 0
| 0.109282
| 0
| 0
| 0
| 0
| 0
| 0.204576
| 1
| 0.012113
| false
| 0
| 0.013459
| 0
| 0.026918
| 0.317631
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e1088c6b0f484154235d71ff13cd3c654c3a9dab
| 43,598
|
py
|
Python
|
curiosity/interaction/update_step.py
|
neuroailab/curiosity_deprecated
|
65f7cde13b07cdac52eed39535a94e7544c396b8
|
[
"Apache-2.0"
] | null | null | null |
curiosity/interaction/update_step.py
|
neuroailab/curiosity_deprecated
|
65f7cde13b07cdac52eed39535a94e7544c396b8
|
[
"Apache-2.0"
] | 2
|
2017-11-18T00:53:33.000Z
|
2017-11-18T00:53:40.000Z
|
curiosity/interaction/update_step.py
|
neuroailab/curiosity_deprecated
|
65f7cde13b07cdac52eed39535a94e7544c396b8
|
[
"Apache-2.0"
] | null | null | null |
'''
Defines the training step.
'''
import sys
sys.path.append('tfutils')
import tensorflow as tf
from tfutils.base import get_optimizer, get_learning_rate
import numpy as np
import cv2
from curiosity.interaction import models
import h5py
import json
class RawDepthDiscreteActionUpdater:
	'''
	Provides the training step for a raw-depth, discrete-action RL model.
	This is probably where we can put parallelization.
	Not finished!
	'''
	def __init__(self, world_model, rl_model, data_provider, eta):
		# Bug fix: __init__ was missing the explicit self parameter.
		self.data_provider = data_provider
		self.world_model = world_model
		self.rl_model = rl_model
		self.eta = eta
		self.global_step = tf.get_variable('global_step', [], tf.int32, initializer = tf.constant_initializer(0, dtype = tf.int32))
		# Bug fix: the original wrote "self.action = tf.placeholder = tf.placeholder(...)",
		# which rebound (clobbered) the tf.placeholder function itself.
		self.action = tf.placeholder(tf.float32, [None] + world_model.action_one_hot.get_shape().as_list()[1:])
		self.adv = tf.placeholder(tf.float32, [None])  # advantage estimates
		self.r = tf.placeholder(tf.float32, [None])    # returns
		log_prob_tf = tf.nn.log_softmax(rl_model.logits)
		prob_tf = tf.nn.softmax(rl_model.logits)
		# Bug fix: the original referenced undefined self.ac; the one-hot action
		# placeholder defined above is self.action.
		pi_loss = -tf.reduce_sum(tf.reduce_sum(log_prob_tf * self.action, [1]) * self.adv)
		vf_loss = .5 * tf.reduce_sum(tf.square(rl_model.vf - self.r))
		entropy = -tf.reduce_sum(prob_tf * log_prob_tf)
		# A3C-style composite loss: policy + value + entropy bonus.
		self.rl_loss = pi_loss + 0.5 * vf_loss - entropy * 0.01
		# NOTE(review): learning_rate is still undefined here (class is marked
		# unfinished); presumably it should come from get_learning_rate as in the
		# updater classes below — TODO confirm before use.
		rl_opt_params, rl_opt = get_optimizer(learning_rate, self.rl_loss, )
def replace_the_nones(my_list):
	'''
	Replace every None entry with a zero array shaped like the last entry.
	Assumes my_list[-1] is an np array (used as the shape/dtype template).
	'''
	template = my_list[-1]
	cleaned = []
	for entry in my_list:
		if entry is None:
			cleaned.append(np.zeros(template.shape, dtype = template.dtype))
		else:
			cleaned.append(entry)
	return cleaned
def postprocess_batch_depth(batch, state_desc):
	'''
	Unpack a (obs, msg, act, act_post) batch into time-shifted training arrays.
	Returns (past observations, actions, post-actions, future observations),
	each with a leading batch axis of 1.
	'''
	observations, _messages, raw_actions, raw_actions_post = batch
	frames = replace_the_nones(observations[state_desc])
	past_frames = np.array([frames[:-1]])
	future_frames = np.array([frames[1:]])
	acts = np.array([replace_the_nones(raw_actions)])
	acts_post = np.array([replace_the_nones(raw_actions_post)])
	return past_frames, acts, acts_post, future_frames
# def postprocess_batch_depth(batch):
# depths = np.array([[timepoint if timepoint is not None else np.zeros(obs['depths1'][-1].shape, dtype = obs['depths1'][-1].dtype) for timepoint in obs['depths1']] for obs in batch.states])
# actions = np.array(batch.actions)
# next_depth = np.array([batch.next_state['depths1']])
# return depths, actions, next_depth
def postprocess_batch_for_actionmap(batch, state_desc):
	'''
	Unpack a (obs, msg, act) batch into arrays for the action-map model.
	Returns (past depths, objects, actions, action ids, future depths),
	each with a leading batch axis of 1.
	'''
	obs, msg, act = batch
	depths = replace_the_nones(obs[state_desc])
	depths_past = np.array([depths[:-1]])
	# Bug fix: future frames are depths[1:]; the original sliced depths[:1]
	# (just the first frame), inconsistent with postprocess_batch_depth.
	depths_fut = np.array([depths[1:]])
	# NOTE(review): this re-reads obs[state_desc] — presumably it should read the
	# objects channel (e.g. 'objects1') given the variable name; confirm callers.
	objects = np.array([replace_the_nones(obs[state_desc])[:-1]])
	actions = np.array([replace_the_nones(act)])
	action_ids_list = []
	for i in range(2):
		action_msg = msg[i]['msg']['actions'] if msg[i] is not None else []
		if len(action_msg):
			idx = int(action_msg[0]['id'])
		else:
			idx = -10000  # sentinel: just something that's not an id seen
		action_ids_list.append(idx)
	action_ids = np.array([action_ids_list])
	return depths_past, objects, actions, action_ids, depths_fut
# def postprocess_batch_for_actionmap(batch):
# prepped = {}
# for desc in ['depths1', 'objects1']:
# prepped[desc] = np.array([[timepoint if timepoint is not None else np.zeros(obs[desc][-1].shape, dtype = obs[desc][-1].dtype) for timepoint in obs[desc]] for obs in batch.states])
# actions = np.array([[np.zeros(batch.next_state['action'][-1].shape, batch.next_state['action'][-1].dtype) if timepoint is None else timepoint for timepoint in batch.next_state['action']]])
# print('actions shape')
# print(actions.shape)
# print(len(batch.next_state['action']))
# action_ids_list = []
# for i in range(2):
# action_msg = batch.next_state['msg'][i]['msg']['actions'] if batch.next_state['msg'][i] is not None else []
# if len(action_msg):
# idx = int(action_msg[0]['id'])
# action_ids_list.append(idx)
# action_ids = np.array([action_ids_list])
# next_depths = np.array([batch.next_state['depths1']])
# return prepped['depths1'], prepped['objects1'], actions, action_ids, next_depths
class ExperienceReplayPostprocessor:
	'''Selects which training results to persist for a step.

	Alternates between "big" saves (full batch dump, for big_save_len steps out
	of every big_save_freq) and "little" saves (selected scalars only).
	'''
	def __init__(self, big_save_keys = None, little_save_keys = None, big_save_len = None, big_save_freq = None, state_descriptor = None):
		# Robustness: the original appended to the caller's lists in place and
		# crashed on the None defaults; copy defensively and tolerate None.
		self.big_save_keys = list(big_save_keys) if big_save_keys is not None else []
		self.little_save_keys = list(little_save_keys) if little_save_keys is not None else []
		self.big_save_len = big_save_len
		self.big_save_freq = big_save_freq
		self.state_descriptor = state_descriptor
		self.big_save_keys.append('map_draw')
		self.little_save_keys.append('map_draw')
		self.big_save_keys.extend(['act_lr', 'um_lr'])
		self.little_save_keys.extend(['act_lr', 'um_lr'])
	def postprocess(self, training_results, batch):
		'''Filter training_results to the keys scheduled for this global step.

		On big-save steps the (non-image) batch contents are attached under
		'batch' and 'recent'; when messages are present, the OBJ_ACT message
		frequency is reported as 'obj_freq'.
		'''
		global_step = training_results['global_step']
		res = {}
		if (global_step) % self.big_save_freq < self.big_save_len:
			save_keys = self.big_save_keys
			res['batch'] = {}
			# Exclude the bulky image-like arrays from the saved batch.
			for desc, val in batch.items():  # .iteritems() -> .items() for py2/py3
				if desc not in ['recent', 'depths1', 'objects1', 'images1']:
					res['batch'][desc] = val
			res['recent'] = batch['recent']
		else:
			save_keys = self.little_save_keys
		res.update(dict(pair for pair in training_results.items() if pair[0] in save_keys))
		if 'msg' in batch['recent']:
			looking_at_obj = [1 if msg is not None and msg['msg']['action_type'] == 'OBJ_ACT' else 0 for msg in batch['recent']['msg']]
			res['obj_freq'] = np.mean(looking_at_obj)
		elif type(batch['recent']) == list and len(batch['recent'][0]) > 0:
			# Multiple data providers: average OBJ_ACT frequency per provider.
			mean_per_provider = []
			for provider_recent in batch['recent']:
				looking_at_obj = [1 if msg is not None and msg['msg']['action_type'] == 'OBJ_ACT' else 0 for msg in provider_recent['msg']]
				mean_per_provider.append(np.mean(looking_at_obj))
			res['obj_freq'] = np.mean(mean_per_provider)
			res['obj_freq_per_provider_noprint'] = mean_per_provider
		return res
class UncertaintyPostprocessor:
	'''Postprocessor for uncertainty-model training results.

	Like ExperienceReplayPostprocessor but additionally records mean action
	entropy and OBJ_ACT message frequency from batch['recent'] on every step.
	'''
	def __init__(self, big_save_keys = None, little_save_keys = None, big_save_len = None, big_save_freq = None, state_descriptor = None):
		self.big_save_keys = big_save_keys
		self.little_save_keys = little_save_keys
		self.big_save_len = big_save_len
		self.big_save_freq = big_save_freq
		self.state_descriptor = state_descriptor
	def postprocess(self, training_results, batch):
		'''Filter training_results to this step's save keys; attach entropy/obj_freq.'''
		global_step = training_results['global_step']
		res = {}
		print('postprocessor deets')
		print(global_step)
		print(self.big_save_freq)
		print(self.big_save_len)
		if (global_step) % self.big_save_freq < self.big_save_len:
			print('big time')
			save_keys = self.big_save_keys
			# 'other' entries are (entropy, estimated loss, action sample) tuples.
			est_losses = [other[1] for other in batch['recent']['other']]
			action_sample = [other[2] for other in batch['recent']['other']]
			res['batch'] = {'obs' : batch['depths1'], 'act' : batch['action'], 'act_post' : batch['action_post'], 'est_loss' : est_losses, 'action_sample' : action_sample}
			res['msg'] = batch['recent']['msg']
		else:
			print('little time')
			save_keys = self.little_save_keys
		res.update(dict((k, v) for (k, v) in training_results.items() if k in save_keys))
		#res['msg'] = batch['msg'][-1]
		entropies = [other[0] for other in batch['recent']['other']]
		entropies = np.mean(entropies)
		res['entropy'] = entropies
		# Bug fix: the original evaluated msg['msg']['action_type']['OBJ_ACT']
		# (an indexing expression, which raises on a string) instead of comparing
		# for equality; match the check used in ExperienceReplayPostprocessor.
		looking_at_obj = [1 if msg is not None and msg['msg']['action_type'] == 'OBJ_ACT' else 0 for msg in batch['recent']['msg']]
		res['obj_freq'] = np.mean(looking_at_obj)
		return res
class DataWriteUpdater:
	'''Dequeues batches from a data provider and appends them to an HDF5 file.

	Datasets are created (or reopened) with fixed capacity N_save; update()
	writes each dequeued batch at the running offset self.start.
	'''
	def __init__(self, data_provider, updater_params):
		self.data_provider = data_provider
		fn = updater_params['hdf5_filename']
		N = updater_params['N_save']          # total rows to reserve per dataset
		height, width = updater_params['image_shape']
		act_dim = updater_params['act_dim']
		print('setting up save loc')
		self.hdf5 = hdf5 = h5py.File(fn, mode = 'a')
		dt = h5py.special_dtype(vlen = str)   # variable-length string dtype for msgs
		self.handles = {'msg' : hdf5.require_dataset('msg', shape = (N,), dtype = dt),
			'depths1' : hdf5.require_dataset('depths1', shape = (N, height, width, 3), dtype = np.uint8),
			'objects1' : hdf5.require_dataset('objects1', shape = (N, height, width, 3), dtype = np.uint8),
			'images1': hdf5.require_dataset('images1', shape = (N, height, width, 3), dtype = np.uint8),
			'action' : hdf5.require_dataset('action', shape = (N, act_dim), dtype = np.float32),
			'action_post' : hdf5.require_dataset('action_post', shape = (N, act_dim), dtype = np.float32)}
		print('save loc set up')
		self.start = 0
	def update(self):
		'''Write one dequeued batch into the HDF5 datasets and advance the offset.'''
		batch = self.data_provider.dequeue_batch()
		bs = len(batch['recent']['msg'])
		end = self.start + bs
		for k in ['depths1', 'objects1', 'images1', 'action', 'action_post']:
			tosave = batch['recent'][k]
			if k in ['action', 'action_post']:
				tosave = tosave.astype(np.float32)
			# Bug fix: write the converted array; the original wrote the raw
			# batch value, silently discarding the float32 cast above.
			self.handles[k][self.start : end] = tosave
		self.handles['msg'][self.start : end] = [json.dumps(msg) for msg in batch['recent']['msg']]
		self.start = end
	def close(self):
		'''Close the underlying HDF5 file.'''
		self.hdf5.close()
class LatentUncertaintyValidator:
	'''Validation pass over the latent world model and uncertainty model losses.'''
	def __init__(self, models, data_provider):
		self.um = models['uncertainty_model']
		self.wm = models['world_model']
		self.targets = {
			'act_pred' : self.wm.act_pred,
			'fut_loss' : self.wm.fut_loss, 'act_loss' : self.wm.act_loss, 'um_loss' : self.um.uncertainty_loss,
			'estimated_world_loss' : self.um.estimated_world_loss, 'loss_per_example' : self.um.true_loss,
			'act_loss_per_example' : self.wm.act_loss_per_example
		}
		self.dp = data_provider
	def run(self, sess):
		'''Evaluate the validation targets on one dequeued batch.

		Returns the fetched targets plus (under 'batch') the last timestep of
		each non-'recent' batch entry, and the raw 'recent' data.
		'''
		batch = self.dp.dequeue_batch()
		feed_dict = {
			self.wm.states : batch['depths1'],
			self.wm.action : batch['action'],
			self.wm.action_post : batch['action_post'],
			self.wm.obj_there : batch['obj_there']
		}
		res = sess.run(self.targets, feed_dict = feed_dict)
		res['batch'] = {}
		for desc, val in batch.items():  # .iteritems() -> .items() for py2/py3
			print(desc)
			if desc == 'obj_there':
				res['batch'][desc] = val
			elif desc != 'recent':
				res['batch'][desc] = val[:, -1]  # keep only the final timestep
		res['recent'] = batch['recent']
		# Bug fix: the result dict was built but never returned by the original.
		return res
class ObjectThereValidater:
	'''Validation pass for the object-there uncertainty targets (no training ops).'''
	def __init__(self, models, data_provider):
		self.um = models['uncertainty_model']
		self.wm = models['world_model']
		self.dp = data_provider
		self.targets = {
			'um_loss' : self.um.uncertainty_loss,
			'loss_per_example' : self.um.true_loss,
			'estimated_world_loss' : self.um.estimated_world_loss,
		}
	def run(self, sess):
		'''Dequeue one batch and evaluate the validation targets.'''
		data = self.dp.dequeue_batch()
		feeds = {
			self.wm.states : data['depths1'],
			self.wm.action : data['action'],
			self.wm.obj_there : data['obj_there'],
		}
		return sess.run(self.targets, feed_dict = feeds)
class ActionUncertaintyValidator:
	'''Validation pass over action-prediction and uncertainty readouts.'''
	def __init__(self, models, data_provider):
		self.um = um = models['uncertainty_model']
		self.wm = wm = models['world_model']
		self.dp = data_provider
		self.targets = {
			'act_pred' : self.wm.act_pred,
			'act_loss' : self.wm.act_loss,
			'estimated_world_loss' : self.um.estimated_world_loss,
			'um_loss' : self.um.uncertainty_loss,
			'loss_per_example' : self.um.true_loss,
		}
	def run(self, sess):
		'''Evaluate validation targets on one dequeued batch; attach the batch.'''
		data = self.dp.dequeue_batch()
		feeds = {
			self.wm.states : data['depths1'],
			self.wm.action : data['action'],
			self.wm.action_post : data['action_post'],
		}
		outputs = sess.run(self.targets, feed_dict = feeds)
		outputs['batch'] = data
		return outputs
class ActionUncertaintyValidatorWithReadouts:
	'''Validation pass fetching all model readouts not destined for gfs storage.'''
	def __init__(self, model, data_provider):
		self.dp = data_provider
		self.wm = model['world_model']
		self.um = model['uncertainty_model']
		self.targets = {}
		self.targets.update({k : v for k, v in self.wm.readouts.items() if k not in self.wm.save_to_gfs})
		self.targets.update({k : v for k, v in self.um.readouts.items() if k not in self.um.save_to_gfs})
		#this should be changed for an online data provider, set to do nothing
		self.map_draw_mode = 'specified_indices'
		#relies on there being just one obs type
		# list(...) works on both py2 and py3; the original .keys()[0] is py2-only.
		self.state_desc = list(data_provider.data_lengths['obs'].keys())[0]
		self.insert_objthere = data_provider.num_objthere is not None
	def run(self, sess):
		'''Evaluate the readout targets on one dequeued batch.'''
		batch = self.dp.dequeue_batch()
		feed_dict = {
			self.wm.states : batch[self.state_desc],
			self.wm.action : batch['action'],
			self.wm.action_post : batch['action_post']
		}
		if self.insert_objthere:
			feed_dict[self.wm.obj_there_via_msg] = batch['obj_there']
		res = sess.run(self.targets, feed_dict = feed_dict)
		#TODO case it for online
		res['recent'] = {}
		return res
class ObjectThereUpdater:
	'''Training updater that optimizes the uncertainty model from obj_there labels.'''
	def __init__(self, world_model, uncertainty_model, data_provider, optimizer_params, learning_rate_params, postprocessor, updater_params):
		self.data_provider = data_provider
		self.wm = world_model
		self.um = uncertainty_model
		self.postprocessor = postprocessor
		self.global_step = tf.get_variable('global_step', [], tf.int32, initializer = tf.constant_initializer(0,dtype = tf.int32))
		self.um_lr_params, um_lr = get_learning_rate(self.global_step, **learning_rate_params['uncertainty_model'])
		um_opt_params, um_opt = get_optimizer(um_lr, self.um.uncertainty_loss, self.global_step, optimizer_params['uncertainty_model'], var_list = self.um.var_list)
		self.targets = {
			'um_loss' : self.um.uncertainty_loss,
			'um_lr' : um_lr,
			'um_optimizer' : um_opt,
			'global_step' : self.global_step,
			'loss_per_example' : self.um.true_loss,
			'estimated_world_loss' : self.um.estimated_world_loss,
		}
		self.state_desc = updater_params['state_desc']
	def update(self, sess, visualize = False):
		'''Run one optimization step on a dequeued batch and postprocess the results.'''
		data = self.data_provider.dequeue_batch()
		feeds = {
			self.wm.states : data[self.state_desc],
			self.wm.action : data['action'],
			self.wm.obj_there : data['obj_there'],
		}
		raw = sess.run(self.targets, feed_dict = feeds)
		return self.postprocessor.postprocess(raw, data)
class SquareForceMagUpdater:
	'''Updater training only the uncertainty model, optionally feeding
	obj_there labels straight into the uncertainty model.
	'''
	def __init__(self, models, data_provider, optimizer_params, learning_rate_params, postprocessor, updater_params):
		self.dp = data_provider
		self.wm = models['world_model']
		self.um = models['uncertainty_model']
		self.postprocessor = postprocessor
		self.global_step = tf.get_variable('global_step', [], tf.int32, initializer = tf.constant_initializer(0,dtype = tf.int32))
		self.um_lr_params, um_lr = get_learning_rate(self.global_step, **learning_rate_params['uncertainty_model'])
		um_opt_params, um_opt = get_optimizer(um_lr, self.um.uncertainty_loss, self.global_step, optimizer_params['uncertainty_model'], var_list = self.um.var_list)
		self.targets = {
			'um_loss' : self.um.uncertainty_loss,
			'um_lr' : um_lr,
			'um_optimizer' : um_opt,
			'global_step' : self.global_step,
			'loss_per_example' : self.um.true_loss,
			'estimated_world_loss' : self.um.estimated_world_loss,
		}
		if self.um.exactly_whats_needed:
			self.targets['oh_my_god'] = self.um.oh_my_god
		self.state_desc = updater_params['state_desc']
	def update(self, sess, visualize = False):
		'''One training step: dequeue, build feeds, run the graph, postprocess.'''
		data = self.dp.dequeue_batch()
		feeds = {
			self.wm.states : data[self.state_desc],
			self.wm.action : data['action'],
			self.wm.action_post : data['action_post'],
		}
		if self.um.insert_obj_there:
			print('adding obj_there to feed dict')
			feeds[self.um.obj_there] = data['obj_there']
		raw = sess.run(self.targets, feed_dict = feeds)
		return self.postprocessor.postprocess(raw, data)
class DebuggingForceMagUpdater:
	'''Debugging variant of the uncertainty-model updater that also fetches
	internal tensors (ans, oh_my_god) and the model parameters for inspection.
	'''
	def __init__(self, models, data_provider, optimizer_params, learning_rate_params, postprocessor, updater_params):
		self.dp = data_provider
		self.wm = models['world_model']
		self.um = models['uncertainty_model']
		self.postprocessor = postprocessor
		self.global_step = tf.get_variable('global_step', [], tf.int32, initializer = tf.constant_initializer(0, dtype = tf.int32))
		print(learning_rate_params.keys())
		um_lr_params, um_lr = get_learning_rate(self.global_step, **learning_rate_params['uncertainty_model'])
		um_opt_params, um_opt = get_optimizer(um_lr, self.um.uncertainty_loss, self.global_step, optimizer_params['uncertainty_model'])
		self.targets = {
			'um_loss' : self.um.uncertainty_loss,
			'um_optimizer' : um_opt,
			'global_step' : self.global_step,
			'loss_per_example' : self.um.true_loss,
			'estimated_world_loss' : self.um.estimated_world_loss,
			'ans' : self.um.ans,
			'oh_my_god' : self.um.oh_my_god,
			'model_parameters' : self.um.var_list,
		}
	def update(self, sess):
		'''One debugging step; note only actions and obj_there are fed (no states).'''
		data = self.dp.dequeue_batch()
		feeds = {
			self.wm.action : data['action'],
			self.wm.action_post : data['action_post'],
			self.um.obj_there : data['obj_there'],
		}
		raw = sess.run(self.targets, feed_dict = feeds)
		return self.postprocessor.postprocess(raw, data)
class LatentFreezeUpdater:
	'''Training updater for the latent world model + uncertainty model where
	either model can be frozen (no optimizer built for it) via updater_params.

	Separate step counters (act_step, fut_step, um_step) let each optimizer
	follow its own learning-rate schedule independently of global_step.
	'''
	def __init__(self, models, data_provider, optimizer_params, learning_rate_params, postprocessor, updater_params):
		# Normalize to a list so update() can concatenate across providers.
		self.data_provider = data_provider\
			if isinstance(data_provider, list) else [data_provider]
		self.wm = models['world_model']
		self.um = models['uncertainty_model']
		freeze_wm = updater_params['freeze_wm']
		freeze_um = updater_params['freeze_um']
		self.postprocessor = postprocessor
		self.global_step = tf.get_variable('global_step', [], tf.int32, initializer = tf.constant_initializer(0,dtype = tf.int32))
		self.act_step = tf.get_variable('act_step', [], tf.int32, initializer = tf.constant_initializer(0,dtype = tf.int32))
		self.fut_step = tf.get_variable('fut_step', [], tf.int32, initializer = tf.constant_initializer(0,dtype = tf.int32))
		self.um_step = tf.get_variable('ext_uncertainty_step', [], tf.int32, initializer = tf.constant_initializer(0,dtype = tf.int32))
		self.targets = {}
		self.state_desc = updater_params.get('state_desc', 'depths1')
		if not freeze_wm:
			# World model trains: build act/fut optimizers on their own counters.
			act_lr_params, act_lr = get_learning_rate(self.act_step, **learning_rate_params['world_model']['act_model'])
			fut_lr_params, fut_lr = get_learning_rate(self.fut_step, **learning_rate_params['world_model']['fut_model'])
			act_opt_params, act_opt = get_optimizer(act_lr, self.wm.act_loss, self.act_step, optimizer_params['world_model']['act_model'], var_list = self.wm.act_var_list + self.wm.encode_var_list)
			fut_opt_params, fut_opt = get_optimizer(fut_lr, self.wm.fut_loss, self.fut_step, optimizer_params['world_model']['fut_model'], var_list = self.wm.fut_var_list)
			self.targets['act_opt'] = act_opt
			self.targets['fut_opt'] = fut_opt
			self.targets['act_lr'] = act_lr
			self.targets['fut_lr'] = fut_lr
		if not freeze_um:
			um_lr_params, um_lr = get_learning_rate(self.um_step, **learning_rate_params['uncertainty_model'])
			um_opt_params, um_opt = get_optimizer(um_lr, self.um.uncertainty_loss, self.um_step, optimizer_params['uncertainty_model'], var_list = self.um.var_list)
			self.targets['um_opt'] = um_opt
			self.targets['um_lr'] = um_lr
		self.targets['global_step'] = self.global_step
		# Explicit increments since the frozen optimizers don't advance steps.
		global_increment = tf.assign_add(self.global_step, 1)
		um_increment = tf.assign_add(self.um.step, 1)
		self.targets.update({'global_increment' : global_increment, 'um_increment' : um_increment})
		self.targets.update(self.wm.readouts)
		self.targets.update(self.um.readouts)
		# NOTE(review): this only asserts the two readout key sets are not equal,
		# not that they are disjoint; overlapping keys would silently collide.
		assert set(self.wm.readouts.keys()) != set(self.um.readouts.keys())
	def update(self, sess, visualize = False):
		'''Dequeue one batch from every provider, run one training step, postprocess.

		Returns (postprocessed results, global step).
		'''
		if self.um.just_random:
			print('Selecting action at random')
		# Merge per-provider batches: collect values per key, then concatenate.
		batch = {}
		for i, dp in enumerate(self.data_provider):
			provider_batch = dp.dequeue_batch()
			for k in provider_batch:
				if k in batch:
					batch[k].append(provider_batch[k])
				else:
					batch[k] = [provider_batch[k]]
		for k in ['action', 'action_post', self.state_desc]:
			batch[k] = np.concatenate(batch[k], axis=0)
		feed_dict = {
			self.wm.states : batch[self.state_desc],
			self.wm.action : batch['action'],
			self.wm.action_post : batch['action_post']
		}
		res = sess.run(self.targets, feed_dict = feed_dict)
		# Drop the increment ops' outputs; they are side effects, not results.
		res.pop('um_increment')
		res.pop('global_increment')
		global_step = res['global_step']
		#if self.map_draw_mode is not None and global_step % self.map_draw_freq == 0:
		#	if self.map_draw_mode == 'specified_indices':
		#		map_draw_res = []
		#		for idx in self.map_draw_example_indices:
		#			obs_for_actor = [batch[self.state_desc][idx][t] for t in self.map_draw_timestep_indices]
		#			action_samples = self.action_sampler.sample_actions()
		#			action, entropy, estimated_world_loss = self.um.act(sess, action_samples, obs_for_actor)
		#			to_add = {'example_id' : idx, 'action_sample' : action, 'estimated_world_loss' : estimated_world_loss,
		#				'action_samples' : action_samples, 'depths1' : batch[self.state_desc][idx],
		#				'action' : batch['action'][idx], 'action_post' : batch['action_post'][idx]}
		#			map_draw_res.append(to_add)
		#		res['map_draw'] = map_draw_res
		res = self.postprocessor.postprocess(res, batch)
		return res, global_step
class FreezeUpdater:
	'''Training updater where the world model's act branch and/or the
	uncertainty model can be frozen; both unfrozen optimizers share global_step.
	'''
	def __init__(self, models, data_provider, optimizer_params, learning_rate_params, postprocessor, updater_params):
		# Normalize to a list so update() can concatenate across providers.
		self.data_provider = data_provider \
			if isinstance(data_provider, list) else [data_provider]
		self.wm = models['world_model']
		self.um = models['uncertainty_model']
		freeze_wm = updater_params['freeze_wm']
		freeze_um = updater_params['freeze_um']
		self.postprocessor = postprocessor
		self.global_step = tf.get_variable('global_step', [], tf.int32, initializer = tf.constant_initializer(0,dtype = tf.int32))
		self.act_lr_params, act_lr = get_learning_rate(self.global_step, ** learning_rate_params['world_model']['act_model'])
		self.um_lr_params, um_lr = get_learning_rate(self.global_step, ** learning_rate_params['uncertainty_model'])
		num_not_frozen = 0
		self.targets = {}
		self.state_desc = updater_params.get('state_desc', 'depths1')
		if not freeze_wm:
			num_not_frozen += 1
			act_opt_params, act_opt = get_optimizer(act_lr, self.wm.act_loss, self.global_step, optimizer_params['world_model']['act_model'], var_list = self.wm.act_var_list + self.wm.encode_var_list)
			self.targets['act_opt'] = act_opt
		if not freeze_um:
			num_not_frozen += 1
			um_opt_params, um_opt = get_optimizer(um_lr, self.um.uncertainty_loss, self.global_step, optimizer_params['uncertainty_model'], var_list = self.um.var_list)
			self.targets['um_opt'] = um_opt
		if num_not_frozen == 0:
			# Nothing trains: advance global_step manually.
			self.targets['global_step'] = self.global_step
			self.targets['increment'] = tf.assign_add(self.global_step, 1)
		else:
			# NOTE(review): each active optimizer increments global_step, so the
			# reported step is divided by the optimizer count to compensate —
			# confirm this matches the intended step accounting.
			self.global_step = self.global_step / num_not_frozen
			self.targets['global_step'] = self.global_step
		self.targets.update({'act_lr' : act_lr, 'um_lr' : um_lr})
		# NOTE(review): only asserts the readout key sets are not equal, not
		# disjoint; overlapping keys would silently collide in targets.
		assert set(self.wm.readouts.keys()) != set(self.um.readouts.keys())
		self.targets.update(self.wm.readouts)
		self.targets.update(self.um.readouts)
		um_increment = tf.assign_add(self.um.step, 1)
		assert 'um_increment' not in self.targets
		self.targets['um_increment'] = um_increment
		self.obj_there_supervision = updater_params.get('include_obj_there', False)
		#self.map_draw_mode = None
		#Map drawing. Meant to have options, but for now just assuming one sort of specification
		#self.state_desc = updater_params.get('state_desc', 'depths1')
		#self.map_draw_mode = updater_params['map_draw_mode']
		#this specification specifices batch example indices for which we do a forward pass.
		#need to do one forward pass each index because action sampling is the 'batch.'
		#self.action_sampler = action_sampler
		#assert self.map_draw_mode == 'specified_indices' and self.action_sampler is not None, (self.map_draw_mode, action_sampler)
		#self.map_draw_example_indices = updater_params['map_draw_example_indices']
		#self.map_draw_timestep_indices = updater_params['map_draw_timestep_indices']
		#self.map_draw_freq = updater_params['map_draw_freq']
	def update(self, sess, visualize = False):
		'''Dequeue one batch from every provider, run one training step, postprocess.

		Returns (postprocessed results, global step).
		'''
		if self.um.just_random:
			print('Selecting action at random')
		# Merge per-provider batches: collect values per key, then concatenate.
		batch = {}
		for i, dp in enumerate(self.data_provider):
			provider_batch = dp.dequeue_batch()
			for k in provider_batch:
				if k in batch:
					batch[k].append(provider_batch[k])
				else:
					batch[k] = [provider_batch[k]]
		for k in ['action', 'action_post', self.state_desc]:
			batch[k] = np.concatenate(batch[k], axis=0)
		feed_dict = {
			self.wm.states : batch[self.state_desc],
			self.wm.action : batch['action'],
			self.wm.action_post : batch['action_post']
		}
		if self.obj_there_supervision:
			batch['obj_there'] = np.concatenate(batch['obj_there'], axis = 0)
			feed_dict[self.wm.obj_there_via_msg] = batch['obj_there']
		print('state desc! ' + self.state_desc)
		res = sess.run(self.targets, feed_dict = feed_dict)
		# Drop the increment op's output; it is a side effect, not a result.
		res.pop('um_increment')
		global_step = res['global_step']
		#if self.map_draw_mode is not None and global_step % self.map_draw_freq == 0:
		#	if self.map_draw_mode == 'specified_indices':
		#		map_draw_res = []
		#		for idx in self.map_draw_example_indices:
		#			obs_for_actor = [batch[self.state_desc][idx][t] for t in self.map_draw_timestep_indices]
		#			action_samples = self.action_sampler.sample_actions()
		#			action, entropy, estimated_world_loss = self.um.act(sess, action_samples, obs_for_actor)
		#			to_add = {'example_id' : idx, 'action_sample' : action, 'estimated_world_loss' : estimated_world_loss,
		#				'action_samples' : action_samples, 'depths1' : batch[self.state_desc][idx],
		#				'action' : batch['action'][idx], 'action_post' : batch['action_post'][idx]}
		#			map_draw_res.append(to_add)
		#		res['map_draw'] = map_draw_res
		res = self.postprocessor.postprocess(res, batch)
		return res, global_step
class JustUncertaintyUpdater:
def __init__(self, models, data_provider, optimizer_params, learning_rate_params, postprocessor, updater_params, action_sampler = None):
self.data_provider = data_provider \
if isinstance(data_provider, list) else [data_provider]
self.wm = models['world_model']
self.um = models['uncertainty_model']
self.postprocessor = postprocessor
self.global_step = tf.get_variable('global_step', [], tf.int32, initializer = tf.constant_initializer(0,dtype = tf.int32))
self.um_lr_params, um_lr = get_learning_rate(self.global_step, ** learning_rate_params['uncertainty_model'])
um_opt_params, um_opt = get_optimizer(um_lr, self.um.uncertainty_loss, self.global_step, optimizer_params['uncertainty_model'], var_list = self.um.var_list)
self.targets = {'global_step' : self.global_step, 'um_optimizer' : um_opt}
assert set(self.wm.readouts.keys()) != set(self.um.readouts.keys())
self.targets.update(self.wm.readouts)
self.targets.update(self.um.readouts)
#self.targets = {
# 'fut_pred' : self.wm.fut_pred, 'act_pred' : self.wm.act_pred,
# 'fut_loss' : self.wm.fut_loss, 'act_loss' : self.wm.act_loss,
# 'estimated_world_loss' : self.um.estimated_world_loss,
# ''
# }
#self.targets.update({'um_loss' : self.um.uncertainty_loss, 'um_lr' : um_lr, 'um_optimizer' : um_opt,
# 'global_step' : self.global_step, 'loss_per_example' : self.um.true_loss})
self.map_draw_mode = None
#Map drawing. Meant to have options, but for now just assuming one sort of specification
self.state_desc = updater_params.get('state_desc', 'depths1')
self.map_draw_mode = updater_params['map_draw_mode']
#this specification specifices batch example indices for which we do a forward pass.
#need to do one forward pass each index because action sampling is the 'batch.'
self.action_sampler = action_sampler
assert self.map_draw_mode == 'specified_indices' and self.action_sampler is not None, (self.map_draw_mode, action_sampler)
self.map_draw_example_indices = updater_params['map_draw_example_indices']
self.map_draw_timestep_indices = updater_params['map_draw_timestep_indices']
self.map_draw_freq = updater_params['map_draw_freq']
def update(self, sess, visualize=False):
    """Run one uncertainty-model training step.

    Dequeues one batch from every provider, trains, and — every
    map_draw_freq steps — attaches per-example map-draw data to the result.
    Returns (postprocessed result dict, global step).
    """
    # Gather one dequeued batch per provider, keyed by data name.
    batch = {}
    for provider in self.data_provider:
        provider_batch = provider.dequeue_batch()
        for key, value in provider_batch.items():
            batch.setdefault(key, []).append(value)
    # Stitch the per-provider pieces together along the batch axis.
    for key in ('action', 'action_post', 'depths1'):
        batch[key] = np.concatenate(batch[key], axis=0)
    feed_dict = {
        self.wm.states: batch[self.state_desc],
        self.wm.action: batch['action'],
        self.wm.action_post: batch['action_post'],
    }
    self.targets['global_step'] = self.global_step
    res = sess.run(self.targets, feed_dict=feed_dict)
    global_step = res['global_step']
    # Periodically run extra forward passes for visualization.
    time_to_draw = (self.map_draw_mode is not None
                    and global_step % self.map_draw_freq == 0)
    if time_to_draw and self.map_draw_mode == 'specified_indices':
        drawings = []
        for idx in self.map_draw_example_indices:
            obs_for_actor = [batch[self.state_desc][idx][t]
                             for t in self.map_draw_timestep_indices]
            action_samples = self.action_sampler.sample_actions()
            action, entropy, estimated_world_loss = self.um.act(
                sess, action_samples, obs_for_actor)
            drawings.append({
                'example_id': idx,
                'action_sample': action,
                'estimated_world_loss': estimated_world_loss,
                'action_samples': action_samples,
                'depths1': batch[self.state_desc][idx],
                'action': batch['action'][idx],
                'action_post': batch['action_post'][idx],
            })
        res['map_draw'] = drawings
    res = self.postprocessor.postprocess(res, batch)
    return res, global_step
class ActionUncertaintyUpdater:
    # Trains the world model's action head (plus encoder) together with the
    # uncertainty model: one sess.run per update() executes both optimizers.
    def __init__(self, models, data_provider, optimizer_params, learning_rate_params, postprocessor, updater_params):
        """Build learning-rate schedules, optimizers, and the fetch targets.

        models: dict with 'world_model' and 'uncertainty_model' entries.
        data_provider: a single provider or a list; normalized to a list so
            update() can merge batches from several providers.
        """
        self.data_provider = data_provider \
            if isinstance(data_provider, list) else [data_provider]
        self.wm = models['world_model']
        self.um = models['uncertainty_model']
        self.postprocessor = postprocessor
        self.global_step = tf.get_variable('global_step', [], tf.int32, initializer = tf.constant_initializer(0,dtype = tf.int32))
        self.act_lr_params, act_lr = get_learning_rate(self.global_step, ** learning_rate_params['world_model']['act_model'])
        self.um_lr_params, um_lr = get_learning_rate(self.global_step, ** learning_rate_params['uncertainty_model'])
        act_opt_params, act_opt = get_optimizer(act_lr, self.wm.act_loss, self.global_step, optimizer_params['world_model']['act_model'], var_list = self.wm.act_var_list + self.wm.encode_var_list)
        um_opt_params, um_opt = get_optimizer(um_lr, self.um.uncertainty_loss, self.global_step, optimizer_params['uncertainty_model'], var_list = self.um.var_list)
        # Report the step count divided by two — presumably because both
        # optimizers above increment the raw counter once per update();
        # TODO(review) confirm against get_optimizer.
        self.global_step = self.global_step / 2
        self.targets = {'act_pred' : self.wm.act_pred, 'act_loss' : self.wm.act_loss,
                'act_optimizer' : act_opt, 'um_optimizer' : um_opt,
                'estimated_world_loss' : self.um.estimated_world_loss,
                'um_loss' : self.um.uncertainty_loss, 'loss_per_example' : self.um.true_loss,
                'global_step' : self.global_step}
    def update(self, sess, visualize = False):
        """Dequeue one batch per provider, merge, and train both models."""
        batch = {}
        for i, dp in enumerate(self.data_provider):
            provider_batch = dp.dequeue_batch()
            for k in provider_batch:
                if k in batch:
                    batch[k].append(provider_batch[k])
                else:
                    batch[k] = [provider_batch[k]]
        # Stitch the per-provider pieces into single arrays along axis 0.
        for k in ['action', 'action_post', 'depths1']:
            batch[k] = np.concatenate(batch[k], axis=0)
        state_desc = 'depths1'
        #depths, actions, actions_post, next_depth = postprocess_batch_depth(batch, state_desc)
        feed_dict = {
            self.wm.states : batch[state_desc],
            self.wm.action : batch['action'],
            self.wm.action_post : batch['action_post']
        }
        self.targets['global_step'] = self.global_step
        res = sess.run(self.targets, feed_dict = feed_dict)
        glstep = res['global_step']
        res = self.postprocessor.postprocess(res, batch)
        return res, glstep
class LatentUncertaintyUpdater:
    """Jointly trains the world model's action and future heads plus the
    uncertainty model; one sess.run per update() runs all three optimizers.
    """
    def __init__(self, world_model, uncertainty_model, data_provider, optimizer_params, learning_rate_params, postprocessor, updater_params=None):
        """Build learning rates, the three optimizers, and fetch targets.

        updater_params: optional dict; 'state_desc' selects which batch key
            feeds the world model (default 'depths1').
        """
        self.data_provider = data_provider
        self.wm = world_model
        self.um = uncertainty_model
        self.postprocessor = postprocessor
        self.global_step = tf.get_variable(
            'global_step', [], tf.int32,
            initializer=tf.constant_initializer(0, dtype=tf.int32))
        self.act_lr_params, act_lr = get_learning_rate(self.global_step, **learning_rate_params['world_model']['act_model'])
        self.fut_lr_params, fut_lr = get_learning_rate(self.global_step, **learning_rate_params['world_model']['fut_model'])
        self.um_lr_params, um_lr = get_learning_rate(self.global_step, **learning_rate_params['uncertainty_model'])
        act_opt_params, act_opt = get_optimizer(act_lr, self.wm.act_loss, self.global_step, optimizer_params['world_model']['act_model'], var_list=self.wm.act_var_list + self.wm.encode_var_list)
        fut_opt_params, fut_opt = get_optimizer(fut_lr, self.wm.fut_loss, self.global_step, optimizer_params['world_model']['fut_model'], var_list=self.wm.fut_var_list)
        um_opt_params, um_opt = get_optimizer(um_lr, self.um.uncertainty_loss, self.global_step, optimizer_params['uncertainty_model'], var_list=self.um.var_list)
        # Three optimizers each bump the raw counter, so divide to report
        # the number of update() calls.
        self.global_step = self.global_step / 3
        self.targets = {'encoding_i': self.wm.encoding_i, 'encoding_f': self.wm.encoding_f,
                        'fut_pred': self.wm.fut_pred, 'act_pred': self.wm.act_pred,
                        'act_optimizer': act_opt, 'fut_optimizer': fut_opt,
                        'act_lr': act_lr, 'fut_lr': fut_lr,
                        'fut_loss': self.wm.fut_loss, 'act_loss': self.wm.act_loss,
                        'estimated_world_loss': self.um.estimated_world_loss
                        }
        self.targets.update({'um_loss': self.um.uncertainty_loss, 'um_lr': um_lr, 'um_optimizer': um_opt,
                             'global_step': self.global_step, 'loss_per_example': self.um.true_loss})
        # BUGFIX: updater_params defaults to None, but the original code
        # unconditionally indexed updater_params['state_desc'] and crashed
        # whenever the default was used. Fall back to 'depths1', the default
        # the other updaters in this module use.
        self.state_desc = (updater_params or {}).get('state_desc', 'depths1')
    def start(self, sess):
        """Start the provider's queue runner and initialize all variables."""
        self.data_provider.start_runner(sess)
        sess.run(tf.global_variables_initializer())
    def update(self, sess, visualize=False):
        """Run one joint training step and return the postprocessed result."""
        batch = self.data_provider.dequeue_batch()
        state_desc = self.state_desc
        feed_dict = {
            self.wm.states: batch[state_desc],
            self.wm.action: batch['action'],
            self.wm.action_post: batch['action_post']
        }
        res = sess.run(self.targets, feed_dict=feed_dict)
        res = self.postprocessor.postprocess(res, batch)
        return res
class UncertaintyUpdater:
    """Alternates a world-model step and an uncertainty-model step per batch.

    Unlike the joint updaters, the two models are trained with two separate
    sess.run calls: the world model first, then the uncertainty model on the
    world model's per-example loss.
    """
    def __init__(self, world_model, uncertainty_model, data_provider, optimizer_params, learning_rate_params, postprocessor):
        self.data_provider = data_provider
        self.world_model = world_model
        self.um = uncertainty_model
        self.global_step = tf.get_variable(
            'global_step', [], tf.int32,
            initializer=tf.constant_initializer(0, dtype=tf.int32))
        self.wm_lr_params, wm_learning_rate = get_learning_rate(self.global_step, **learning_rate_params['world_model'])
        self.wm_opt_params, wm_opt = get_optimizer(wm_learning_rate, self.world_model.loss, self.global_step, optimizer_params['world_model'])
        self.world_model_targets = {'given': self.world_model.processed_input,
                                    'loss': self.world_model.loss,
                                    'loss_per_example': self.world_model.loss_per_example,
                                    'learning_rate': wm_learning_rate,
                                    'optimizer': wm_opt,
                                    'prediction': self.world_model.pred,
                                    'tv': self.world_model.tv}
        self.inc_step = self.global_step.assign_add(1)
        self.um_lr_params, um_learning_rate = get_learning_rate(self.global_step, **learning_rate_params['uncertainty_model'])
        # BUGFIX: the optimizer params were previously stored back into
        # self.um_lr_params, clobbering the learning-rate params saved on the
        # line above; keep them in their own attribute.
        self.um_opt_params, um_opt = get_optimizer(um_learning_rate, self.um.uncertainty_loss, self.global_step, optimizer_params['uncertainty_model'])
        # Both optimizers bump the raw counter, so report half of it.
        self.global_step = self.global_step / 2
        self.um_targets = {'loss': self.um.uncertainty_loss, 'learning_rate': um_learning_rate, 'optimizer': um_opt, 'global_step': self.global_step}
        self.postprocessor = postprocessor
        # Number of action timesteps the world model consumes (time axis 1).
        self.world_action_time = self.world_model.action.get_shape().as_list()[1]
    def start(self, sess):
        """Start the provider's queue runner and initialize all variables."""
        self.data_provider.start_runner(sess)
        sess.run(tf.global_variables_initializer())
    def update(self, sess, visualize=False):
        """One world-model step, then one uncertainty-model step.

        Returns the postprocessed, prefix-merged ('wm_'/'um_') result dict.
        """
        batch = self.data_provider.dequeue_batch()
        state_desc = self.um.state_descriptor
        wm_feed_dict = {
            self.world_model.states: batch[state_desc],
            self.world_model.action: batch['action'][:, -self.world_action_time:]
        }
        world_model_res = sess.run(self.world_model_targets, feed_dict=wm_feed_dict)
        um_feed_dict = {
            self.um.s_i: batch[state_desc][:, :-1],
            self.um.action_sample: batch['action'][:, -1],
            self.um.true_loss: world_model_res['loss_per_example']
        }
        um_res = sess.run(self.um_targets, feed_dict=um_feed_dict)
        # Prefix keys so the two result dicts can merge without clashing.
        # (.items() rather than the Python-2-only .iteritems(); works on both.)
        res = dict(('wm_' + k, v) for k, v in world_model_res.items())
        res.update(('um_' + k, v) for k, v in um_res.items())
        res['global_step'] = res.pop('um_global_step')
        res = self.postprocessor.postprocess(res, batch)
        return res
class DamianWMUncertaintyUpdater:
    """World-model + uncertainty-model updater for the action-map setup,
    with optional OpenCV visualization of predictions.
    """
    def __init__(self, world_model, uncertainty_model, data_provider, optimizer_params, learning_rate_params, postprocessor):
        self.data_provider = data_provider
        self.world_model = world_model
        self.um = uncertainty_model
        self.global_step = tf.get_variable(
            'global_step', [], tf.int32,
            initializer=tf.constant_initializer(0, dtype=tf.int32))
        self.wm_lr_params, wm_learning_rate = get_learning_rate(self.global_step, **learning_rate_params['world_model'])
        self.wm_opt_params, wm_opt = get_optimizer(wm_learning_rate, self.world_model.loss, self.global_step, optimizer_params['world_model'])
        self.world_model_targets = {'given': self.world_model.processed_input,
                                    'loss': self.world_model.loss,
                                    'learning_rate': wm_learning_rate,
                                    'optimizer': wm_opt,
                                    'prediction': self.world_model.pred,
                                    'tv': self.world_model.tv}
        self.inc_step = self.global_step.assign_add(1)
        # BUGFIX: both lines below previously assigned to self.wm_lr_params
        # (copy-paste), destroying the world-model learning-rate params and
        # never storing the uncertainty-model ones under their own names.
        self.um_lr_params, um_learning_rate = get_learning_rate(self.global_step, **learning_rate_params['uncertainty_model'])
        self.um_opt_params, um_opt = get_optimizer(um_learning_rate, self.um.uncertainty_loss, self.global_step, optimizer_params['uncertainty_model'])
        self.um_targets = {'loss': self.um.uncertainty_loss, 'learning_rate': um_learning_rate, 'optimizer': um_opt, 'global_step': self.global_step}
        self.postprocessor = postprocessor
    def start(self, sess):
        """Start the provider's queue runner and initialize all variables."""
        self.data_provider.start_runner(sess)
        sess.run(tf.global_variables_initializer())
    def update(self, sess, visualize=False):
        """One world-model step, then one uncertainty-model step.

        When visualize is True, shows prediction/target/input frames via
        OpenCV windows. Returns the postprocessed merged result dict.
        """
        batch = self.data_provider.dequeue_batch()
        depths, objects, actions, action_ids, next_depth = postprocess_batch_for_actionmap(batch)
        wm_feed_dict = {
            self.world_model.s_i: depths,
            self.world_model.s_f: next_depth,
            self.world_model.action: actions,
            self.world_model.action_id: action_ids,
            self.world_model.objects: objects
        }
        world_model_res = sess.run(self.world_model_targets, feed_dict=wm_feed_dict)
        if visualize:
            cv2.imshow('pred', world_model_res['prediction'][0] / 4.)#TODO clean up w colors
            cv2.imshow('tv', world_model_res['tv'][0] / 4.)
            cv2.imshow('processed0', world_model_res['given'][0, 0] / 4.)
            cv2.imshow('processed1', world_model_res['given'][0, 1] / 4.)
            cv2.waitKey(1)
            print('wm loss: ' + str(world_model_res['loss']))
        um_feed_dict = {
            self.um.s_i: depths,
            self.um.action_sample: actions[:, -1],
            self.um.true_loss: np.array([world_model_res['loss']])
        }
        um_res = sess.run(self.um_targets, feed_dict=um_feed_dict)
        # Prefix keys so the two result dicts can merge without clashing.
        # (.items() rather than the Python-2-only .iteritems().)
        res = dict(('wm_' + k, v) for k, v in world_model_res.items())
        res.update(('um_' + k, v) for k, v in um_res.items())
        # BUGFIX: `res` was referenced here before ever being assigned
        # (NameError at runtime); build it from the merged dict first.
        res['global_step'] = res.pop('um_global_step')
        res = self.postprocessor.postprocess(res, batch)
        return res
| 49.04162
| 286
| 0.672072
| 6,026
| 43,598
| 4.557916
| 0.055095
| 0.042234
| 0.034151
| 0.016056
| 0.817848
| 0.789886
| 0.768951
| 0.752785
| 0.726425
| 0.707893
| 0
| 0.006652
| 0.203519
| 43,598
| 888
| 287
| 49.096847
| 0.784305
| 0.133768
| 0
| 0.579909
| 0
| 0
| 0.105283
| 0.002081
| 0
| 0
| 0
| 0.001126
| 0.00761
| 0
| null | null | 0
| 0.012177
| null | null | 0.024353
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e12b9d238c837bf0da1894ff7d36d73870e254ac
| 3,701
|
py
|
Python
|
exams/migrations/0020_auto_20210805_1401.py
|
ankanb240/otis-web
|
45eda65b419705c65c02b15872a137969d53d8e9
|
[
"MIT"
] | 15
|
2021-08-28T18:18:37.000Z
|
2022-03-13T07:48:15.000Z
|
exams/migrations/0020_auto_20210805_1401.py
|
ankanb240/otis-web
|
45eda65b419705c65c02b15872a137969d53d8e9
|
[
"MIT"
] | 65
|
2021-08-20T02:37:27.000Z
|
2022-02-07T17:19:23.000Z
|
exams/migrations/0020_auto_20210805_1401.py
|
ankanb240/otis-web
|
45eda65b419705c65c02b15872a137969d53d8e9
|
[
"MIT"
] | 31
|
2020-01-09T02:35:29.000Z
|
2022-03-13T07:48:18.000Z
|
# Generated by Django 3.2.5 on 2021-08-05 18:01
import django.core.validators
from django.db import migrations, models
import re
class Migration(migrations.Migration):
    """Widen the five ExamAttempt guess fields and tighten the five
    PracticeExam answer fields (required, comma-separated digits).
    """

    dependencies = [
        ('exams', '0019_auto_20210805_1334'),
    ]

    # The five guess fields and the five answer fields get identical
    # definitions, so build the operation list programmatically instead of
    # spelling out each AlterField by hand.
    operations = [
        migrations.AlterField(
            model_name='examattempt',
            name='guess%d' % problem,
            field=models.IntegerField(
                blank=True,
                null=True,
                validators=[
                    django.core.validators.MinValueValidator(-1000000000),
                    django.core.validators.MaxValueValidator(1000000000),
                ],
                verbose_name='Problem %d response' % problem,
            ),
        )
        for problem in range(1, 6)
    ] + [
        migrations.AlterField(
            model_name='practiceexam',
            name='answer%d' % problem,
            field=models.CharField(
                default='',
                max_length=64,
                validators=[
                    django.core.validators.RegexValidator(
                        re.compile('^\\d+(?:,\\d+)*\\Z'),
                        code='invalid',
                        message='Enter only digits separated by commas.',
                    )
                ],
            ),
            preserve_default=False,
        )
        for problem in range(1, 6)
    ]
| 52.126761
| 218
| 0.647122
| 365
| 3,701
| 6.484932
| 0.208219
| 0.067596
| 0.135192
| 0.122518
| 0.893536
| 0.890156
| 0.852556
| 0.764681
| 0.764681
| 0.764681
| 0
| 0.053188
| 0.207511
| 3,701
| 70
| 219
| 52.871429
| 0.753836
| 0.012159
| 0
| 0.625
| 1
| 0
| 0.167761
| 0.006294
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.046875
| 0
| 0.09375
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
014434b9827abaf3a6d57e8eae651e94e24baa65
| 16,584
|
py
|
Python
|
sdk/python/pulumi_ucloud/vpc/vpc.py
|
AaronFriel/pulumi-ucloud
|
199278786dddf46bdd370f3f805e30b279c63ff2
|
[
"ECL-2.0",
"Apache-2.0"
] | 4
|
2021-08-18T04:55:38.000Z
|
2021-09-08T07:59:24.000Z
|
sdk/python/pulumi_ucloud/vpc/vpc.py
|
AaronFriel/pulumi-ucloud
|
199278786dddf46bdd370f3f805e30b279c63ff2
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2022-01-28T17:59:37.000Z
|
2022-01-29T03:44:09.000Z
|
sdk/python/pulumi_ucloud/vpc/vpc.py
|
AaronFriel/pulumi-ucloud
|
199278786dddf46bdd370f3f805e30b279c63ff2
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2021-06-23T07:10:40.000Z
|
2021-06-23T09:25:12.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['VPCArgs', 'VPC']
@pulumi.input_type
class VPCArgs:
    # NOTE: emitted by the Pulumi Terraform Bridge (tfgen); hand edits are
    # lost on regeneration, so this pass adds comments only.
    def __init__(__self__, *,
                 cidr_blocks: pulumi.Input[Sequence[pulumi.Input[str]]],
                 name: Optional[pulumi.Input[str]] = None,
                 remark: Optional[pulumi.Input[str]] = None,
                 tag: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a VPC resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] cidr_blocks: The CIDR blocks of VPC.
        :param pulumi.Input[str] remark: The remarks of the VPC. (Default: `""`).
        :param pulumi.Input[str] tag: A tag assigned to VPC, which contains at most 63 characters and only support Chinese, English, numbers, '-', '_', and '.'. If it is not filled in or a empty string is filled in, then default tag will be assigned. (Default: `Default`).
        """
        pulumi.set(__self__, "cidr_blocks", cidr_blocks)
        # Optional inputs are only forwarded when explicitly provided.
        if name is not None:
            pulumi.set(__self__, "name", name)
        if remark is not None:
            pulumi.set(__self__, "remark", remark)
        if tag is not None:
            pulumi.set(__self__, "tag", tag)

    @property
    @pulumi.getter(name="cidrBlocks")
    def cidr_blocks(self) -> pulumi.Input[Sequence[pulumi.Input[str]]]:
        """
        The CIDR blocks of VPC.
        """
        return pulumi.get(self, "cidr_blocks")

    @cidr_blocks.setter
    def cidr_blocks(self, value: pulumi.Input[Sequence[pulumi.Input[str]]]):
        pulumi.set(self, "cidr_blocks", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def remark(self) -> Optional[pulumi.Input[str]]:
        """
        The remarks of the VPC. (Default: `""`).
        """
        return pulumi.get(self, "remark")

    @remark.setter
    def remark(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "remark", value)

    @property
    @pulumi.getter
    def tag(self) -> Optional[pulumi.Input[str]]:
        """
        A tag assigned to VPC, which contains at most 63 characters and only support Chinese, English, numbers, '-', '_', and '.'. If it is not filled in or a empty string is filled in, then default tag will be assigned. (Default: `Default`).
        """
        return pulumi.get(self, "tag")

    @tag.setter
    def tag(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "tag", value)
@pulumi.input_type
class _VPCState:
    # NOTE: emitted by the Pulumi Terraform Bridge (tfgen); hand edits are
    # lost on regeneration, so this pass adds comments only.
    # Unlike VPCArgs, this state bundle also carries the provider-computed
    # outputs (create_time, network_infos, update_time) used by VPC.get().
    def __init__(__self__, *,
                 cidr_blocks: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 create_time: Optional[pulumi.Input[str]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 network_infos: Optional[pulumi.Input[Sequence[pulumi.Input['VPCNetworkInfoArgs']]]] = None,
                 remark: Optional[pulumi.Input[str]] = None,
                 tag: Optional[pulumi.Input[str]] = None,
                 update_time: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering VPC resources.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] cidr_blocks: The CIDR blocks of VPC.
        :param pulumi.Input[str] create_time: The time of creation for VPC, formatted in RFC3339 time string.
        :param pulumi.Input[Sequence[pulumi.Input['VPCNetworkInfoArgs']]] network_infos: It is a nested type which documented below.
        :param pulumi.Input[str] remark: The remarks of the VPC. (Default: `""`).
        :param pulumi.Input[str] tag: A tag assigned to VPC, which contains at most 63 characters and only support Chinese, English, numbers, '-', '_', and '.'. If it is not filled in or a empty string is filled in, then default tag will be assigned. (Default: `Default`).
        :param pulumi.Input[str] update_time: The time whenever there is a change made to VPC, formatted in RFC3339 time string.
        """
        # Every field is optional here; only explicitly provided values are set.
        if cidr_blocks is not None:
            pulumi.set(__self__, "cidr_blocks", cidr_blocks)
        if create_time is not None:
            pulumi.set(__self__, "create_time", create_time)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if network_infos is not None:
            pulumi.set(__self__, "network_infos", network_infos)
        if remark is not None:
            pulumi.set(__self__, "remark", remark)
        if tag is not None:
            pulumi.set(__self__, "tag", tag)
        if update_time is not None:
            pulumi.set(__self__, "update_time", update_time)

    @property
    @pulumi.getter(name="cidrBlocks")
    def cidr_blocks(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        """
        The CIDR blocks of VPC.
        """
        return pulumi.get(self, "cidr_blocks")

    @cidr_blocks.setter
    def cidr_blocks(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "cidr_blocks", value)

    @property
    @pulumi.getter(name="createTime")
    def create_time(self) -> Optional[pulumi.Input[str]]:
        """
        The time of creation for VPC, formatted in RFC3339 time string.
        """
        return pulumi.get(self, "create_time")

    @create_time.setter
    def create_time(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "create_time", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter(name="networkInfos")
    def network_infos(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['VPCNetworkInfoArgs']]]]:
        """
        It is a nested type which documented below.
        """
        return pulumi.get(self, "network_infos")

    @network_infos.setter
    def network_infos(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['VPCNetworkInfoArgs']]]]):
        pulumi.set(self, "network_infos", value)

    @property
    @pulumi.getter
    def remark(self) -> Optional[pulumi.Input[str]]:
        """
        The remarks of the VPC. (Default: `""`).
        """
        return pulumi.get(self, "remark")

    @remark.setter
    def remark(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "remark", value)

    @property
    @pulumi.getter
    def tag(self) -> Optional[pulumi.Input[str]]:
        """
        A tag assigned to VPC, which contains at most 63 characters and only support Chinese, English, numbers, '-', '_', and '.'. If it is not filled in or a empty string is filled in, then default tag will be assigned. (Default: `Default`).
        """
        return pulumi.get(self, "tag")

    @tag.setter
    def tag(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "tag", value)

    @property
    @pulumi.getter(name="updateTime")
    def update_time(self) -> Optional[pulumi.Input[str]]:
        """
        The time whenever there is a change made to VPC, formatted in RFC3339 time string.
        """
        return pulumi.get(self, "update_time")

    @update_time.setter
    def update_time(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "update_time", value)
class VPC(pulumi.CustomResource):
    # NOTE: emitted by the Pulumi Terraform Bridge (tfgen); hand edits are
    # lost on regeneration, so this pass adds comments only.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 cidr_blocks: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 remark: Optional[pulumi.Input[str]] = None,
                 tag: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Provides a VPC resource.

        > **Note** The network segment can only be created or deleted, can not perform both of them at the same time.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_ucloud as ucloud

        example = ucloud.vpc.VPC("example",
            cidr_blocks=["192.168.0.0/16"],
            tag="tf-example")
        ```

        ## Import

        VPC can be imported using the `id`, e.g.

        ```sh
         $ pulumi import ucloud:vpc/vPC:VPC example uvnet-abc123456
        ```

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] cidr_blocks: The CIDR blocks of VPC.
        :param pulumi.Input[str] remark: The remarks of the VPC. (Default: `""`).
        :param pulumi.Input[str] tag: A tag assigned to VPC, which contains at most 63 characters and only support Chinese, English, numbers, '-', '_', and '.'. If it is not filled in or a empty string is filled in, then default tag will be assigned. (Default: `Default`).
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: VPCArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides a VPC resource.

        > **Note** The network segment can only be created or deleted, can not perform both of them at the same time.

        ## Example Usage

        ```python
        import pulumi
        import pulumi_ucloud as ucloud

        example = ucloud.vpc.VPC("example",
            cidr_blocks=["192.168.0.0/16"],
            tag="tf-example")
        ```

        ## Import

        VPC can be imported using the `id`, e.g.

        ```sh
         $ pulumi import ucloud:vpc/vPC:VPC example uvnet-abc123456
        ```

        :param str resource_name: The name of the resource.
        :param VPCArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: either a VPCArgs bundle
        # or individual keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(VPCArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       cidr_blocks: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                       name: Optional[pulumi.Input[str]] = None,
                       remark: Optional[pulumi.Input[str]] = None,
                       tag: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        # Normalize/validate resource options before registering the resource.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            # Creating a new resource: __props__ is reserved for the
            # get()/lookup path below.
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = VPCArgs.__new__(VPCArgs)

            if cidr_blocks is None and not opts.urn:
                raise TypeError("Missing required property 'cidr_blocks'")
            __props__.__dict__["cidr_blocks"] = cidr_blocks
            __props__.__dict__["name"] = name
            __props__.__dict__["remark"] = remark
            __props__.__dict__["tag"] = tag
            # Output-only properties start as None; the engine fills them in.
            __props__.__dict__["create_time"] = None
            __props__.__dict__["network_infos"] = None
            __props__.__dict__["update_time"] = None
        super(VPC, __self__).__init__(
            'ucloud:vpc/vPC:VPC',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            cidr_blocks: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            create_time: Optional[pulumi.Input[str]] = None,
            name: Optional[pulumi.Input[str]] = None,
            network_infos: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VPCNetworkInfoArgs']]]]] = None,
            remark: Optional[pulumi.Input[str]] = None,
            tag: Optional[pulumi.Input[str]] = None,
            update_time: Optional[pulumi.Input[str]] = None) -> 'VPC':
        """
        Get an existing VPC resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[Sequence[pulumi.Input[str]]] cidr_blocks: The CIDR blocks of VPC.
        :param pulumi.Input[str] create_time: The time of creation for VPC, formatted in RFC3339 time string.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['VPCNetworkInfoArgs']]]] network_infos: It is a nested type which documented below.
        :param pulumi.Input[str] remark: The remarks of the VPC. (Default: `""`).
        :param pulumi.Input[str] tag: A tag assigned to VPC, which contains at most 63 characters and only support Chinese, English, numbers, '-', '_', and '.'. If it is not filled in or a empty string is filled in, then default tag will be assigned. (Default: `Default`).
        :param pulumi.Input[str] update_time: The time whenever there is a change made to VPC, formatted in RFC3339 time string.
        """
        # Rehydrate a resource from existing provider state identified by id.
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _VPCState.__new__(_VPCState)

        __props__.__dict__["cidr_blocks"] = cidr_blocks
        __props__.__dict__["create_time"] = create_time
        __props__.__dict__["name"] = name
        __props__.__dict__["network_infos"] = network_infos
        __props__.__dict__["remark"] = remark
        __props__.__dict__["tag"] = tag
        __props__.__dict__["update_time"] = update_time
        return VPC(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="cidrBlocks")
    def cidr_blocks(self) -> pulumi.Output[Sequence[str]]:
        """
        The CIDR blocks of VPC.
        """
        return pulumi.get(self, "cidr_blocks")

    @property
    @pulumi.getter(name="createTime")
    def create_time(self) -> pulumi.Output[str]:
        """
        The time of creation for VPC, formatted in RFC3339 time string.
        """
        return pulumi.get(self, "create_time")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="networkInfos")
    def network_infos(self) -> pulumi.Output[Sequence['outputs.VPCNetworkInfo']]:
        """
        It is a nested type which documented below.
        """
        return pulumi.get(self, "network_infos")

    @property
    @pulumi.getter
    def remark(self) -> pulumi.Output[str]:
        """
        The remarks of the VPC. (Default: `""`).
        """
        return pulumi.get(self, "remark")

    @property
    @pulumi.getter
    def tag(self) -> pulumi.Output[Optional[str]]:
        """
        A tag assigned to VPC, which contains at most 63 characters and only support Chinese, English, numbers, '-', '_', and '.'. If it is not filled in or a empty string is filled in, then default tag will be assigned. (Default: `Default`).
        """
        return pulumi.get(self, "tag")

    @property
    @pulumi.getter(name="updateTime")
    def update_time(self) -> pulumi.Output[str]:
        """
        The time whenever there is a change made to VPC, formatted in RFC3339 time string.
        """
        return pulumi.get(self, "update_time")
| 41.151365
| 272
| 0.622407
| 2,035
| 16,584
| 4.877641
| 0.096806
| 0.09863
| 0.087447
| 0.077574
| 0.816442
| 0.78652
| 0.764558
| 0.740681
| 0.708644
| 0.675902
| 0
| 0.006448
| 0.261216
| 16,584
| 402
| 273
| 41.253731
| 0.803706
| 0.331826
| 0
| 0.625551
| 1
| 0
| 0.082637
| 0.002162
| 0
| 0
| 0
| 0
| 0
| 1
| 0.15859
| false
| 0.004405
| 0.030837
| 0.013216
| 0.286344
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6dcf02cbbf92488d6d8f902d026fee57c2a0ba1a
| 161
|
py
|
Python
|
users/admin.py
|
shubhankar5/Mitron-Achatting-app-in-django
|
524086254794a713110e496b70588865116c322f
|
[
"Apache-2.0"
] | 7
|
2021-03-10T13:28:30.000Z
|
2021-12-22T15:40:16.000Z
|
users/admin.py
|
shubhankar5/Mitron-Achatting-app-in-django
|
524086254794a713110e496b70588865116c322f
|
[
"Apache-2.0"
] | 1
|
2022-03-11T04:29:39.000Z
|
2022-03-12T17:57:23.000Z
|
users/admin.py
|
shubhankar5/Mitron-Achatting-app-in-django
|
524086254794a713110e496b70588865116c322f
|
[
"Apache-2.0"
] | 4
|
2021-07-10T16:49:28.000Z
|
2022-03-11T04:54:21.000Z
|
from django.contrib import admin
from .models import Profile, Address, Friends, BlockedUsers

# Expose the chat app's user-related models in the Django admin site,
# one register call per model (same order, same effect as a list call).
for _model in (Profile, Address, Friends, BlockedUsers):
    admin.site.register(_model)
| 32.2
| 62
| 0.78882
| 19
| 161
| 6.684211
| 0.631579
| 0.220472
| 0.330709
| 0.519685
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.124224
| 161
| 5
| 62
| 32.2
| 0.900709
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
6dd713e0a39ff1a65090d3769d274eada8575039
| 335,940
|
py
|
Python
|
pysnmp/NETBOTZ320-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 11
|
2021-02-02T16:27:16.000Z
|
2021-08-31T06:22:49.000Z
|
pysnmp/NETBOTZ320-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 75
|
2021-02-24T17:30:31.000Z
|
2021-12-08T00:01:18.000Z
|
pysnmp/NETBOTZ320-MIB.py
|
agustinhenze/mibs.snmplabs.com
|
1fc5c07860542b89212f4c8ab807057d9a9206c7
|
[
"Apache-2.0"
] | 10
|
2019-04-30T05:51:36.000Z
|
2022-02-16T03:33:41.000Z
|
#
# PySNMP MIB module NETBOTZ320-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/NETBOTZ320-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 20:08:48 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
# Pull the ASN.1 / SMIv2 base types and helper classes out of modules the
# MIB builder has already loaded; this generated module never imports them
# directly (mibBuilder is injected by pysnmp when the module is executed).
Integer, ObjectIdentifier, OctetString = mibBuilder.importSymbols("ASN1", "Integer", "ObjectIdentifier", "OctetString")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
SingleValueConstraint, ConstraintsUnion, ConstraintsIntersection, ValueSizeConstraint, ValueRangeConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "SingleValueConstraint", "ConstraintsUnion", "ConstraintsIntersection", "ValueSizeConstraint", "ValueRangeConstraint")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
MibScalar, MibTable, MibTableRow, MibTableColumn, enterprises, Counter32, IpAddress, Integer32, iso, MibIdentifier, Counter64, NotificationType, ModuleIdentity, Bits, Unsigned32, Gauge32, ObjectIdentity, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "enterprises", "Counter32", "IpAddress", "Integer32", "iso", "MibIdentifier", "Counter64", "NotificationType", "ModuleIdentity", "Bits", "Unsigned32", "Gauge32", "ObjectIdentity", "TimeTicks")
DateAndTime, TextualConvention, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "DateAndTime", "TextualConvention", "DisplayString")
# Module identity for the NETBOTZ320 MIB under the NetBotz/APC enterprise
# arc (1.3.6.1.4.1.5528.100.1).
netBotz_APC = ModuleIdentity((1, 3, 6, 1, 4, 1, 5528, 100, 1)).setLabel("netBotz-APC")
netBotz_APC.setRevisions(('2009-07-02 00:00',))
# Descriptive texts are only attached when the builder was asked to load them.
if mibBuilder.loadTexts: netBotz_APC.setLastUpdated('200907020000Z')
if mibBuilder.loadTexts: netBotz_APC.setOrganization('Schneider Electric / APC')
# --- OID registry -------------------------------------------------------
# Flat MibIdentifier declarations for every subtree of the NetBotz
# enterprise arc (1.3.6.1.4.1.5528). Generated code: one assignment per OID.
netBotzAPC = MibIdentifier((1, 3, 6, 1, 4, 1, 5528))
netBotz = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100))
# Trap subtrees (...100.10), trap parameters (...100.11), products (...100.20).
netBotzTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10))
netBotzTrapParms = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 11))
netBotzProducts = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 20))
netBotzGenericTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 1))
netBotzSensorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2))
netBotzPodTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 3))
netBotzPortTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 4))
# Per-sensor-type trap subtrees under ...100.10.2 (note the gap between
# arcs 15 and 22 in the generated numbering).
netBotzTempSensorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 1))
netBotzHumiditySensorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 2))
netBotzDewPointSensorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 3))
netBotzAirFlowSensorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 4))
netBotzAudioSensorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 5))
netBotzAmpDetectSensorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 6))
netBotzDryContactSensorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 7))
netBotzCameraMotionSensorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 8))
netBotzDoorSensorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 9))
netBotzMicPlugSensorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 10))
netBotzSpeakerPlugSensorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 11))
netBotzTVSignalSensorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 12))
netBotzGPSPositionSensorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 13))
netBotzGPSMovementSensorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 14))
netBotzGPSStatusSensorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 15))
netBotzWirelessStatusSensorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 22))
netBotzPacketDropSensorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 23))
netBotzSNMPCrawlerSensorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 24))
netBotzPlugModuleStatusSensorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 25))
netBotzOutputControlSensorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 26))
netBotzMultiRAESensorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 27))
netBotzMultiRAESensorStatusTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 28))
netBotzMultiRAEDeviceStatusTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 29))
netBotzLinkStatusSensorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 30))
netBotzLoopVoltageSensorTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 31))
# Pod trap subtrees under ...100.10.3.
netBotzBasePodTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 3, 1))
netBotzSensorPodTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 3, 2))
netBotzCameraPodTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 3, 3))
netBotzCCTVPodTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 3, 4))
netBotz4to20mAPodTraps = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 10, 3, 5))
# Product model OIDs under ...100.20.10.
netBotzBotz = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 20, 10))
netBotzWallBotz500 = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 20, 10, 2000))
netBotz420Wall = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 20, 10, 2001))
raeSystemsAreaConnect500 = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 20, 10, 2002))
netBotz420Rack = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 20, 10, 2003))
netBotz320Wall = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 20, 10, 2004))
netBotz320Rack = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 20, 10, 2005))
netBotz420ERack = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 20, 10, 2006))
netBotz320ERack = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 20, 10, 2007))
netBotz220Camera = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 20, 10, 2008))
apprion500 = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 20, 10, 2009))
avocent500 = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 20, 10, 2010))
netBotz320EWall = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 20, 10, 2011))
netBotz420EWall = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 20, 10, 2012))
netBotz550Rack = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 20, 10, 2013))
netBotz450Rack = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 20, 10, 2014))
netBotz455Wall = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 20, 10, 2015))
netBotz355Wall = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 20, 10, 2016))
class OperStatus(TextualConvention, Integer32):
    # Textual convention for an operational state:
    # disconnected(0), error(1), normal(2).
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2))
    namedValues = NamedValues(("disconnected", 0), ("error", 1), ("normal", 2))
class ErrorStatus(TextualConvention, Integer32):
    # Severity scale used by every *ErrorStatus object in this module:
    # normal(0), info(1), warning(2), error(3), critical(4), failure(5).
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4, 5))
    namedValues = NamedValues(("normal", 0), ("info", 1), ("warning", 2), ("error", 3), ("critical", 4), ("failure", 5))
class BoolValue(TextualConvention, Integer32):
    # Tri-state boolean: no(0), yes(1), null(2) for "not applicable/unknown".
    status = 'current'
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(0, 1, 2))
    namedValues = NamedValues(("no", 0), ("yes", 1), ("null", 2))
# Data subtrees under netBotz: enclosures (.2), ports (.3), sensors (.4),
# errors (.5), plus the numeric (.4.1) and state (.4.2) sensor branches.
netBotzEnclosures = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 2))
netBotzPorts = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 3))
netBotzSensors = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 4))
netBotzErrors = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 5))
netBotzNumericSensors = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1))
netBotzStateSensors = MibIdentifier((1, 3, 6, 1, 4, 1, 5528, 100, 4, 2))
# Read-only scalar reporting the device-wide ErrorStatus severity.
netBotzErrorStatus = MibScalar((1, 3, 6, 1, 4, 1, 5528, 100, 100), ErrorStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: netBotzErrorStatus.setStatus('current')
# enclosureTable (...100.2.1): one read-only row per enclosure, indexed by
# enclosureIndex.
enclosureTable = MibTable((1, 3, 6, 1, 4, 1, 5528, 100, 2, 1), )
if mibBuilder.loadTexts: enclosureTable.setStatus('current')
enclosureEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5528, 100, 2, 1, 1), ).setIndexNames((0, "NETBOTZ320-MIB", "enclosureIndex"))
if mibBuilder.loadTexts: enclosureEntry.setStatus('current')
enclosureId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 2, 1, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: enclosureId.setStatus('current')
enclosureStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 2, 1, 1, 2), OperStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: enclosureStatus.setStatus('current')
enclosureErrorStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 2, 1, 1, 3), ErrorStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: enclosureErrorStatus.setStatus('current')
enclosureLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 2, 1, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: enclosureLabel.setStatus('current')
enclosureParentEncId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 2, 1, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: enclosureParentEncId.setStatus('current')
enclosureDockedToEncId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 2, 1, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: enclosureDockedToEncId.setStatus('current')
enclosureIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 2, 1, 1, 7), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: enclosureIndex.setStatus('current')
# dinPortTable (...100.3.1): one read-only row per DIN port, indexed by
# dinPortIndex; BoolValue columns report port capabilities.
dinPortTable = MibTable((1, 3, 6, 1, 4, 1, 5528, 100, 3, 1), )
if mibBuilder.loadTexts: dinPortTable.setStatus('current')
dinPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5528, 100, 3, 1, 1), ).setIndexNames((0, "NETBOTZ320-MIB", "dinPortIndex"))
if mibBuilder.loadTexts: dinPortEntry.setStatus('current')
dinPortId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 3, 1, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dinPortId.setStatus('current')
dinPortStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 3, 1, 1, 2), OperStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dinPortStatus.setStatus('current')
dinPortLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 3, 1, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dinPortLabel.setStatus('current')
dinPortEncId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 3, 1, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dinPortEncId.setStatus('current')
dinPortSensorIdSuffix = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 3, 1, 1, 5), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dinPortSensorIdSuffix.setStatus('current')
dinPortSupportsAverage = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 3, 1, 1, 6), BoolValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dinPortSupportsAverage.setStatus('current')
dinPortSupportsRMS = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 3, 1, 1, 7), BoolValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dinPortSupportsRMS.setStatus('current')
dinPortSupportsDryContact = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 3, 1, 1, 8), BoolValue()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dinPortSupportsDryContact.setStatus('current')
dinPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 3, 1, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dinPortIndex.setStatus('current')
# otherPortTable (...100.3.10): catch-all read-only port table, indexed by
# otherPortIndex.
otherPortTable = MibTable((1, 3, 6, 1, 4, 1, 5528, 100, 3, 10), )
if mibBuilder.loadTexts: otherPortTable.setStatus('current')
otherPortEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5528, 100, 3, 10, 1), ).setIndexNames((0, "NETBOTZ320-MIB", "otherPortIndex"))
if mibBuilder.loadTexts: otherPortEntry.setStatus('current')
otherPortId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 3, 10, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: otherPortId.setStatus('current')
otherPortStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 3, 10, 1, 2), OperStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: otherPortStatus.setStatus('current')
otherPortLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 3, 10, 1, 3), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: otherPortLabel.setStatus('current')
otherPortEncId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 3, 10, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: otherPortEncId.setStatus('current')
otherPortIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 3, 10, 1, 5), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: otherPortIndex.setStatus('current')
# tempSensorTable (...100.4.1.1): temperature readings. Value column range
# -500..1000 suggests tenths of a degree; ValueInt/ValueIntF expose whole
# Celsius/Fahrenheit values — TODO confirm units against the MIB text.
tempSensorTable = MibTable((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 1), )
if mibBuilder.loadTexts: tempSensorTable.setStatus('current')
tempSensorEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 1, 1), ).setIndexNames((0, "NETBOTZ320-MIB", "tempSensorIndex"))
if mibBuilder.loadTexts: tempSensorEntry.setStatus('current')
tempSensorId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 1, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tempSensorId.setStatus('current')
tempSensorValue = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-500, 1000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tempSensorValue.setStatus('current')
tempSensorErrorStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 1, 1, 3), ErrorStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tempSensorErrorStatus.setStatus('current')
tempSensorLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 1, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tempSensorLabel.setStatus('current')
tempSensorEncId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 1, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tempSensorEncId.setStatus('current')
tempSensorPortId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 1, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tempSensorPortId.setStatus('current')
tempSensorValueStr = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 1, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tempSensorValueStr.setStatus('current')
tempSensorValueInt = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-50, 100))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tempSensorValueInt.setStatus('current')
tempSensorValueIntF = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-50, 212))).setMaxAccess("readonly")
if mibBuilder.loadTexts: tempSensorValueIntF.setStatus('current')
tempSensorIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 1, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: tempSensorIndex.setStatus('current')
# humiSensorTable (...100.4.1.2): humidity readings (value 0..1000,
# integer form 0..100).
humiSensorTable = MibTable((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 2), )
if mibBuilder.loadTexts: humiSensorTable.setStatus('current')
humiSensorEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 2, 1), ).setIndexNames((0, "NETBOTZ320-MIB", "humiSensorIndex"))
if mibBuilder.loadTexts: humiSensorEntry.setStatus('current')
humiSensorId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 2, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: humiSensorId.setStatus('current')
humiSensorValue = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 2, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: humiSensorValue.setStatus('current')
humiSensorErrorStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 2, 1, 3), ErrorStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: humiSensorErrorStatus.setStatus('current')
humiSensorLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 2, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: humiSensorLabel.setStatus('current')
humiSensorEncId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 2, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: humiSensorEncId.setStatus('current')
humiSensorPortId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 2, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: humiSensorPortId.setStatus('current')
humiSensorValueStr = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 2, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: humiSensorValueStr.setStatus('current')
humiSensorValueInt = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 2, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 100))).setMaxAccess("readonly")
if mibBuilder.loadTexts: humiSensorValueInt.setStatus('current')
humiSensorIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 2, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: humiSensorIndex.setStatus('current')
# dewPointSensorTable (...100.4.1.3): dew-point readings with Celsius and
# Fahrenheit integer columns, same layout as tempSensorTable.
dewPointSensorTable = MibTable((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 3), )
if mibBuilder.loadTexts: dewPointSensorTable.setStatus('current')
dewPointSensorEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 3, 1), ).setIndexNames((0, "NETBOTZ320-MIB", "dewPointSensorIndex"))
if mibBuilder.loadTexts: dewPointSensorEntry.setStatus('current')
dewPointSensorId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 3, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dewPointSensorId.setStatus('current')
dewPointSensorValue = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-500, 1000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dewPointSensorValue.setStatus('current')
dewPointSensorErrorStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 3, 1, 3), ErrorStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dewPointSensorErrorStatus.setStatus('current')
dewPointSensorLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 3, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dewPointSensorLabel.setStatus('current')
dewPointSensorEncId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 3, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dewPointSensorEncId.setStatus('current')
dewPointSensorPortId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 3, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dewPointSensorPortId.setStatus('current')
dewPointSensorValueStr = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 3, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dewPointSensorValueStr.setStatus('current')
dewPointSensorValueInt = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 3, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-50, 100))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dewPointSensorValueInt.setStatus('current')
dewPointSensorValueIntF = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 3, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-40, 122))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dewPointSensorValueIntF.setStatus('current')
dewPointSensorIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 3, 1, 10), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dewPointSensorIndex.setStatus('current')
# audioSensorTable (...100.4.1.4): audio-level readings; value columns are
# unconstrained Integer32.
audioSensorTable = MibTable((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 4), )
if mibBuilder.loadTexts: audioSensorTable.setStatus('current')
audioSensorEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 4, 1), ).setIndexNames((0, "NETBOTZ320-MIB", "audioSensorIndex"))
if mibBuilder.loadTexts: audioSensorEntry.setStatus('current')
audioSensorId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 4, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: audioSensorId.setStatus('current')
audioSensorValue = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 4, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: audioSensorValue.setStatus('current')
audioSensorErrorStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 4, 1, 3), ErrorStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: audioSensorErrorStatus.setStatus('current')
audioSensorLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 4, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: audioSensorLabel.setStatus('current')
audioSensorEncId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 4, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: audioSensorEncId.setStatus('current')
audioSensorPortId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 4, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: audioSensorPortId.setStatus('current')
audioSensorValueStr = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 4, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: audioSensorValueStr.setStatus('current')
audioSensorValueInt = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 4, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: audioSensorValueInt.setStatus('current')
audioSensorIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 4, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: audioSensorIndex.setStatus('current')
# airFlowSensorTable (...100.4.1.5): airflow readings (value 0..10000,
# integer form 0..1000).
airFlowSensorTable = MibTable((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 5), )
if mibBuilder.loadTexts: airFlowSensorTable.setStatus('current')
airFlowSensorEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 5, 1), ).setIndexNames((0, "NETBOTZ320-MIB", "airFlowSensorIndex"))
if mibBuilder.loadTexts: airFlowSensorEntry.setStatus('current')
airFlowSensorId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 5, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: airFlowSensorId.setStatus('current')
airFlowSensorValue = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 5, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 10000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: airFlowSensorValue.setStatus('current')
airFlowSensorErrorStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 5, 1, 3), ErrorStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: airFlowSensorErrorStatus.setStatus('current')
airFlowSensorLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 5, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: airFlowSensorLabel.setStatus('current')
airFlowSensorEncId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 5, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: airFlowSensorEncId.setStatus('current')
airFlowSensorPortId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 5, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: airFlowSensorPortId.setStatus('current')
airFlowSensorValueStr = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 5, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: airFlowSensorValueStr.setStatus('current')
airFlowSensorValueInt = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 5, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1000))).setMaxAccess("readonly")
if mibBuilder.loadTexts: airFlowSensorValueInt.setStatus('current')
airFlowSensorIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 5, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: airFlowSensorIndex.setStatus('current')
# ampDetectSensorTable (...100.4.1.6): current-detection readings
# (value 0..65535, integer form 0..6554).
ampDetectSensorTable = MibTable((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 6), )
if mibBuilder.loadTexts: ampDetectSensorTable.setStatus('current')
ampDetectSensorEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 6, 1), ).setIndexNames((0, "NETBOTZ320-MIB", "ampDetectSensorIndex"))
if mibBuilder.loadTexts: ampDetectSensorEntry.setStatus('current')
ampDetectSensorId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 6, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ampDetectSensorId.setStatus('current')
ampDetectSensorValue = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 6, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ampDetectSensorValue.setStatus('current')
ampDetectSensorErrorStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 6, 1, 3), ErrorStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ampDetectSensorErrorStatus.setStatus('current')
ampDetectSensorLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 6, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ampDetectSensorLabel.setStatus('current')
ampDetectSensorEncId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 6, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ampDetectSensorEncId.setStatus('current')
ampDetectSensorPortId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 6, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ampDetectSensorPortId.setStatus('current')
ampDetectSensorValueStr = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 6, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ampDetectSensorValueStr.setStatus('current')
ampDetectSensorValueInt = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 6, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 6554))).setMaxAccess("readonly")
if mibBuilder.loadTexts: ampDetectSensorValueInt.setStatus('current')
ampDetectSensorIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 6, 1, 9), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: ampDetectSensorIndex.setStatus('current')
# otherNumericSensorTable (...100.4.1.10): generic numeric sensors; carries
# a free-form Units string plus x1000 and x1000000 scaled integer columns.
otherNumericSensorTable = MibTable((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 10), )
if mibBuilder.loadTexts: otherNumericSensorTable.setStatus('current')
otherNumericSensorEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 10, 1), ).setIndexNames((0, "NETBOTZ320-MIB", "otherNumericSensorIndex"))
if mibBuilder.loadTexts: otherNumericSensorEntry.setStatus('current')
otherNumericSensorId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 10, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: otherNumericSensorId.setStatus('current')
otherNumericSensorValue = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 10, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: otherNumericSensorValue.setStatus('current')
otherNumericSensorErrorStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 10, 1, 3), ErrorStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: otherNumericSensorErrorStatus.setStatus('current')
otherNumericSensorLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 10, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: otherNumericSensorLabel.setStatus('current')
otherNumericSensorEncId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 10, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: otherNumericSensorEncId.setStatus('current')
otherNumericSensorPortId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 10, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: otherNumericSensorPortId.setStatus('current')
otherNumericSensorValueStr = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 10, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: otherNumericSensorValueStr.setStatus('current')
otherNumericSensorValueInt = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 10, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: otherNumericSensorValueInt.setStatus('current')
otherNumericSensorUnits = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 10, 1, 9), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: otherNumericSensorUnits.setStatus('current')
otherNumericSensorValueIntX1000 = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 10, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: otherNumericSensorValueIntX1000.setStatus('current')
otherNumericSensorValueIntX1000000 = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 10, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: otherNumericSensorValueIntX1000000.setStatus('current')
otherNumericSensorIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 1, 10, 1, 12), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: otherNumericSensorIndex.setStatus('current')
# dryContactSensorTable (...100.4.2.1): state sensor; value is an enum
# null(-1)/open(0)/closed(1).
dryContactSensorTable = MibTable((1, 3, 6, 1, 4, 1, 5528, 100, 4, 2, 1), )
if mibBuilder.loadTexts: dryContactSensorTable.setStatus('current')
dryContactSensorEntry = MibTableRow((1, 3, 6, 1, 4, 1, 5528, 100, 4, 2, 1, 1), ).setIndexNames((0, "NETBOTZ320-MIB", "dryContactSensorIndex"))
if mibBuilder.loadTexts: dryContactSensorEntry.setStatus('current')
dryContactSensorId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 2, 1, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dryContactSensorId.setStatus('current')
dryContactSensorValue = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 2, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(-1, 0, 1))).clone(namedValues=NamedValues(("null", -1), ("open", 0), ("closed", 1)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dryContactSensorValue.setStatus('current')
dryContactSensorErrorStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 2, 1, 1, 3), ErrorStatus()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dryContactSensorErrorStatus.setStatus('current')
dryContactSensorLabel = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 2, 1, 1, 4), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dryContactSensorLabel.setStatus('current')
dryContactSensorEncId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 2, 1, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dryContactSensorEncId.setStatus('current')
dryContactSensorPortId = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 2, 1, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
if mibBuilder.loadTexts: dryContactSensorPortId.setStatus('current')
dryContactSensorValueStr = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 2, 1, 1, 7), DisplayString()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dryContactSensorValueStr.setStatus('current')
dryContactSensorIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 5528, 100, 4, 2, 1, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: dryContactSensorIndex.setStatus('current')
# doorSwitchSensorTable (.1.3.6.1.4.1.5528.100.4.2.2): one row per
# door-switch sensor, indexed by doorSwitchSensorIndex. All read-only.
doorSwitchSensorTable = MibTable((1, 3, 6, 1, 4, 1, 5528, 100, 4, 2, 2))
_doorSwitchSensorEntryOid = (1, 3, 6, 1, 4, 1, 5528, 100, 4, 2, 2, 1)
doorSwitchSensorEntry = MibTableRow(_doorSwitchSensorEntryOid).setIndexNames((0, "NETBOTZ320-MIB", "doorSwitchSensorIndex"))
doorSwitchSensorId = MibTableColumn(
    _doorSwitchSensorEntryOid + (1,),
    DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
# Enumerated switch state: -1 = null, 0 = open, 1 = closed.
doorSwitchSensorValue = MibTableColumn(
    _doorSwitchSensorEntryOid + (2,),
    Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(-1, 0, 1))).clone(
        namedValues=NamedValues(("null", -1), ("open", 0), ("closed", 1)))).setMaxAccess("readonly")
doorSwitchSensorErrorStatus = MibTableColumn(
    _doorSwitchSensorEntryOid + (3,), ErrorStatus()).setMaxAccess("readonly")
doorSwitchSensorLabel = MibTableColumn(
    _doorSwitchSensorEntryOid + (4,), DisplayString()).setMaxAccess("readonly")
doorSwitchSensorEncId = MibTableColumn(
    _doorSwitchSensorEntryOid + (5,),
    DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
doorSwitchSensorPortId = MibTableColumn(
    _doorSwitchSensorEntryOid + (6,),
    DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
doorSwitchSensorValueStr = MibTableColumn(
    _doorSwitchSensorEntryOid + (7,), DisplayString()).setMaxAccess("readonly")
doorSwitchSensorIndex = MibTableColumn(
    _doorSwitchSensorEntryOid + (8,), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts:
    for _obj in (doorSwitchSensorTable, doorSwitchSensorEntry,
                 doorSwitchSensorId, doorSwitchSensorValue,
                 doorSwitchSensorErrorStatus, doorSwitchSensorLabel,
                 doorSwitchSensorEncId, doorSwitchSensorPortId,
                 doorSwitchSensorValueStr, doorSwitchSensorIndex):
        _obj.setStatus('current')
# cameraMotionSensorTable (.1.3.6.1.4.1.5528.100.4.2.3): one row per
# camera-motion sensor, indexed by cameraMotionSensorIndex. All read-only.
cameraMotionSensorTable = MibTable((1, 3, 6, 1, 4, 1, 5528, 100, 4, 2, 3))
_cameraMotionSensorEntryOid = (1, 3, 6, 1, 4, 1, 5528, 100, 4, 2, 3, 1)
cameraMotionSensorEntry = MibTableRow(_cameraMotionSensorEntryOid).setIndexNames((0, "NETBOTZ320-MIB", "cameraMotionSensorIndex"))
cameraMotionSensorId = MibTableColumn(
    _cameraMotionSensorEntryOid + (1,),
    DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
# Enumerated motion state: -1 = null, 0 = noMotion, 1 = motionDetected.
cameraMotionSensorValue = MibTableColumn(
    _cameraMotionSensorEntryOid + (2,),
    Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(-1, 0, 1))).clone(
        namedValues=NamedValues(("null", -1), ("noMotion", 0), ("motionDetected", 1)))).setMaxAccess("readonly")
cameraMotionSensorErrorStatus = MibTableColumn(
    _cameraMotionSensorEntryOid + (3,), ErrorStatus()).setMaxAccess("readonly")
cameraMotionSensorLabel = MibTableColumn(
    _cameraMotionSensorEntryOid + (4,), DisplayString()).setMaxAccess("readonly")
cameraMotionSensorEncId = MibTableColumn(
    _cameraMotionSensorEntryOid + (5,),
    DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
cameraMotionSensorPortId = MibTableColumn(
    _cameraMotionSensorEntryOid + (6,),
    DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
cameraMotionSensorValueStr = MibTableColumn(
    _cameraMotionSensorEntryOid + (7,), DisplayString()).setMaxAccess("readonly")
cameraMotionSensorIndex = MibTableColumn(
    _cameraMotionSensorEntryOid + (8,), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts:
    for _obj in (cameraMotionSensorTable, cameraMotionSensorEntry,
                 cameraMotionSensorId, cameraMotionSensorValue,
                 cameraMotionSensorErrorStatus, cameraMotionSensorLabel,
                 cameraMotionSensorEncId, cameraMotionSensorPortId,
                 cameraMotionSensorValueStr, cameraMotionSensorIndex):
        _obj.setStatus('current')
# otherStateSensorTable (.1.3.6.1.4.1.5528.100.4.2.10): one row per
# generic state sensor, indexed by otherStateSensorIndex. Unlike the
# other state tables, the value column is an unconstrained Integer32.
otherStateSensorTable = MibTable((1, 3, 6, 1, 4, 1, 5528, 100, 4, 2, 10))
_otherStateSensorEntryOid = (1, 3, 6, 1, 4, 1, 5528, 100, 4, 2, 10, 1)
otherStateSensorEntry = MibTableRow(_otherStateSensorEntryOid).setIndexNames((0, "NETBOTZ320-MIB", "otherStateSensorIndex"))
otherStateSensorId = MibTableColumn(
    _otherStateSensorEntryOid + (1,),
    DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
otherStateSensorValue = MibTableColumn(
    _otherStateSensorEntryOid + (2,), Integer32()).setMaxAccess("readonly")
otherStateSensorErrorStatus = MibTableColumn(
    _otherStateSensorEntryOid + (3,), ErrorStatus()).setMaxAccess("readonly")
otherStateSensorLabel = MibTableColumn(
    _otherStateSensorEntryOid + (4,), DisplayString()).setMaxAccess("readonly")
otherStateSensorEncId = MibTableColumn(
    _otherStateSensorEntryOid + (5,),
    DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
otherStateSensorPortId = MibTableColumn(
    _otherStateSensorEntryOid + (6,),
    DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
otherStateSensorValueStr = MibTableColumn(
    _otherStateSensorEntryOid + (7,), DisplayString()).setMaxAccess("readonly")
otherStateSensorIndex = MibTableColumn(
    _otherStateSensorEntryOid + (8,), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts:
    for _obj in (otherStateSensorTable, otherStateSensorEntry,
                 otherStateSensorId, otherStateSensorValue,
                 otherStateSensorErrorStatus, otherStateSensorLabel,
                 otherStateSensorEncId, otherStateSensorPortId,
                 otherStateSensorValueStr, otherStateSensorIndex):
        _obj.setStatus('current')
# errorCondTable (.1.3.6.1.4.1.5528.100.5.1): one row per error
# condition, indexed by errorCondIndex. Start/resolved times appear both
# as DateAndTime and as display strings. All columns read-only.
errorCondTable = MibTable((1, 3, 6, 1, 4, 1, 5528, 100, 5, 1))
_errorCondEntryOid = (1, 3, 6, 1, 4, 1, 5528, 100, 5, 1, 1)
errorCondEntry = MibTableRow(_errorCondEntryOid).setIndexNames((0, "NETBOTZ320-MIB", "errorCondIndex"))
errorCondId = MibTableColumn(
    _errorCondEntryOid + (1,),
    DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
errorCondSeverity = MibTableColumn(
    _errorCondEntryOid + (2,), ErrorStatus()).setMaxAccess("readonly")
errorCondTypeId = MibTableColumn(
    _errorCondEntryOid + (3,),
    DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
errorCondStartTime = MibTableColumn(
    _errorCondEntryOid + (4,), DateAndTime()).setMaxAccess("readonly")
errorCondStartTimeStr = MibTableColumn(
    _errorCondEntryOid + (5,), DisplayString()).setMaxAccess("readonly")
errorCondResolved = MibTableColumn(
    _errorCondEntryOid + (6,), BoolValue()).setMaxAccess("readonly")
errorCondResolvedTime = MibTableColumn(
    _errorCondEntryOid + (7,), DateAndTime()).setMaxAccess("readonly")
errorCondResolvedTimeStr = MibTableColumn(
    _errorCondEntryOid + (8,), DisplayString()).setMaxAccess("readonly")
errorCondEncId = MibTableColumn(
    _errorCondEntryOid + (9,),
    DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
errorCondPortId = MibTableColumn(
    _errorCondEntryOid + (10,),
    DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
errorCondSensorId = MibTableColumn(
    _errorCondEntryOid + (11,),
    DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 127))).setMaxAccess("readonly")
errorCondIndex = MibTableColumn(
    _errorCondEntryOid + (12,), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts:
    for _obj in (errorCondTable, errorCondEntry, errorCondId,
                 errorCondSeverity, errorCondTypeId, errorCondStartTime,
                 errorCondStartTimeStr, errorCondResolved,
                 errorCondResolvedTime, errorCondResolvedTimeStr,
                 errorCondEncId, errorCondPortId, errorCondSensorId,
                 errorCondIndex):
        _obj.setStatus('current')
# Scalars under .1.3.6.1.4.1.5528.100.11 — the varbind payload carried
# by every NetBotz trap (IDs, labels, times, severity, sensor values).
# All read-only.
_netBotzTrapOid = (1, 3, 6, 1, 4, 1, 5528, 100, 11)
_SHORT_STR_SPEC = ValueSizeConstraint(0, 127)
netBotzTrapErrorID = MibScalar(
    _netBotzTrapOid + (1,),
    DisplayString().subtype(subtypeSpec=_SHORT_STR_SPEC)).setMaxAccess("readonly")
netBotzTrapErrorType = MibScalar(
    _netBotzTrapOid + (2,),
    DisplayString().subtype(subtypeSpec=_SHORT_STR_SPEC)).setMaxAccess("readonly")
netBotzTrapErrorTypeLabel = MibScalar(
    _netBotzTrapOid + (3,), DisplayString()).setMaxAccess("readonly")
netBotzTrapSensorID = MibScalar(
    _netBotzTrapOid + (4,),
    DisplayString().subtype(subtypeSpec=_SHORT_STR_SPEC)).setMaxAccess("readonly")
netBotzTrapSensorLabel = MibScalar(
    _netBotzTrapOid + (5,), DisplayString()).setMaxAccess("readonly")
netBotzTrapPodID = MibScalar(
    _netBotzTrapOid + (6,),
    DisplayString().subtype(subtypeSpec=_SHORT_STR_SPEC)).setMaxAccess("readonly")
netBotzTrapPodLabel = MibScalar(
    _netBotzTrapOid + (7,), DisplayString()).setMaxAccess("readonly")
netBotzTrapPortID = MibScalar(
    _netBotzTrapOid + (8,),
    DisplayString().subtype(subtypeSpec=_SHORT_STR_SPEC)).setMaxAccess("readonly")
netBotzTrapPortLabel = MibScalar(
    _netBotzTrapOid + (9,), DisplayString()).setMaxAccess("readonly")
netBotzTrapStartTime = MibScalar(
    _netBotzTrapOid + (10,), Integer32()).setMaxAccess("readonly")
netBotzTrapNotifyTime = MibScalar(
    _netBotzTrapOid + (11,), Integer32()).setMaxAccess("readonly")
netBotzTrapResolveTime = MibScalar(
    _netBotzTrapOid + (12,), Integer32()).setMaxAccess("readonly")
# Severity enum: 0=information, 1=warning, 2=error, 3=critical, 4=failure.
netBotzTrapSeverity = MibScalar(
    _netBotzTrapOid + (13,),
    Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(0, 1, 2, 3, 4))).clone(
        namedValues=NamedValues(("information", 0), ("warning", 1), ("error", 2),
                                ("critical", 3), ("failure", 4)))).setMaxAccess("readonly")
netBotzTrapSensorValue = MibScalar(
    _netBotzTrapOid + (14,), DisplayString()).setMaxAccess("readonly")
netBotzTrapSensorValueInt = MibScalar(
    _netBotzTrapOid + (15,), Integer32()).setMaxAccess("readonly")
netBotzTrapSensorValueFraction = MibScalar(
    _netBotzTrapOid + (16,), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts:
    for _scalar in (netBotzTrapErrorID, netBotzTrapErrorType,
                    netBotzTrapErrorTypeLabel, netBotzTrapSensorID,
                    netBotzTrapSensorLabel, netBotzTrapPodID,
                    netBotzTrapPodLabel, netBotzTrapPortID,
                    netBotzTrapPortLabel, netBotzTrapStartTime,
                    netBotzTrapNotifyTime, netBotzTrapResolveTime,
                    netBotzTrapSeverity, netBotzTrapSensorValue,
                    netBotzTrapSensorValueInt,
                    netBotzTrapSensorValueFraction):
        _scalar.setStatus('current')
# Every NetBotz notification carries the identical, ordered set of 16
# trap varbinds. The generated code repeated that tuple verbatim for
# each trap; build it exactly once instead.
_netBotzTrapVarBinds = tuple(
    ("NETBOTZ320-MIB", _vb) for _vb in (
        "netBotzTrapErrorID", "netBotzTrapErrorType",
        "netBotzTrapErrorTypeLabel", "netBotzTrapSensorID",
        "netBotzTrapSensorLabel", "netBotzTrapPodID",
        "netBotzTrapPodLabel", "netBotzTrapPortID",
        "netBotzTrapPortLabel", "netBotzTrapStartTime",
        "netBotzTrapNotifyTime", "netBotzTrapResolveTime",
        "netBotzTrapSeverity", "netBotzTrapSensorValue",
        "netBotzTrapSensorValueInt", "netBotzTrapSensorValueFraction",
    )
)
# Trap OID bases; an "RTN" (return-to-normal) trap uses the alarm's
# final arc + 100.
_netBotzTempTrapBase = (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 1, 0)
_netBotzHumidityTrapBase = (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 2, 0)
netBotzTempTooHigh = NotificationType(_netBotzTempTrapBase + (2,)).setObjects(*_netBotzTrapVarBinds)
netBotzTempTooHighRTN = NotificationType(_netBotzTempTrapBase + (102,)).setObjects(*_netBotzTrapVarBinds)
netBotzTempTooLow = NotificationType(_netBotzTempTrapBase + (3,)).setObjects(*_netBotzTrapVarBinds)
netBotzTempTooLowRTN = NotificationType(_netBotzTempTrapBase + (103,)).setObjects(*_netBotzTrapVarBinds)
netBotzTempTooHighTooLong = NotificationType(_netBotzTempTrapBase + (4,)).setObjects(*_netBotzTrapVarBinds)
netBotzTempTooHighTooLongRTN = NotificationType(_netBotzTempTrapBase + (104,)).setObjects(*_netBotzTrapVarBinds)
netBotzTempTooLowTooLong = NotificationType(_netBotzTempTrapBase + (5,)).setObjects(*_netBotzTrapVarBinds)
netBotzTempTooLowForTooLongRTN = NotificationType(_netBotzTempTrapBase + (105,)).setObjects(*_netBotzTrapVarBinds)
netBotzTempUnplugged = NotificationType(_netBotzTempTrapBase + (6,)).setObjects(*_netBotzTrapVarBinds)
netBotzTempReplugged = NotificationType(_netBotzTempTrapBase + (106,)).setObjects(*_netBotzTrapVarBinds)
netBotzTempIncreasingTooQuickly = NotificationType(_netBotzTempTrapBase + (8,)).setObjects(*_netBotzTrapVarBinds)
netBotzTempNotIncreasingTooQuickly = NotificationType(_netBotzTempTrapBase + (108,)).setObjects(*_netBotzTrapVarBinds)
netBotzTempDecreasingTooQuickly = NotificationType(_netBotzTempTrapBase + (9,)).setObjects(*_netBotzTrapVarBinds)
netBotzTempNotDecreasingTooQuickly = NotificationType(_netBotzTempTrapBase + (109,)).setObjects(*_netBotzTrapVarBinds)
netBotzTempError = NotificationType(_netBotzTempTrapBase + (1,)).setObjects(*_netBotzTrapVarBinds)
netBotzTempErrorRTN = NotificationType(_netBotzTempTrapBase + (101,)).setObjects(*_netBotzTrapVarBinds)
netBotzHumidityTooHigh = NotificationType(_netBotzHumidityTrapBase + (2,)).setObjects(*_netBotzTrapVarBinds)
netBotzHumidityTooHighRTN = NotificationType(_netBotzHumidityTrapBase + (102,)).setObjects(*_netBotzTrapVarBinds)
netBotzHumidityTooLow = NotificationType(_netBotzHumidityTrapBase + (3,)).setObjects(*_netBotzTrapVarBinds)
netBotzHumidityTooLowRTN = NotificationType(_netBotzHumidityTrapBase + (103,)).setObjects(*_netBotzTrapVarBinds)
netBotzHumidityTooHighTooLong = NotificationType(_netBotzHumidityTrapBase + (4,)).setObjects(*_netBotzTrapVarBinds)
netBotzHumidityTooHighTooLongRTN = NotificationType(_netBotzHumidityTrapBase + (104,)).setObjects(*_netBotzTrapVarBinds)
netBotzHumidityTooLowTooLong = NotificationType(_netBotzHumidityTrapBase + (5,)).setObjects(*_netBotzTrapVarBinds)
netBotzHumidityTooLowForTooLongRTN = NotificationType(_netBotzHumidityTrapBase + (105,)).setObjects(*_netBotzTrapVarBinds)
if mibBuilder.loadTexts:
    # netBotzHumidityTooLowForTooLongRTN's status is set by the original
    # statement that follows this span; it is deliberately not listed here.
    for _ntf in (netBotzTempTooHigh, netBotzTempTooHighRTN,
                 netBotzTempTooLow, netBotzTempTooLowRTN,
                 netBotzTempTooHighTooLong, netBotzTempTooHighTooLongRTN,
                 netBotzTempTooLowTooLong, netBotzTempTooLowForTooLongRTN,
                 netBotzTempUnplugged, netBotzTempReplugged,
                 netBotzTempIncreasingTooQuickly,
                 netBotzTempNotIncreasingTooQuickly,
                 netBotzTempDecreasingTooQuickly,
                 netBotzTempNotDecreasingTooQuickly,
                 netBotzTempError, netBotzTempErrorRTN,
                 netBotzHumidityTooHigh, netBotzHumidityTooHighRTN,
                 netBotzHumidityTooLow, netBotzHumidityTooLowRTN,
                 netBotzHumidityTooHighTooLong,
                 netBotzHumidityTooHighTooLongRTN,
                 netBotzHumidityTooLowTooLong):
        _ntf.setStatus('current')
if mibBuilder.loadTexts: netBotzHumidityTooLowForTooLongRTN.setStatus('current')
netBotzHumidityUnplugged = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 2, 0, 6)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzHumidityUnplugged.setStatus('current')
netBotzHumidityReplugged = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 2, 0, 106)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzHumidityReplugged.setStatus('current')
netBotzHumidityIncreasingTooQuickly = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 2, 0, 8)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzHumidityIncreasingTooQuickly.setStatus('current')
netBotzHumidityNotIncreasingTooQuickly = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 2, 0, 108)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzHumidityNotIncreasingTooQuickly.setStatus('current')
netBotzHumidityDecreasingTooQuickly = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 2, 0, 9)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzHumidityDecreasingTooQuickly.setStatus('current')
netBotzHumidityNotDecreasingTooQuickly = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 2, 0, 109)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzHumidityNotDecreasingTooQuickly.setStatus('current')
netBotzHumidityError = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 2, 0, 1)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzHumidityError.setStatus('current')
netBotzHumidityErrorRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 2, 0, 101)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzHumidityErrorRTN.setStatus('current')
netBotzDewPointTooHigh = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 3, 0, 2)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDewPointTooHigh.setStatus('current')
netBotzDewPointTooHighRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 3, 0, 102)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDewPointTooHighRTN.setStatus('current')
netBotzDewPointTooLow = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 3, 0, 3)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDewPointTooLow.setStatus('current')
netBotzDewPointTooLowRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 3, 0, 103)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDewPointTooLowRTN.setStatus('current')
netBotzDewPointTooHighTooLong = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 3, 0, 4)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDewPointTooHighTooLong.setStatus('current')
netBotzDewPointTooHighTooLongRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 3, 0, 104)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDewPointTooHighTooLongRTN.setStatus('current')
netBotzDewPointTooLowTooLong = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 3, 0, 5)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDewPointTooLowTooLong.setStatus('current')
netBotzDewPointTooLowForTooLongRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 3, 0, 105)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDewPointTooLowForTooLongRTN.setStatus('current')
netBotzDewPointUnplugged = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 3, 0, 6)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDewPointUnplugged.setStatus('current')
netBotzDewPointReplugged = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 3, 0, 106)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDewPointReplugged.setStatus('current')
netBotzDewPointIncreasingTooQuickly = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 3, 0, 8)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDewPointIncreasingTooQuickly.setStatus('current')
netBotzDewPointNotIncreasingTooQuickly = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 3, 0, 108)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDewPointNotIncreasingTooQuickly.setStatus('current')
netBotzDewPointDecreasingTooQuickly = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 3, 0, 9)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDewPointDecreasingTooQuickly.setStatus('current')
netBotzDewPointNotDecreasingTooQuickly = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 3, 0, 109)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDewPointNotDecreasingTooQuickly.setStatus('current')
netBotzDewPointError = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 3, 0, 1)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDewPointError.setStatus('current')
netBotzDewPointErrorRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 3, 0, 101)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDewPointErrorRTN.setStatus('current')
netBotzAirFlowTooHigh = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 4, 0, 2)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzAirFlowTooHigh.setStatus('current')
netBotzAirFlowTooHighRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 4, 0, 102)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzAirFlowTooHighRTN.setStatus('current')
netBotzAirFlowTooLow = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 4, 0, 3)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzAirFlowTooLow.setStatus('current')
netBotzAirFlowTooLowRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 4, 0, 103)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzAirFlowTooLowRTN.setStatus('current')
netBotzAirFlowTooHighTooLong = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 4, 0, 4)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzAirFlowTooHighTooLong.setStatus('current')
netBotzAirFlowTooHighTooLongRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 4, 0, 104)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzAirFlowTooHighTooLongRTN.setStatus('current')
# Varbinds carried by every NETBOTZ320-MIB air-flow trap notification below.
_AIRFLOW_TRAP_OBJECTS = tuple(
    ("NETBOTZ320-MIB", _obj)
    for _obj in (
        "netBotzTrapErrorID",
        "netBotzTrapErrorType",
        "netBotzTrapErrorTypeLabel",
        "netBotzTrapSensorID",
        "netBotzTrapSensorLabel",
        "netBotzTrapPodID",
        "netBotzTrapPodLabel",
        "netBotzTrapPortID",
        "netBotzTrapPortLabel",
        "netBotzTrapStartTime",
        "netBotzTrapNotifyTime",
        "netBotzTrapResolveTime",
        "netBotzTrapSeverity",
        "netBotzTrapSensorValue",
        "netBotzTrapSensorValueInt",
        "netBotzTrapSensorValueFraction",
    )
)

# Air-flow sensor trap subtree: enterprises.5528.100.10.2.4.0.<leaf>
_AIRFLOW_TRAP_BASE = (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 4, 0)

netBotzAirFlowTooLowTooLong = NotificationType(
    _AIRFLOW_TRAP_BASE + (5,)
).setObjects(*_AIRFLOW_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAirFlowTooLowTooLong.setStatus('current')

netBotzAirFlowTooLowForTooLongRTN = NotificationType(
    _AIRFLOW_TRAP_BASE + (105,)
).setObjects(*_AIRFLOW_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAirFlowTooLowForTooLongRTN.setStatus('current')

netBotzAirFlowUnplugged = NotificationType(
    _AIRFLOW_TRAP_BASE + (6,)
).setObjects(*_AIRFLOW_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAirFlowUnplugged.setStatus('current')

netBotzAirFlowReplugged = NotificationType(
    _AIRFLOW_TRAP_BASE + (106,)
).setObjects(*_AIRFLOW_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAirFlowReplugged.setStatus('current')

netBotzAirFlowIncreasingTooQuickly = NotificationType(
    _AIRFLOW_TRAP_BASE + (8,)
).setObjects(*_AIRFLOW_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAirFlowIncreasingTooQuickly.setStatus('current')

netBotzAirFlowNotIncreasingTooQuickly = NotificationType(
    _AIRFLOW_TRAP_BASE + (108,)
).setObjects(*_AIRFLOW_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAirFlowNotIncreasingTooQuickly.setStatus('current')

netBotzAirFlowDecreasingTooQuickly = NotificationType(
    _AIRFLOW_TRAP_BASE + (9,)
).setObjects(*_AIRFLOW_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAirFlowDecreasingTooQuickly.setStatus('current')

netBotzAirFlowNotDecreasingTooQuickly = NotificationType(
    _AIRFLOW_TRAP_BASE + (109,)
).setObjects(*_AIRFLOW_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAirFlowNotDecreasingTooQuickly.setStatus('current')

netBotzAirFlowError = NotificationType(
    _AIRFLOW_TRAP_BASE + (1,)
).setObjects(*_AIRFLOW_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAirFlowError.setStatus('current')

netBotzAirFlowErrorRTN = NotificationType(
    _AIRFLOW_TRAP_BASE + (101,)
).setObjects(*_AIRFLOW_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAirFlowErrorRTN.setStatus('current')
# Varbinds carried by every NETBOTZ320-MIB audio trap notification below.
_AUDIO_TRAP_OBJECTS = tuple(
    ("NETBOTZ320-MIB", _obj)
    for _obj in (
        "netBotzTrapErrorID",
        "netBotzTrapErrorType",
        "netBotzTrapErrorTypeLabel",
        "netBotzTrapSensorID",
        "netBotzTrapSensorLabel",
        "netBotzTrapPodID",
        "netBotzTrapPodLabel",
        "netBotzTrapPortID",
        "netBotzTrapPortLabel",
        "netBotzTrapStartTime",
        "netBotzTrapNotifyTime",
        "netBotzTrapResolveTime",
        "netBotzTrapSeverity",
        "netBotzTrapSensorValue",
        "netBotzTrapSensorValueInt",
        "netBotzTrapSensorValueFraction",
    )
)

# Audio sensor trap subtree: enterprises.5528.100.10.2.5.0.<leaf>
_AUDIO_TRAP_BASE = (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 5, 0)

netBotzAudioTooHigh = NotificationType(
    _AUDIO_TRAP_BASE + (2,)
).setObjects(*_AUDIO_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAudioTooHigh.setStatus('current')

netBotzAudioTooHighRTN = NotificationType(
    _AUDIO_TRAP_BASE + (102,)
).setObjects(*_AUDIO_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAudioTooHighRTN.setStatus('current')

netBotzAudioTooLow = NotificationType(
    _AUDIO_TRAP_BASE + (3,)
).setObjects(*_AUDIO_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAudioTooLow.setStatus('current')

netBotzAudioTooLowRTN = NotificationType(
    _AUDIO_TRAP_BASE + (103,)
).setObjects(*_AUDIO_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAudioTooLowRTN.setStatus('current')

netBotzAudioTooHighTooLong = NotificationType(
    _AUDIO_TRAP_BASE + (4,)
).setObjects(*_AUDIO_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAudioTooHighTooLong.setStatus('current')

netBotzAudioTooHighTooLongRTN = NotificationType(
    _AUDIO_TRAP_BASE + (104,)
).setObjects(*_AUDIO_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAudioTooHighTooLongRTN.setStatus('current')

netBotzAudioTooLowTooLong = NotificationType(
    _AUDIO_TRAP_BASE + (5,)
).setObjects(*_AUDIO_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAudioTooLowTooLong.setStatus('current')

netBotzAudioTooLowForTooLongRTN = NotificationType(
    _AUDIO_TRAP_BASE + (105,)
).setObjects(*_AUDIO_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAudioTooLowForTooLongRTN.setStatus('current')

netBotzAudioUnplugged = NotificationType(
    _AUDIO_TRAP_BASE + (6,)
).setObjects(*_AUDIO_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAudioUnplugged.setStatus('current')

netBotzAudioReplugged = NotificationType(
    _AUDIO_TRAP_BASE + (106,)
).setObjects(*_AUDIO_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAudioReplugged.setStatus('current')

netBotzAudioIncreasingTooQuickly = NotificationType(
    _AUDIO_TRAP_BASE + (8,)
).setObjects(*_AUDIO_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAudioIncreasingTooQuickly.setStatus('current')

netBotzAudioNotIncreasingTooQuickly = NotificationType(
    _AUDIO_TRAP_BASE + (108,)
).setObjects(*_AUDIO_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAudioNotIncreasingTooQuickly.setStatus('current')

netBotzAudioDecreasingTooQuickly = NotificationType(
    _AUDIO_TRAP_BASE + (9,)
).setObjects(*_AUDIO_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAudioDecreasingTooQuickly.setStatus('current')

netBotzAudioNotDecreasingTooQuickly = NotificationType(
    _AUDIO_TRAP_BASE + (109,)
).setObjects(*_AUDIO_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAudioNotDecreasingTooQuickly.setStatus('current')

netBotzAudioError = NotificationType(
    _AUDIO_TRAP_BASE + (1,)
).setObjects(*_AUDIO_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAudioError.setStatus('current')

netBotzAudioErrorRTN = NotificationType(
    _AUDIO_TRAP_BASE + (101,)
).setObjects(*_AUDIO_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAudioErrorRTN.setStatus('current')
# Varbinds carried by every NETBOTZ320-MIB amp-detect trap notification below.
_AMPDETECT_TRAP_OBJECTS = tuple(
    ("NETBOTZ320-MIB", _obj)
    for _obj in (
        "netBotzTrapErrorID",
        "netBotzTrapErrorType",
        "netBotzTrapErrorTypeLabel",
        "netBotzTrapSensorID",
        "netBotzTrapSensorLabel",
        "netBotzTrapPodID",
        "netBotzTrapPodLabel",
        "netBotzTrapPortID",
        "netBotzTrapPortLabel",
        "netBotzTrapStartTime",
        "netBotzTrapNotifyTime",
        "netBotzTrapResolveTime",
        "netBotzTrapSeverity",
        "netBotzTrapSensorValue",
        "netBotzTrapSensorValueInt",
        "netBotzTrapSensorValueFraction",
    )
)

# Amp-detect sensor trap subtree: enterprises.5528.100.10.2.6.0.<leaf>
_AMPDETECT_TRAP_BASE = (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 6, 0)

netBotzAmpDetectTooHigh = NotificationType(
    _AMPDETECT_TRAP_BASE + (2,)
).setObjects(*_AMPDETECT_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAmpDetectTooHigh.setStatus('current')

netBotzAmpDetectTooHighRTN = NotificationType(
    _AMPDETECT_TRAP_BASE + (102,)
).setObjects(*_AMPDETECT_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAmpDetectTooHighRTN.setStatus('current')

netBotzAmpDetectTooLow = NotificationType(
    _AMPDETECT_TRAP_BASE + (3,)
).setObjects(*_AMPDETECT_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAmpDetectTooLow.setStatus('current')

netBotzAmpDetectTooLowRTN = NotificationType(
    _AMPDETECT_TRAP_BASE + (103,)
).setObjects(*_AMPDETECT_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAmpDetectTooLowRTN.setStatus('current')

netBotzAmpDetectTooHighTooLong = NotificationType(
    _AMPDETECT_TRAP_BASE + (4,)
).setObjects(*_AMPDETECT_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAmpDetectTooHighTooLong.setStatus('current')

netBotzAmpDetectTooHighTooLongRTN = NotificationType(
    _AMPDETECT_TRAP_BASE + (104,)
).setObjects(*_AMPDETECT_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAmpDetectTooHighTooLongRTN.setStatus('current')

netBotzAmpDetectTooLowTooLong = NotificationType(
    _AMPDETECT_TRAP_BASE + (5,)
).setObjects(*_AMPDETECT_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAmpDetectTooLowTooLong.setStatus('current')

netBotzAmpDetectTooLowForTooLongRTN = NotificationType(
    _AMPDETECT_TRAP_BASE + (105,)
).setObjects(*_AMPDETECT_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAmpDetectTooLowForTooLongRTN.setStatus('current')

netBotzAmpDetectUnplugged = NotificationType(
    _AMPDETECT_TRAP_BASE + (6,)
).setObjects(*_AMPDETECT_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAmpDetectUnplugged.setStatus('current')

netBotzAmpDetectReplugged = NotificationType(
    _AMPDETECT_TRAP_BASE + (106,)
).setObjects(*_AMPDETECT_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAmpDetectReplugged.setStatus('current')

netBotzAmpDetectIncreasingTooQuickly = NotificationType(
    _AMPDETECT_TRAP_BASE + (8,)
).setObjects(*_AMPDETECT_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAmpDetectIncreasingTooQuickly.setStatus('current')

netBotzAmpDetectNotIncreasingTooQuickly = NotificationType(
    _AMPDETECT_TRAP_BASE + (108,)
).setObjects(*_AMPDETECT_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAmpDetectNotIncreasingTooQuickly.setStatus('current')

netBotzAmpDetectDecreasingTooQuickly = NotificationType(
    _AMPDETECT_TRAP_BASE + (9,)
).setObjects(*_AMPDETECT_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAmpDetectDecreasingTooQuickly.setStatus('current')

netBotzAmpDetectNotDecreasingTooQuickly = NotificationType(
    _AMPDETECT_TRAP_BASE + (109,)
).setObjects(*_AMPDETECT_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAmpDetectNotDecreasingTooQuickly.setStatus('current')

netBotzAmpDetectError = NotificationType(
    _AMPDETECT_TRAP_BASE + (1,)
).setObjects(*_AMPDETECT_TRAP_OBJECTS)
if mibBuilder.loadTexts:
    netBotzAmpDetectError.setStatus('current')

netBotzAmpDetectErrorRTN = NotificationType(
    _AMPDETECT_TRAP_BASE + (101,)
).setObjects(*_AMPDETECT_TRAP_OBJECTS)
if mibBuilder.loadTexts: netBotzAmpDetectErrorRTN.setStatus('current')
netBotzDryContactUnplugged = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 7, 0, 6)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDryContactUnplugged.setStatus('current')
netBotzDryContactReplugged = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 7, 0, 106)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDryContactReplugged.setStatus('current')
netBotzDryContactError = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 7, 0, 1)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDryContactError.setStatus('current')
netBotzDryContactErrorRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 7, 0, 101)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDryContactErrorRTN.setStatus('current')
netBotzDryContactValueError = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 7, 0, 10)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDryContactValueError.setStatus('current')
netBotzDryContactValueErrorRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 7, 0, 110)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDryContactValueErrorRTN.setStatus('current')
netBotzCameraMotionSensorUnplugged = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 8, 0, 6)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzCameraMotionSensorUnplugged.setStatus('current')
netBotzCameraMotionSensorReplugged = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 8, 0, 106)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzCameraMotionSensorReplugged.setStatus('current')
netBotzCameraMotionSensorError = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 8, 0, 1)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzCameraMotionSensorError.setStatus('current')
netBotzCameraMotionSensorErrorRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 8, 0, 101)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzCameraMotionSensorErrorRTN.setStatus('current')
netBotzCameraMotionSensorValueError = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 8, 0, 10)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzCameraMotionSensorValueError.setStatus('current')
netBotzCameraMotionSensorValueErrorRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 8, 0, 110)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzCameraMotionSensorValueErrorRTN.setStatus('current')
netBotzDoorSensorUnplugged = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 9, 0, 6)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDoorSensorUnplugged.setStatus('current')
netBotzDoorSensorReplugged = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 9, 0, 106)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDoorSensorReplugged.setStatus('current')
netBotzDoorSensorError = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 9, 0, 1)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDoorSensorError.setStatus('current')
netBotzDoorSensorErrorRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 9, 0, 101)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDoorSensorErrorRTN.setStatus('current')
netBotzDoorSensorValueError = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 9, 0, 10)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDoorSensorValueError.setStatus('current')
netBotzDoorSensorValueErrorRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 9, 0, 110)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzDoorSensorValueErrorRTN.setStatus('current')
netBotzMicPlugSensorUnplugged = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 10, 0, 6)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMicPlugSensorUnplugged.setStatus('current')
netBotzMicPlugSensorReplugged = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 10, 0, 106)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMicPlugSensorReplugged.setStatus('current')
netBotzMicPlugSensorError = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 10, 0, 1)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMicPlugSensorError.setStatus('current')
netBotzMicPlugSensorErrorRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 10, 0, 101)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMicPlugSensorErrorRTN.setStatus('current')
netBotzMicPlugSensorValueError = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 10, 0, 10)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMicPlugSensorValueError.setStatus('current')
netBotzMicPlugSensorValueErrorRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 10, 0, 110)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMicPlugSensorValueErrorRTN.setStatus('current')
netBotzSpeakerPlugSensorUnplugged = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 11, 0, 6)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzSpeakerPlugSensorUnplugged.setStatus('current')
netBotzSpeakerPlugSensorReplugged = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 11, 0, 106)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzSpeakerPlugSensorReplugged.setStatus('current')
netBotzSpeakerPlugSensorError = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 11, 0, 1)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzSpeakerPlugSensorError.setStatus('current')
netBotzSpeakerPlugSensorErrorRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 11, 0, 101)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzSpeakerPlugSensorErrorRTN.setStatus('current')
netBotzSpeakerPlugSensorValueError = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 11, 0, 10)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzSpeakerPlugSensorValueError.setStatus('current')
netBotzSpeakerPlugSensorValueErrorRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 11, 0, 110)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzSpeakerPlugSensorValueErrorRTN.setStatus('current')
# NETBOTZ320-MIB notification (trap) definitions.
#
# Every NetBotz trap in this MIB carries the same 16 varbinds (error
# id/type/label, sensor, pod, port, start/notify/resolve times, severity
# and the three sensor-value fields), so the shared objects tuple is built
# once and each NotificationType is created from a (symbol, OID) table
# instead of 42 near-identical generated statements.

# Varbind list common to all NetBotz notifications, in trap payload order.
_netBotzTrapObjects = tuple(
    ("NETBOTZ320-MIB", _sym) for _sym in (
        "netBotzTrapErrorID", "netBotzTrapErrorType", "netBotzTrapErrorTypeLabel",
        "netBotzTrapSensorID", "netBotzTrapSensorLabel", "netBotzTrapPodID",
        "netBotzTrapPodLabel", "netBotzTrapPortID", "netBotzTrapPortLabel",
        "netBotzTrapStartTime", "netBotzTrapNotifyTime", "netBotzTrapResolveTime",
        "netBotzTrapSeverity", "netBotzTrapSensorValue", "netBotzTrapSensorValueInt",
        "netBotzTrapSensorValueFraction",
    )
)

# (module-level symbol, notification OID) pairs.  By NetBotz convention a
# final sub-identifier >= 100 is the return-to-normal / resolved
# counterpart of trap number (n - 100).
for _name, _oid in (
    ("netBotzTVSignalSensorUnplugged", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 12, 0, 6)),
    ("netBotzTVSignalSensorReplugged", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 12, 0, 106)),
    ("netBotzTVSignalSensorError", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 12, 0, 1)),
    ("netBotzTVSignalSensorErrorRTN", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 12, 0, 101)),
    ("netBotzTVSignalSensorValueError", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 12, 0, 10)),
    ("netBotzTVSignalSensorValueErrorRTN", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 12, 0, 110)),
    ("netBotzPodUnplugged", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 3, 0, 7)),
    ("netBotzPodReplugged", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 3, 0, 107)),
    ("netBotzSensorPodUnplugged", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 3, 2, 0, 7)),
    ("netBotzSensorPodReplugged", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 3, 2, 0, 107)),
    ("netBotzCameraPodUnplugged", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 3, 3, 0, 7)),
    ("netBotzCameraPodReplugged", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 3, 3, 0, 107)),
    ("netBotzCCTVPodUnplugged", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 3, 4, 0, 7)),
    ("netBotzCCTVPodReplugged", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 3, 4, 0, 107)),
    ("netBotz4to20mAPodUnplugged", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 3, 5, 0, 7)),
    ("netBotz4to20mAPodReplugged", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 3, 5, 0, 107)),
    ("netBotzLogonError", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 3, 0, 11)),
    ("netBotzLogonErrorResolved", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 3, 0, 111)),
    ("netBotzDriveNotFoundError", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 3, 0, 12)),
    ("netBotzDriveNotFoundErrorResolved", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 3, 0, 112)),
    ("netBotzRmtLinkError", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 3, 0, 13)),
    ("netBotzRmtLinkErrorResolved", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 3, 0, 113)),
    ("netBotzGPSPositionTooHigh", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 13, 0, 2)),
    ("netBotzGPSPositionTooHighRTN", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 13, 0, 102)),
    ("netBotzGPSPositionTooLow", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 13, 0, 3)),
    ("netBotzGPSPositionTooLowRTN", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 13, 0, 103)),
    ("netBotzGPSPositionTooHighTooLong", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 13, 0, 4)),
    ("netBotzGPSPositionTooHighTooLongRTN", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 13, 0, 104)),
    ("netBotzGPSPositionTooLowTooLong", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 13, 0, 5)),
    # NOTE: the generated symbol below has "ForTooLong" while its base
    # trap is "TooLowTooLong"; the name comes from the MIB source and is
    # kept verbatim for importer compatibility.
    ("netBotzGPSPositionTooLowForTooLongRTN", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 13, 0, 105)),
    ("netBotzGPSPositionUnplugged", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 13, 0, 6)),
    ("netBotzGPSPositionReplugged", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 13, 0, 106)),
    ("netBotzGPSPositionIncreasingTooQuickly", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 13, 0, 8)),
    ("netBotzGPSPositionNotIncreasingTooQuickly", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 13, 0, 108)),
    ("netBotzGPSPositionDecreasingTooQuickly", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 13, 0, 9)),
    ("netBotzGPSPositionNotDecreasingTooQuickly", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 13, 0, 109)),
    ("netBotzGPSPositionError", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 13, 0, 1)),
    ("netBotzGPSPositionErrorRTN", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 13, 0, 101)),
    ("netBotzGPSMovementTooHigh", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 14, 0, 2)),
    ("netBotzGPSMovementTooHighRTN", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 14, 0, 102)),
    ("netBotzGPSMovementTooLow", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 14, 0, 3)),
    ("netBotzGPSMovementTooLowRTN", (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 14, 0, 103)),
):
    _notification = NotificationType(_oid).setObjects(*_netBotzTrapObjects)
    # As in the generated original, textual status is only attached when
    # the MIB builder was asked to load texts.
    if mibBuilder.loadTexts:
        _notification.setStatus('current')
    # Bind under the original generated symbol so exportSymbols and
    # external importers of this module keep working unchanged.
    globals()[_name] = _notification

del _name, _oid, _notification
if mibBuilder.loadTexts: netBotzGPSMovementTooLowRTN.setStatus('current')
netBotzGPSMovementTooHighTooLong = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 14, 0, 4)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzGPSMovementTooHighTooLong.setStatus('current')
netBotzGPSMovementTooHighTooLongRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 14, 0, 104)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzGPSMovementTooHighTooLongRTN.setStatus('current')
netBotzGPSMovementTooLowTooLong = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 14, 0, 5)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzGPSMovementTooLowTooLong.setStatus('current')
netBotzGPSMovementTooLowForTooLongRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 14, 0, 105)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzGPSMovementTooLowForTooLongRTN.setStatus('current')
netBotzGPSMovementUnplugged = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 14, 0, 6)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzGPSMovementUnplugged.setStatus('current')
netBotzGPSMovementReplugged = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 14, 0, 106)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzGPSMovementReplugged.setStatus('current')
netBotzGPSMovementIncreasingTooQuickly = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 14, 0, 8)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzGPSMovementIncreasingTooQuickly.setStatus('current')
netBotzGPSMovementNotIncreasingTooQuickly = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 14, 0, 108)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzGPSMovementNotIncreasingTooQuickly.setStatus('current')
netBotzGPSMovementDecreasingTooQuickly = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 14, 0, 9)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzGPSMovementDecreasingTooQuickly.setStatus('current')
netBotzGPSMovementNotDecreasingTooQuickly = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 14, 0, 109)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzGPSMovementNotDecreasingTooQuickly.setStatus('current')
netBotzGPSMovementError = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 14, 0, 1)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzGPSMovementError.setStatus('current')
netBotzGPSMovementErrorRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 14, 0, 101)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzGPSMovementErrorRTN.setStatus('current')
netBotzPacketDropTooHigh = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 23, 0, 2)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzPacketDropTooHigh.setStatus('current')
netBotzPacketDropTooHighRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 23, 0, 102)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzPacketDropTooHighRTN.setStatus('current')
netBotzPacketDropTooLow = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 23, 0, 3)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzPacketDropTooLow.setStatus('current')
netBotzPacketDropTooLowRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 23, 0, 103)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzPacketDropTooLowRTN.setStatus('current')
netBotzPacketDropTooHighTooLong = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 23, 0, 4)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzPacketDropTooHighTooLong.setStatus('current')
netBotzPacketDropTooHighTooLongRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 23, 0, 104)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzPacketDropTooHighTooLongRTN.setStatus('current')
netBotzPacketDropTooLowTooLong = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 23, 0, 5)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzPacketDropTooLowTooLong.setStatus('current')
netBotzPacketDropTooLowForTooLongRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 23, 0, 105)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzPacketDropTooLowForTooLongRTN.setStatus('current')
netBotzPacketDropUnplugged = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 23, 0, 6)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzPacketDropUnplugged.setStatus('current')
netBotzPacketDropReplugged = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 23, 0, 106)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzPacketDropReplugged.setStatus('current')
netBotzPacketDropIncreasingTooQuickly = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 23, 0, 8)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzPacketDropIncreasingTooQuickly.setStatus('current')
netBotzPacketDropNotIncreasingTooQuickly = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 23, 0, 108)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzPacketDropNotIncreasingTooQuickly.setStatus('current')
netBotzPacketDropDecreasingTooQuickly = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 23, 0, 9)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzPacketDropDecreasingTooQuickly.setStatus('current')
netBotzPacketDropNotDecreasingTooQuickly = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 23, 0, 109)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzPacketDropNotDecreasingTooQuickly.setStatus('current')
netBotzPacketDropError = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 23, 0, 1)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzPacketDropError.setStatus('current')
netBotzPacketDropErrorRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 23, 0, 101)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzPacketDropErrorRTN.setStatus('current')
netBotzSNMPCrawlerTooHigh = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 24, 0, 2)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzSNMPCrawlerTooHigh.setStatus('current')
netBotzSNMPCrawlerTooHighRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 24, 0, 102)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzSNMPCrawlerTooHighRTN.setStatus('current')
# --- netBotzSNMPCrawler notifications (OID 1.3.6.1.4.1.5528.100.10.2.24.0.*) ---
#
# Rewritten data-driven: every NetBotz trap in this module carries the same 16
# netBotzTrap* varbinds, so build that tuple once instead of repeating it
# verbatim inside each ~800-character generated declaration.
_netBotzTrapVarBinds = tuple(
    ("NETBOTZ320-MIB", _varBindName) for _varBindName in (
        "netBotzTrapErrorID", "netBotzTrapErrorType", "netBotzTrapErrorTypeLabel",
        "netBotzTrapSensorID", "netBotzTrapSensorLabel", "netBotzTrapPodID",
        "netBotzTrapPodLabel", "netBotzTrapPortID", "netBotzTrapPortLabel",
        "netBotzTrapStartTime", "netBotzTrapNotifyTime", "netBotzTrapResolveTime",
        "netBotzTrapSeverity", "netBotzTrapSensorValue", "netBotzTrapSensorValueInt",
        "netBotzTrapSensorValueFraction",
    )
)

# Trap numbering convention visible in this module: index N is the alert and
# N + 100 its matching return-to-normal ("RTN") notification.  Indexes 7/107
# are absent here, exactly as in the declarations this replaces.
# NOTE(review): "netBotzSNMPCrawlerTooLowForTooLongRTN" does not mirror its
# alert name "netBotzSNMPCrawlerTooLowTooLong"; the symbol is kept byte-for-byte
# as generated because it is part of the module's public interface.
for _symbolName, _trapIndex in (
    ("netBotzSNMPCrawlerTooLow", 3),
    ("netBotzSNMPCrawlerTooLowRTN", 103),
    ("netBotzSNMPCrawlerTooHighTooLong", 4),
    ("netBotzSNMPCrawlerTooHighTooLongRTN", 104),
    ("netBotzSNMPCrawlerTooLowTooLong", 5),
    ("netBotzSNMPCrawlerTooLowForTooLongRTN", 105),
    ("netBotzSNMPCrawlerUnplugged", 6),
    ("netBotzSNMPCrawlerReplugged", 106),
    ("netBotzSNMPCrawlerIncreasingTooQuickly", 8),
    ("netBotzSNMPCrawlerNotIncreasingTooQuickly", 108),
    ("netBotzSNMPCrawlerDecreasingTooQuickly", 9),
    ("netBotzSNMPCrawlerNotDecreasingTooQuickly", 109),
    ("netBotzSNMPCrawlerError", 1),
    ("netBotzSNMPCrawlerErrorRTN", 101),
    ("netBotzSNMPCrawlerSensorValueError", 10),
    ("netBotzSNMPCrawlerSensorValueErrorRTN", 110),
):
    _notification = NotificationType(
        (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 24, 0, _trapIndex)
    ).setObjects(*_netBotzTrapVarBinds)
    # setStatus is only applied when textual info is loaded, as in the
    # original generated code.
    if mibBuilder.loadTexts:
        _notification.setStatus('current')
    # Bind under the same module-level name the generated declarations used,
    # so any later reference to these symbols in this module still resolves.
    globals()[_symbolName] = _notification
# --- netBotzGPSStatusSensor notifications (OID 1.3.6.1.4.1.5528.100.10.2.15.0.*) ---
#
# Data-driven form of the generated declarations: the shared 16-varbind list
# is built once instead of being duplicated in every statement.  Index N is
# the alert, N + 100 the matching return-to-normal ("RTN") notification.
_netBotzTrapVarBinds = tuple(
    ("NETBOTZ320-MIB", _varBindName) for _varBindName in (
        "netBotzTrapErrorID", "netBotzTrapErrorType", "netBotzTrapErrorTypeLabel",
        "netBotzTrapSensorID", "netBotzTrapSensorLabel", "netBotzTrapPodID",
        "netBotzTrapPodLabel", "netBotzTrapPortID", "netBotzTrapPortLabel",
        "netBotzTrapStartTime", "netBotzTrapNotifyTime", "netBotzTrapResolveTime",
        "netBotzTrapSeverity", "netBotzTrapSensorValue", "netBotzTrapSensorValueInt",
        "netBotzTrapSensorValueFraction",
    )
)

for _symbolName, _trapIndex in (
    ("netBotzGPSStatusSensorUnplugged", 6),
    ("netBotzGPSStatusSensorReplugged", 106),
    ("netBotzGPSStatusSensorError", 1),
    ("netBotzGPSStatusSensorErrorRTN", 101),
    ("netBotzGPSStatusSensorValueError", 10),
    ("netBotzGPSStatusSensorValueErrorRTN", 110),
):
    _notification = NotificationType(
        (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 15, 0, _trapIndex)
    ).setObjects(*_netBotzTrapVarBinds)
    # Status is only set when textual info is loaded, as in the original.
    if mibBuilder.loadTexts:
        _notification.setStatus('current')
    # Preserve the generated module-level symbol name.
    globals()[_symbolName] = _notification
# --- netBotzWirelessStatusSensor notifications (OID 1.3.6.1.4.1.5528.100.10.2.22.0.*) ---
#
# Data-driven form of the generated declarations: the shared 16-varbind list
# is built once instead of being duplicated in every statement.  Index N is
# the alert, N + 100 the matching return-to-normal ("RTN") notification.
_netBotzTrapVarBinds = tuple(
    ("NETBOTZ320-MIB", _varBindName) for _varBindName in (
        "netBotzTrapErrorID", "netBotzTrapErrorType", "netBotzTrapErrorTypeLabel",
        "netBotzTrapSensorID", "netBotzTrapSensorLabel", "netBotzTrapPodID",
        "netBotzTrapPodLabel", "netBotzTrapPortID", "netBotzTrapPortLabel",
        "netBotzTrapStartTime", "netBotzTrapNotifyTime", "netBotzTrapResolveTime",
        "netBotzTrapSeverity", "netBotzTrapSensorValue", "netBotzTrapSensorValueInt",
        "netBotzTrapSensorValueFraction",
    )
)

for _symbolName, _trapIndex in (
    ("netBotzWirelessStatusSensorUnplugged", 6),
    ("netBotzWirelessStatusSensorReplugged", 106),
    ("netBotzWirelessStatusSensorError", 1),
    ("netBotzWirelessStatusSensorErrorRTN", 101),
    ("netBotzWirelessStatusSensorValueError", 10),
    ("netBotzWirelessStatusSensorValueErrorRTN", 110),
):
    _notification = NotificationType(
        (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 22, 0, _trapIndex)
    ).setObjects(*_netBotzTrapVarBinds)
    # Status is only set when textual info is loaded, as in the original.
    if mibBuilder.loadTexts:
        _notification.setStatus('current')
    # Preserve the generated module-level symbol name.
    globals()[_symbolName] = _notification
# --- netBotzPlugModuleStatusSensor notifications (OID 1.3.6.1.4.1.5528.100.10.2.25.0.*) ---
#
# Data-driven form of the generated declarations: the shared 16-varbind list
# is built once instead of being duplicated in every statement.  Index N is
# the alert, N + 100 the matching return-to-normal ("RTN") notification.
_netBotzTrapVarBinds = tuple(
    ("NETBOTZ320-MIB", _varBindName) for _varBindName in (
        "netBotzTrapErrorID", "netBotzTrapErrorType", "netBotzTrapErrorTypeLabel",
        "netBotzTrapSensorID", "netBotzTrapSensorLabel", "netBotzTrapPodID",
        "netBotzTrapPodLabel", "netBotzTrapPortID", "netBotzTrapPortLabel",
        "netBotzTrapStartTime", "netBotzTrapNotifyTime", "netBotzTrapResolveTime",
        "netBotzTrapSeverity", "netBotzTrapSensorValue", "netBotzTrapSensorValueInt",
        "netBotzTrapSensorValueFraction",
    )
)

for _symbolName, _trapIndex in (
    ("netBotzPlugModuleStatusSensorUnplugged", 6),
    ("netBotzPlugModuleStatusSensorReplugged", 106),
    ("netBotzPlugModuleStatusSensorError", 1),
    ("netBotzPlugModuleStatusSensorErrorRTN", 101),
    ("netBotzPlugModuleStatusSensorValueError", 10),
    ("netBotzPlugModuleStatusSensorValueErrorRTN", 110),
):
    _notification = NotificationType(
        (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 25, 0, _trapIndex)
    ).setObjects(*_netBotzTrapVarBinds)
    # Status is only set when textual info is loaded, as in the original.
    if mibBuilder.loadTexts:
        _notification.setStatus('current')
    # Preserve the generated module-level symbol name.
    globals()[_symbolName] = _notification
# --- netBotzOutputControlSensor notifications (OID 1.3.6.1.4.1.5528.100.10.2.26.0.*) ---
#
# Data-driven form of the generated declarations: the shared 16-varbind list
# is built once instead of being duplicated in every statement.  Index N is
# the alert, N + 100 the matching return-to-normal ("RTN") notification.
_netBotzTrapVarBinds = tuple(
    ("NETBOTZ320-MIB", _varBindName) for _varBindName in (
        "netBotzTrapErrorID", "netBotzTrapErrorType", "netBotzTrapErrorTypeLabel",
        "netBotzTrapSensorID", "netBotzTrapSensorLabel", "netBotzTrapPodID",
        "netBotzTrapPodLabel", "netBotzTrapPortID", "netBotzTrapPortLabel",
        "netBotzTrapStartTime", "netBotzTrapNotifyTime", "netBotzTrapResolveTime",
        "netBotzTrapSeverity", "netBotzTrapSensorValue", "netBotzTrapSensorValueInt",
        "netBotzTrapSensorValueFraction",
    )
)

for _symbolName, _trapIndex in (
    ("netBotzOutputControlSensorUnplugged", 6),
    ("netBotzOutputControlSensorReplugged", 106),
    ("netBotzOutputControlSensorError", 1),
    ("netBotzOutputControlSensorErrorRTN", 101),
    ("netBotzOutputControlSensorValueError", 10),
    ("netBotzOutputControlSensorValueErrorRTN", 110),
):
    _notification = NotificationType(
        (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 26, 0, _trapIndex)
    ).setObjects(*_netBotzTrapVarBinds)
    # Status is only set when textual info is loaded, as in the original.
    if mibBuilder.loadTexts:
        _notification.setStatus('current')
    # Preserve the generated module-level symbol name.
    globals()[_symbolName] = _notification
netBotzMultiRAESensorTooHigh = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 27, 0, 2)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAESensorTooHigh.setStatus('current')
netBotzMultiRAESensorTooHighRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 27, 0, 102)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAESensorTooHighRTN.setStatus('current')
netBotzMultiRAESensorTooLow = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 27, 0, 3)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAESensorTooLow.setStatus('current')
netBotzMultiRAESensorTooLowRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 27, 0, 103)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAESensorTooLowRTN.setStatus('current')
netBotzMultiRAESensorTooHighTooLong = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 27, 0, 4)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAESensorTooHighTooLong.setStatus('current')
netBotzMultiRAESensorTooHighTooLongRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 27, 0, 104)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAESensorTooHighTooLongRTN.setStatus('current')
netBotzMultiRAESensorTooLowTooLong = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 27, 0, 5)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAESensorTooLowTooLong.setStatus('current')
netBotzMultiRAESensorTooLowForTooLongRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 27, 0, 105)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAESensorTooLowForTooLongRTN.setStatus('current')
netBotzMultiRAESensorUnplugged = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 27, 0, 6)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAESensorUnplugged.setStatus('current')
netBotzMultiRAESensorReplugged = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 27, 0, 106)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAESensorReplugged.setStatus('current')
netBotzMultiRAESensorIncreasingTooQuickly = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 27, 0, 8)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAESensorIncreasingTooQuickly.setStatus('current')
netBotzMultiRAESensorNotIncreasingTooQuickly = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 27, 0, 108)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAESensorNotIncreasingTooQuickly.setStatus('current')
netBotzMultiRAESensorDecreasingTooQuickly = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 27, 0, 9)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAESensorDecreasingTooQuickly.setStatus('current')
netBotzMultiRAESensorNotDecreasingTooQuickly = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 27, 0, 109)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAESensorNotDecreasingTooQuickly.setStatus('current')
netBotzMultiRAESensorError = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 27, 0, 1)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAESensorError.setStatus('current')
netBotzMultiRAESensorErrorRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 27, 0, 101)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAESensorErrorRTN.setStatus('current')
netBotzMultiRAESensorSensorValueError = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 27, 0, 10)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAESensorSensorValueError.setStatus('current')
netBotzMultiRAESensorSensorValueErrorRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 27, 0, 110)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAESensorSensorValueErrorRTN.setStatus('current')
netBotzMultiRAESensorStatusUnplugged = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 28, 0, 6)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAESensorStatusUnplugged.setStatus('current')
netBotzMultiRAESensorStatusReplugged = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 28, 0, 106)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAESensorStatusReplugged.setStatus('current')
netBotzMultiRAESensorStatusError = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 28, 0, 1)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAESensorStatusError.setStatus('current')
netBotzMultiRAESensorStatusErrorRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 28, 0, 101)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAESensorStatusErrorRTN.setStatus('current')
netBotzMultiRAESensorStatusValueError = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 28, 0, 10)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAESensorStatusValueError.setStatus('current')
netBotzMultiRAESensorStatusValueErrorRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 28, 0, 110)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAESensorStatusValueErrorRTN.setStatus('current')
netBotzMultiRAEDeviceStatusUnplugged = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 29, 0, 6)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAEDeviceStatusUnplugged.setStatus('current')
netBotzMultiRAEDeviceStatusReplugged = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 29, 0, 106)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAEDeviceStatusReplugged.setStatus('current')
netBotzMultiRAEDeviceStatusError = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 29, 0, 1)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAEDeviceStatusError.setStatus('current')
netBotzMultiRAEDeviceStatusErrorRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 29, 0, 101)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAEDeviceStatusErrorRTN.setStatus('current')
netBotzMultiRAEDeviceStatusValueError = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 29, 0, 10)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAEDeviceStatusValueError.setStatus('current')
netBotzMultiRAEDeviceStatusValueErrorRTN = NotificationType((1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 29, 0, 110)).setObjects(("NETBOTZ320-MIB", "netBotzTrapErrorID"), ("NETBOTZ320-MIB", "netBotzTrapErrorType"), ("NETBOTZ320-MIB", "netBotzTrapErrorTypeLabel"), ("NETBOTZ320-MIB", "netBotzTrapSensorID"), ("NETBOTZ320-MIB", "netBotzTrapSensorLabel"), ("NETBOTZ320-MIB", "netBotzTrapPodID"), ("NETBOTZ320-MIB", "netBotzTrapPodLabel"), ("NETBOTZ320-MIB", "netBotzTrapPortID"), ("NETBOTZ320-MIB", "netBotzTrapPortLabel"), ("NETBOTZ320-MIB", "netBotzTrapStartTime"), ("NETBOTZ320-MIB", "netBotzTrapNotifyTime"), ("NETBOTZ320-MIB", "netBotzTrapResolveTime"), ("NETBOTZ320-MIB", "netBotzTrapSeverity"), ("NETBOTZ320-MIB", "netBotzTrapSensorValue"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueInt"), ("NETBOTZ320-MIB", "netBotzTrapSensorValueFraction"))
if mibBuilder.loadTexts: netBotzMultiRAEDeviceStatusValueErrorRTN.setStatus('current')
# Link-status sensor traps (NETBOTZ320-MIB). Every trap in this MIB carries
# the same 16 varbinds; build the ("module", "symbol") pairs once here and
# splat them into each setObjects() call instead of repeating the list.
_lsTrapObjects = tuple(
    ("NETBOTZ320-MIB", _sym)
    for _sym in (
        "netBotzTrapErrorID", "netBotzTrapErrorType", "netBotzTrapErrorTypeLabel",
        "netBotzTrapSensorID", "netBotzTrapSensorLabel", "netBotzTrapPodID",
        "netBotzTrapPodLabel", "netBotzTrapPortID", "netBotzTrapPortLabel",
        "netBotzTrapStartTime", "netBotzTrapNotifyTime", "netBotzTrapResolveTime",
        "netBotzTrapSeverity", "netBotzTrapSensorValue", "netBotzTrapSensorValueInt",
        "netBotzTrapSensorValueFraction",
    )
)

# Common OID prefix for the link-status sensor notifications; the final arc
# selects the individual trap (return-to-normal variants use base arc + 100).
_lsTrapBase = (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 30, 0)

netBotzLinkStatusSensorUnplugged = NotificationType(
    _lsTrapBase + (6,)
).setObjects(*_lsTrapObjects)
if mibBuilder.loadTexts:
    netBotzLinkStatusSensorUnplugged.setStatus('current')

netBotzLinkStatusSensorReplugged = NotificationType(
    _lsTrapBase + (106,)
).setObjects(*_lsTrapObjects)
if mibBuilder.loadTexts:
    netBotzLinkStatusSensorReplugged.setStatus('current')

netBotzLinkStatusSensorError = NotificationType(
    _lsTrapBase + (1,)
).setObjects(*_lsTrapObjects)
if mibBuilder.loadTexts:
    netBotzLinkStatusSensorError.setStatus('current')

netBotzLinkStatusSensorErrorRTN = NotificationType(
    _lsTrapBase + (101,)
).setObjects(*_lsTrapObjects)
if mibBuilder.loadTexts:
    netBotzLinkStatusSensorErrorRTN.setStatus('current')

netBotzLinkStatusSensorValueError = NotificationType(
    _lsTrapBase + (10,)
).setObjects(*_lsTrapObjects)
if mibBuilder.loadTexts:
    netBotzLinkStatusSensorValueError.setStatus('current')

netBotzLinkStatusSensorValueErrorRTN = NotificationType(
    _lsTrapBase + (110,)
).setObjects(*_lsTrapObjects)
if mibBuilder.loadTexts:
    netBotzLinkStatusSensorValueErrorRTN.setStatus('current')
# Loop-voltage sensor traps (NETBOTZ320-MIB). Same 16-varbind payload as the
# other sensor traps in this module; build the pairs once and reuse them.
_lvTrapObjects = tuple(
    ("NETBOTZ320-MIB", _sym)
    for _sym in (
        "netBotzTrapErrorID", "netBotzTrapErrorType", "netBotzTrapErrorTypeLabel",
        "netBotzTrapSensorID", "netBotzTrapSensorLabel", "netBotzTrapPodID",
        "netBotzTrapPodLabel", "netBotzTrapPortID", "netBotzTrapPortLabel",
        "netBotzTrapStartTime", "netBotzTrapNotifyTime", "netBotzTrapResolveTime",
        "netBotzTrapSeverity", "netBotzTrapSensorValue", "netBotzTrapSensorValueInt",
        "netBotzTrapSensorValueFraction",
    )
)

# Common OID prefix for the loop-voltage notifications; the final arc selects
# the individual trap (return-to-normal variants use base arc + 100).
_lvTrapBase = (1, 3, 6, 1, 4, 1, 5528, 100, 10, 2, 31, 0)

netBotzLoopVoltageTooHigh = NotificationType(
    _lvTrapBase + (2,)
).setObjects(*_lvTrapObjects)
if mibBuilder.loadTexts:
    netBotzLoopVoltageTooHigh.setStatus('current')

netBotzLoopVoltageTooHighRTN = NotificationType(
    _lvTrapBase + (102,)
).setObjects(*_lvTrapObjects)
if mibBuilder.loadTexts:
    netBotzLoopVoltageTooHighRTN.setStatus('current')

netBotzLoopVoltageTooLow = NotificationType(
    _lvTrapBase + (3,)
).setObjects(*_lvTrapObjects)
if mibBuilder.loadTexts:
    netBotzLoopVoltageTooLow.setStatus('current')

netBotzLoopVoltageTooLowRTN = NotificationType(
    _lvTrapBase + (103,)
).setObjects(*_lvTrapObjects)
if mibBuilder.loadTexts:
    netBotzLoopVoltageTooLowRTN.setStatus('current')

netBotzLoopVoltageTooHighTooLong = NotificationType(
    _lvTrapBase + (4,)
).setObjects(*_lvTrapObjects)
if mibBuilder.loadTexts:
    netBotzLoopVoltageTooHighTooLong.setStatus('current')

netBotzLoopVoltageTooHighTooLongRTN = NotificationType(
    _lvTrapBase + (104,)
).setObjects(*_lvTrapObjects)
if mibBuilder.loadTexts:
    netBotzLoopVoltageTooHighTooLongRTN.setStatus('current')

netBotzLoopVoltageTooLowTooLong = NotificationType(
    _lvTrapBase + (5,)
).setObjects(*_lvTrapObjects)
if mibBuilder.loadTexts:
    netBotzLoopVoltageTooLowTooLong.setStatus('current')

# NOTE: the "...TooLowForTooLongRTN" spelling (vs. "TooHighTooLongRTN" above)
# comes from the MIB itself; exportSymbols references it under this exact name.
netBotzLoopVoltageTooLowForTooLongRTN = NotificationType(
    _lvTrapBase + (105,)
).setObjects(*_lvTrapObjects)
if mibBuilder.loadTexts:
    netBotzLoopVoltageTooLowForTooLongRTN.setStatus('current')

netBotzLoopVoltageUnplugged = NotificationType(
    _lvTrapBase + (6,)
).setObjects(*_lvTrapObjects)
if mibBuilder.loadTexts:
    netBotzLoopVoltageUnplugged.setStatus('current')

netBotzLoopVoltageReplugged = NotificationType(
    _lvTrapBase + (106,)
).setObjects(*_lvTrapObjects)
if mibBuilder.loadTexts:
    netBotzLoopVoltageReplugged.setStatus('current')

netBotzLoopVoltageIncreasingTooQuickly = NotificationType(
    _lvTrapBase + (8,)
).setObjects(*_lvTrapObjects)
if mibBuilder.loadTexts:
    netBotzLoopVoltageIncreasingTooQuickly.setStatus('current')

netBotzLoopVoltageNotIncreasingTooQuickly = NotificationType(
    _lvTrapBase + (108,)
).setObjects(*_lvTrapObjects)
if mibBuilder.loadTexts:
    netBotzLoopVoltageNotIncreasingTooQuickly.setStatus('current')

netBotzLoopVoltageDecreasingTooQuickly = NotificationType(
    _lvTrapBase + (9,)
).setObjects(*_lvTrapObjects)
if mibBuilder.loadTexts:
    netBotzLoopVoltageDecreasingTooQuickly.setStatus('current')

netBotzLoopVoltageNotDecreasingTooQuickly = NotificationType(
    _lvTrapBase + (109,)
).setObjects(*_lvTrapObjects)
if mibBuilder.loadTexts:
    netBotzLoopVoltageNotDecreasingTooQuickly.setStatus('current')

netBotzLoopVoltageError = NotificationType(
    _lvTrapBase + (1,)
).setObjects(*_lvTrapObjects)
if mibBuilder.loadTexts:
    netBotzLoopVoltageError.setStatus('current')

netBotzLoopVoltageErrorRTN = NotificationType(
    _lvTrapBase + (101,)
).setObjects(*_lvTrapObjects)
if mibBuilder.loadTexts:
    netBotzLoopVoltageErrorRTN.setStatus('current')
# Register this module's managed-object and notification symbols with the MIB
# builder so other MIB modules can import them by ("NETBOTZ320-MIB", name).
# Generated code: the keyword-argument list must match the names defined above
# exactly, so it is left untouched.
mibBuilder.exportSymbols("NETBOTZ320-MIB", doorSwitchSensorValue=doorSwitchSensorValue, netBotzPacketDropSensorTraps=netBotzPacketDropSensorTraps, netBotzTrapSeverity=netBotzTrapSeverity, airFlowSensorIndex=airFlowSensorIndex, netBotzGPSMovementNotDecreasingTooQuickly=netBotzGPSMovementNotDecreasingTooQuickly, humiSensorErrorStatus=humiSensorErrorStatus, netBotzGPSPositionTooHigh=netBotzGPSPositionTooHigh, netBotzGPSPositionDecreasingTooQuickly=netBotzGPSPositionDecreasingTooQuickly, netBotzLinkStatusSensorValueError=netBotzLinkStatusSensorValueError, netBotzAmpDetectTooHighRTN=netBotzAmpDetectTooHighRTN, ampDetectSensorValueInt=ampDetectSensorValueInt, netBotzAudioError=netBotzAudioError, netBotzGPSPositionTooHighRTN=netBotzGPSPositionTooHighRTN, netBotzMultiRAEDeviceStatusReplugged=netBotzMultiRAEDeviceStatusReplugged, otherStateSensorIndex=otherStateSensorIndex, netBotzTVSignalSensorValueError=netBotzTVSignalSensorValueError, netBotzSNMPCrawlerSensorValueError=netBotzSNMPCrawlerSensorValueError, dryContactSensorTable=dryContactSensorTable, otherStateSensorId=otherStateSensorId, netBotzAmpDetectTooLow=netBotzAmpDetectTooLow, netBotzLoopVoltageIncreasingTooQuickly=netBotzLoopVoltageIncreasingTooQuickly, netBotzAudioTooHighRTN=netBotzAudioTooHighRTN, netBotzDryContactValueError=netBotzDryContactValueError, netBotz550Rack=netBotz550Rack, tempSensorPortId=tempSensorPortId, netBotzGPSPositionTooLowForTooLongRTN=netBotzGPSPositionTooLowForTooLongRTN, netBotzCCTVPodTraps=netBotzCCTVPodTraps, enclosureLabel=enclosureLabel, tempSensorEncId=tempSensorEncId, dewPointSensorValueInt=dewPointSensorValueInt, netBotzCCTVPodReplugged=netBotzCCTVPodReplugged, netBotzLoopVoltageUnplugged=netBotzLoopVoltageUnplugged, netBotzSNMPCrawlerIncreasingTooQuickly=netBotzSNMPCrawlerIncreasingTooQuickly, airFlowSensorValueStr=airFlowSensorValueStr, netBotzDoorSensorValueErrorRTN=netBotzDoorSensorValueErrorRTN, netBotzLoopVoltageTooHighTooLong=netBotzLoopVoltageTooHighTooLong, 
netBotzTempTooLowRTN=netBotzTempTooLowRTN, netBotzCameraPodUnplugged=netBotzCameraPodUnplugged, netBotz320Rack=netBotz320Rack, netBotzBotz=netBotzBotz, netBotzGPSMovementError=netBotzGPSMovementError, netBotzSpeakerPlugSensorUnplugged=netBotzSpeakerPlugSensorUnplugged, netBotzMicPlugSensorErrorRTN=netBotzMicPlugSensorErrorRTN, netBotzOutputControlSensorErrorRTN=netBotzOutputControlSensorErrorRTN, dryContactSensorEntry=dryContactSensorEntry, netBotzGPSPositionIncreasingTooQuickly=netBotzGPSPositionIncreasingTooQuickly, humiSensorValue=humiSensorValue, netBotzPacketDropReplugged=netBotzPacketDropReplugged, netBotzGPSMovementTooLowRTN=netBotzGPSMovementTooLowRTN, netBotzPacketDropTooHighTooLong=netBotzPacketDropTooHighTooLong, doorSwitchSensorLabel=doorSwitchSensorLabel, netBotzDewPointSensorTraps=netBotzDewPointSensorTraps, dinPortSupportsDryContact=dinPortSupportsDryContact, enclosureEntry=enclosureEntry, netBotzTempTooHighTooLong=netBotzTempTooHighTooLong, doorSwitchSensorId=doorSwitchSensorId, netBotz320EWall=netBotz320EWall, tempSensorValueInt=tempSensorValueInt, netBotzTempTooLowTooLong=netBotzTempTooLowTooLong, netBotzMicPlugSensorError=netBotzMicPlugSensorError, netBotz450Rack=netBotz450Rack, netBotzGPSStatusSensorReplugged=netBotzGPSStatusSensorReplugged, humiSensorPortId=humiSensorPortId, netBotzTempTooLow=netBotzTempTooLow, netBotzGPSPositionSensorTraps=netBotzGPSPositionSensorTraps, netBotzPacketDropUnplugged=netBotzPacketDropUnplugged, cameraMotionSensorErrorStatus=cameraMotionSensorErrorStatus, netBotzMicPlugSensorReplugged=netBotzMicPlugSensorReplugged, dinPortEncId=dinPortEncId, netBotzPodTraps=netBotzPodTraps, humiSensorValueInt=humiSensorValueInt, dryContactSensorValue=dryContactSensorValue, netBotzMicPlugSensorValueError=netBotzMicPlugSensorValueError, netBotzMultiRAESensorTooLowForTooLongRTN=netBotzMultiRAESensorTooLowForTooLongRTN, netBotz4to20mAPodReplugged=netBotz4to20mAPodReplugged, 
netBotzPacketDropTooLowForTooLongRTN=netBotzPacketDropTooLowForTooLongRTN, netBotzHumidityNotIncreasingTooQuickly=netBotzHumidityNotIncreasingTooQuickly, netBotzSNMPCrawlerNotDecreasingTooQuickly=netBotzSNMPCrawlerNotDecreasingTooQuickly, errorCondStartTimeStr=errorCondStartTimeStr, netBotzAirFlowTooLowForTooLongRTN=netBotzAirFlowTooLowForTooLongRTN, airFlowSensorValue=airFlowSensorValue, dinPortLabel=dinPortLabel, raeSystemsAreaConnect500=raeSystemsAreaConnect500, cameraMotionSensorEncId=cameraMotionSensorEncId, netBotzAirFlowSensorTraps=netBotzAirFlowSensorTraps, netBotzGPSMovementTooHighTooLongRTN=netBotzGPSMovementTooHighTooLongRTN, netBotzMultiRAESensorUnplugged=netBotzMultiRAESensorUnplugged, netBotzMultiRAESensorNotIncreasingTooQuickly=netBotzMultiRAESensorNotIncreasingTooQuickly, netBotzDoorSensorUnplugged=netBotzDoorSensorUnplugged, cameraMotionSensorTable=cameraMotionSensorTable, errorCondSeverity=errorCondSeverity, netBotzOutputControlSensorUnplugged=netBotzOutputControlSensorUnplugged, doorSwitchSensorEncId=doorSwitchSensorEncId, netBotzOutputControlSensorValueErrorRTN=netBotzOutputControlSensorValueErrorRTN, netBotzAudioReplugged=netBotzAudioReplugged, cameraMotionSensorPortId=cameraMotionSensorPortId, netBotz420Wall=netBotz420Wall, netBotzDewPointTooHighRTN=netBotzDewPointTooHighRTN, otherNumericSensorLabel=otherNumericSensorLabel, cameraMotionSensorEntry=cameraMotionSensorEntry, netBotzHumidityTooLowTooLong=netBotzHumidityTooLowTooLong, netBotzErrors=netBotzErrors, netBotzGPSPositionUnplugged=netBotzGPSPositionUnplugged, netBotzSNMPCrawlerError=netBotzSNMPCrawlerError, netBotzMultiRAESensorTooHighRTN=netBotzMultiRAESensorTooHighRTN, netBotzPacketDropError=netBotzPacketDropError, audioSensorValueInt=audioSensorValueInt, netBotzNumericSensors=netBotzNumericSensors, netBotzSNMPCrawlerDecreasingTooQuickly=netBotzSNMPCrawlerDecreasingTooQuickly, netBotzTempErrorRTN=netBotzTempErrorRTN, netBotzGPSStatusSensorValueError=netBotzGPSStatusSensorValueError, 
otherStateSensorLabel=otherStateSensorLabel, netBotzTempSensorTraps=netBotzTempSensorTraps, otherNumericSensorEntry=otherNumericSensorEntry, doorSwitchSensorPortId=doorSwitchSensorPortId, ampDetectSensorId=ampDetectSensorId, audioSensorTable=audioSensorTable, cameraMotionSensorValue=cameraMotionSensorValue, netBotzSNMPCrawlerTooLowRTN=netBotzSNMPCrawlerTooLowRTN, netBotzHumidityTooLow=netBotzHumidityTooLow, netBotzPodReplugged=netBotzPodReplugged, otherPortEntry=otherPortEntry, netBotzMultiRAEDeviceStatusTraps=netBotzMultiRAEDeviceStatusTraps, audioSensorId=audioSensorId, netBotzWirelessStatusSensorValueErrorRTN=netBotzWirelessStatusSensorValueErrorRTN, netBotzMicPlugSensorUnplugged=netBotzMicPlugSensorUnplugged, netBotzDoorSensorValueError=netBotzDoorSensorValueError, netBotzDryContactReplugged=netBotzDryContactReplugged, netBotzRmtLinkError=netBotzRmtLinkError, netBotzGPSStatusSensorValueErrorRTN=netBotzGPSStatusSensorValueErrorRTN, netBotzDryContactUnplugged=netBotzDryContactUnplugged, errorCondResolvedTimeStr=errorCondResolvedTimeStr, netBotzGPSPositionError=netBotzGPSPositionError, netBotzGPSMovementTooLow=netBotzGPSMovementTooLow, humiSensorEntry=humiSensorEntry, netBotzSNMPCrawlerSensorTraps=netBotzSNMPCrawlerSensorTraps, netBotzMultiRAESensorErrorRTN=netBotzMultiRAESensorErrorRTN, netBotzLoopVoltageTooLowForTooLongRTN=netBotzLoopVoltageTooLowForTooLongRTN, humiSensorLabel=humiSensorLabel, netBotzPlugModuleStatusSensorReplugged=netBotzPlugModuleStatusSensorReplugged, netBotzAudioTooHighTooLong=netBotzAudioTooHighTooLong, netBotzHumidityErrorRTN=netBotzHumidityErrorRTN, netBotzSNMPCrawlerTooHighRTN=netBotzSNMPCrawlerTooHighRTN, dinPortSupportsAverage=dinPortSupportsAverage, dewPointSensorPortId=dewPointSensorPortId, netBotzCameraMotionSensorReplugged=netBotzCameraMotionSensorReplugged, netBotzPacketDropNotDecreasingTooQuickly=netBotzPacketDropNotDecreasingTooQuickly, netBotzWirelessStatusSensorTraps=netBotzWirelessStatusSensorTraps, 
netBotzMultiRAESensorTooHigh=netBotzMultiRAESensorTooHigh, netBotzAudioDecreasingTooQuickly=netBotzAudioDecreasingTooQuickly, netBotzAmpDetectTooLowRTN=netBotzAmpDetectTooLowRTN, netBotzLinkStatusSensorReplugged=netBotzLinkStatusSensorReplugged, netBotzTempReplugged=netBotzTempReplugged, netBotzDewPointTooLowTooLong=netBotzDewPointTooLowTooLong, netBotzHumidityTooHighTooLongRTN=netBotzHumidityTooHighTooLongRTN, netBotzGPSMovementUnplugged=netBotzGPSMovementUnplugged, otherNumericSensorEncId=otherNumericSensorEncId, netBotzAirFlowIncreasingTooQuickly=netBotzAirFlowIncreasingTooQuickly, dewPointSensorLabel=dewPointSensorLabel, tempSensorValueIntF=tempSensorValueIntF, netBotzGPSStatusSensorError=netBotzGPSStatusSensorError, netBotzCameraMotionSensorValueErrorRTN=netBotzCameraMotionSensorValueErrorRTN, otherNumericSensorValueStr=otherNumericSensorValueStr, netBotzSensorTraps=netBotzSensorTraps, OperStatus=OperStatus, otherStateSensorEntry=otherStateSensorEntry, netBotzHumidityIncreasingTooQuickly=netBotzHumidityIncreasingTooQuickly, otherNumericSensorValue=otherNumericSensorValue, tempSensorEntry=tempSensorEntry, airFlowSensorValueInt=airFlowSensorValueInt, netBotzHumidityUnplugged=netBotzHumidityUnplugged, dewPointSensorEncId=dewPointSensorEncId, netBotzTrapSensorID=netBotzTrapSensorID, netBotzPacketDropTooLowTooLong=netBotzPacketDropTooLowTooLong, otherStateSensorEncId=otherStateSensorEncId, netBotzMultiRAESensorStatusValueError=netBotzMultiRAESensorStatusValueError, netBotzDewPointIncreasingTooQuickly=netBotzDewPointIncreasingTooQuickly, netBotzSensorPodUnplugged=netBotzSensorPodUnplugged, netBotzDryContactValueErrorRTN=netBotzDryContactValueErrorRTN, netBotzLogonError=netBotzLogonError, netBotzMultiRAESensorNotDecreasingTooQuickly=netBotzMultiRAESensorNotDecreasingTooQuickly, netBotzMultiRAESensorStatusErrorRTN=netBotzMultiRAESensorStatusErrorRTN, dewPointSensorId=dewPointSensorId, cameraMotionSensorLabel=cameraMotionSensorLabel, 
netBotzPacketDropTooHigh=netBotzPacketDropTooHigh, netBotzMultiRAESensorStatusTraps=netBotzMultiRAESensorStatusTraps, otherPortStatus=otherPortStatus, airFlowSensorPortId=airFlowSensorPortId, dewPointSensorValueStr=dewPointSensorValueStr, dryContactSensorId=dryContactSensorId, netBotzGPSMovementDecreasingTooQuickly=netBotzGPSMovementDecreasingTooQuickly, netBotzDewPointTooLowForTooLongRTN=netBotzDewPointTooLowForTooLongRTN, errorCondEncId=errorCondEncId, dinPortStatus=dinPortStatus, netBotzWirelessStatusSensorReplugged=netBotzWirelessStatusSensorReplugged, dinPortIndex=dinPortIndex, netBotzMultiRAEDeviceStatusValueErrorRTN=netBotzMultiRAEDeviceStatusValueErrorRTN, ampDetectSensorValueStr=ampDetectSensorValueStr, netBotzWirelessStatusSensorError=netBotzWirelessStatusSensorError, netBotzTempTooLowForTooLongRTN=netBotzTempTooLowForTooLongRTN, netBotzGPSMovementTooLowTooLong=netBotzGPSMovementTooLowTooLong, doorSwitchSensorTable=doorSwitchSensorTable, netBotzSpeakerPlugSensorValueError=netBotzSpeakerPlugSensorValueError, dinPortEntry=dinPortEntry, netBotzAudioNotIncreasingTooQuickly=netBotzAudioNotIncreasingTooQuickly, enclosureId=enclosureId, netBotzHumidityDecreasingTooQuickly=netBotzHumidityDecreasingTooQuickly, netBotzTVSignalSensorUnplugged=netBotzTVSignalSensorUnplugged, enclosureParentEncId=enclosureParentEncId, doorSwitchSensorValueStr=doorSwitchSensorValueStr, ampDetectSensorTable=ampDetectSensorTable, netBotzAmpDetectNotIncreasingTooQuickly=netBotzAmpDetectNotIncreasingTooQuickly, netBotzTempDecreasingTooQuickly=netBotzTempDecreasingTooQuickly, netBotzTrapPodID=netBotzTrapPodID, netBotzDoorSensorReplugged=netBotzDoorSensorReplugged, netBotzPlugModuleStatusSensorErrorRTN=netBotzPlugModuleStatusSensorErrorRTN, netBotzTVSignalSensorError=netBotzTVSignalSensorError, otherNumericSensorTable=otherNumericSensorTable, netBotz4to20mAPodUnplugged=netBotz4to20mAPodUnplugged, tempSensorLabel=tempSensorLabel, netBotzAmpDetectUnplugged=netBotzAmpDetectUnplugged, 
tempSensorIndex=tempSensorIndex, netBotzGPSPositionReplugged=netBotzGPSPositionReplugged, netBotzGPSStatusSensorUnplugged=netBotzGPSStatusSensorUnplugged, netBotz=netBotz, netBotzPorts=netBotzPorts, netBotzTempUnplugged=netBotzTempUnplugged, audioSensorValue=audioSensorValue, netBotzPlugModuleStatusSensorValueError=netBotzPlugModuleStatusSensorValueError, netBotzDewPointNotIncreasingTooQuickly=netBotzDewPointNotIncreasingTooQuickly, netBotz355Wall=netBotz355Wall, dryContactSensorValueStr=dryContactSensorValueStr, netBotzPodUnplugged=netBotzPodUnplugged, netBotzDewPointReplugged=netBotzDewPointReplugged, netBotzSpeakerPlugSensorReplugged=netBotzSpeakerPlugSensorReplugged, netBotzAirFlowTooLow=netBotzAirFlowTooLow, otherStateSensorValue=otherStateSensorValue, netBotzWirelessStatusSensorValueError=netBotzWirelessStatusSensorValueError, netBotzSNMPCrawlerTooHigh=netBotzSNMPCrawlerTooHigh, dryContactSensorLabel=dryContactSensorLabel, netBotzSensorPodReplugged=netBotzSensorPodReplugged, netBotzLinkStatusSensorTraps=netBotzLinkStatusSensorTraps, netBotzMultiRAESensorTooLow=netBotzMultiRAESensorTooLow, audioSensorEncId=audioSensorEncId, netBotzMicPlugSensorValueErrorRTN=netBotzMicPlugSensorValueErrorRTN, netBotzHumidityReplugged=netBotzHumidityReplugged, netBotzDewPointError=netBotzDewPointError, otherStateSensorPortId=otherStateSensorPortId, dewPointSensorValueIntF=dewPointSensorValueIntF, netBotz320ERack=netBotz320ERack, netBotzPacketDropTooLowRTN=netBotzPacketDropTooLowRTN, errorCondTypeId=errorCondTypeId, netBotzDryContactSensorTraps=netBotzDryContactSensorTraps, netBotzAudioTooLowForTooLongRTN=netBotzAudioTooLowForTooLongRTN)
mibBuilder.exportSymbols("NETBOTZ320-MIB", netBotzDoorSensorError=netBotzDoorSensorError, netBotzPlugModuleStatusSensorError=netBotzPlugModuleStatusSensorError, netBotzAirFlowTooHighTooLong=netBotzAirFlowTooHighTooLong, netBotzOutputControlSensorError=netBotzOutputControlSensorError, errorCondId=errorCondId, netBotzLoopVoltageTooHigh=netBotzLoopVoltageTooHigh, ampDetectSensorErrorStatus=ampDetectSensorErrorStatus, netBotzGPSMovementNotIncreasingTooQuickly=netBotzGPSMovementNotIncreasingTooQuickly, netBotzDewPointTooHighTooLongRTN=netBotzDewPointTooHighTooLongRTN, netBotzAirFlowError=netBotzAirFlowError, netBotzPacketDropDecreasingTooQuickly=netBotzPacketDropDecreasingTooQuickly, netBotzGPSPositionTooLow=netBotzGPSPositionTooLow, otherNumericSensorValueIntX1000000=otherNumericSensorValueIntX1000000, doorSwitchSensorEntry=doorSwitchSensorEntry, netBotzAudioTooHighTooLongRTN=netBotzAudioTooHighTooLongRTN, ampDetectSensorEncId=ampDetectSensorEncId, netBotzTrapSensorValueFraction=netBotzTrapSensorValueFraction, netBotzGPSPositionTooHighTooLongRTN=netBotzGPSPositionTooHighTooLongRTN, audioSensorLabel=audioSensorLabel, netBotzTrapResolveTime=netBotzTrapResolveTime, netBotzMultiRAEDeviceStatusUnplugged=netBotzMultiRAEDeviceStatusUnplugged, netBotzLoopVoltageTooHighRTN=netBotzLoopVoltageTooHighRTN, netBotzPlugModuleStatusSensorUnplugged=netBotzPlugModuleStatusSensorUnplugged, netBotzLoopVoltageTooLowRTN=netBotzLoopVoltageTooLowRTN, netBotzGPSMovementIncreasingTooQuickly=netBotzGPSMovementIncreasingTooQuickly, netBotzLoopVoltageNotDecreasingTooQuickly=netBotzLoopVoltageNotDecreasingTooQuickly, netBotzMultiRAEDeviceStatusValueError=netBotzMultiRAEDeviceStatusValueError, airFlowSensorErrorStatus=airFlowSensorErrorStatus, netBotzAudioTooHigh=netBotzAudioTooHigh, netBotzHumidityTooLowForTooLongRTN=netBotzHumidityTooLowForTooLongRTN, netBotzLoopVoltageReplugged=netBotzLoopVoltageReplugged, netBotzPacketDropNotIncreasingTooQuickly=netBotzPacketDropNotIncreasingTooQuickly, 
netBotzMultiRAESensorReplugged=netBotzMultiRAESensorReplugged, netBotzTrapStartTime=netBotzTrapStartTime, netBotzPacketDropTooHighTooLongRTN=netBotzPacketDropTooHighTooLongRTN, netBotz455Wall=netBotz455Wall, netBotzTraps=netBotzTraps, netBotzAmpDetectTooHigh=netBotzAmpDetectTooHigh, tempSensorErrorStatus=tempSensorErrorStatus, errorCondStartTime=errorCondStartTime, cameraMotionSensorIndex=cameraMotionSensorIndex, netBotzGPSMovementTooHighRTN=netBotzGPSMovementTooHighRTN, netBotzLoopVoltageError=netBotzLoopVoltageError, enclosureErrorStatus=enclosureErrorStatus, ampDetectSensorPortId=ampDetectSensorPortId, netBotzAirFlowErrorRTN=netBotzAirFlowErrorRTN, audioSensorPortId=audioSensorPortId, otherPortEncId=otherPortEncId, otherNumericSensorId=otherNumericSensorId, netBotzSNMPCrawlerReplugged=netBotzSNMPCrawlerReplugged, netBotzAudioNotDecreasingTooQuickly=netBotzAudioNotDecreasingTooQuickly, netBotzPlugModuleStatusSensorTraps=netBotzPlugModuleStatusSensorTraps, netBotzMultiRAESensorDecreasingTooQuickly=netBotzMultiRAESensorDecreasingTooQuickly, netBotzCameraMotionSensorError=netBotzCameraMotionSensorError, netBotzCameraPodReplugged=netBotzCameraPodReplugged, netBotzAmpDetectErrorRTN=netBotzAmpDetectErrorRTN, netBotzMultiRAESensorTooHighTooLongRTN=netBotzMultiRAESensorTooHighTooLongRTN, netBotzAirFlowTooHigh=netBotzAirFlowTooHigh, netBotz220Camera=netBotz220Camera, netBotzTVSignalSensorErrorRTN=netBotzTVSignalSensorErrorRTN, netBotzTrapPortLabel=netBotzTrapPortLabel, netBotzSNMPCrawlerSensorValueErrorRTN=netBotzSNMPCrawlerSensorValueErrorRTN, enclosureIndex=enclosureIndex, netBotzSpeakerPlugSensorTraps=netBotzSpeakerPlugSensorTraps, netBotzCameraPodTraps=netBotzCameraPodTraps, netBotzHumidityNotDecreasingTooQuickly=netBotzHumidityNotDecreasingTooQuickly, netBotzPacketDropTooLow=netBotzPacketDropTooLow, netBotzHumidityError=netBotzHumidityError, apprion500=apprion500, otherStateSensorTable=otherStateSensorTable, 
netBotzMultiRAESensorStatusUnplugged=netBotzMultiRAESensorStatusUnplugged, netBotzGPSMovementSensorTraps=netBotzGPSMovementSensorTraps, netBotzWirelessStatusSensorErrorRTN=netBotzWirelessStatusSensorErrorRTN, dryContactSensorEncId=dryContactSensorEncId, cameraMotionSensorId=cameraMotionSensorId, netBotzHumidityTooHighTooLong=netBotzHumidityTooHighTooLong, airFlowSensorTable=airFlowSensorTable, dewPointSensorValue=dewPointSensorValue, netBotzAirFlowTooLowTooLong=netBotzAirFlowTooLowTooLong, netBotzAudioIncreasingTooQuickly=netBotzAudioIncreasingTooQuickly, humiSensorIndex=humiSensorIndex, otherNumericSensorErrorStatus=otherNumericSensorErrorStatus, otherNumericSensorUnits=otherNumericSensorUnits, netBotzDewPointTooLow=netBotzDewPointTooLow, ampDetectSensorEntry=ampDetectSensorEntry, netBotzMultiRAEDeviceStatusErrorRTN=netBotzMultiRAEDeviceStatusErrorRTN, netBotzSNMPCrawlerErrorRTN=netBotzSNMPCrawlerErrorRTN, netBotzHumiditySensorTraps=netBotzHumiditySensorTraps, netBotzGPSStatusSensorErrorRTN=netBotzGPSStatusSensorErrorRTN, netBotzMultiRAESensorTooHighTooLong=netBotzMultiRAESensorTooHighTooLong, errorCondResolved=errorCondResolved, netBotzLoopVoltageTooLowTooLong=netBotzLoopVoltageTooLowTooLong, otherNumericSensorIndex=otherNumericSensorIndex, netBotzAmpDetectError=netBotzAmpDetectError, netBotzLoopVoltageTooLow=netBotzLoopVoltageTooLow, netBotzTempError=netBotzTempError, netBotzAmpDetectSensorTraps=netBotzAmpDetectSensorTraps, netBotzTrapNotifyTime=netBotzTrapNotifyTime, netBotzTrapSensorLabel=netBotzTrapSensorLabel, dinPortSupportsRMS=dinPortSupportsRMS, netBotzDriveNotFoundError=netBotzDriveNotFoundError, avocent500=avocent500, netBotzPlugModuleStatusSensorValueErrorRTN=netBotzPlugModuleStatusSensorValueErrorRTN, dewPointSensorTable=dewPointSensorTable, netBotzGPSMovementTooHighTooLong=netBotzGPSMovementTooHighTooLong, enclosureDockedToEncId=enclosureDockedToEncId, netBotzSNMPCrawlerTooLow=netBotzSNMPCrawlerTooLow, otherPortLabel=otherPortLabel, 
netBotzGPSPositionNotIncreasingTooQuickly=netBotzGPSPositionNotIncreasingTooQuickly, netBotzMultiRAESensorStatusValueErrorRTN=netBotzMultiRAESensorStatusValueErrorRTN, netBotzSpeakerPlugSensorErrorRTN=netBotzSpeakerPlugSensorErrorRTN, netBotzOutputControlSensorTraps=netBotzOutputControlSensorTraps, humiSensorTable=humiSensorTable, netBotzAmpDetectNotDecreasingTooQuickly=netBotzAmpDetectNotDecreasingTooQuickly, netBotzMultiRAESensorError=netBotzMultiRAESensorError, netBotzDoorSensorErrorRTN=netBotzDoorSensorErrorRTN, netBotzCameraMotionSensorValueError=netBotzCameraMotionSensorValueError, netBotzAirFlowUnplugged=netBotzAirFlowUnplugged, netBotzMultiRAESensorTraps=netBotzMultiRAESensorTraps, netBotz_APC=netBotz_APC, netBotzCameraMotionSensorUnplugged=netBotzCameraMotionSensorUnplugged, netBotzGPSMovementTooHigh=netBotzGPSMovementTooHigh, humiSensorValueStr=humiSensorValueStr, otherNumericSensorValueInt=otherNumericSensorValueInt, netBotzTVSignalSensorTraps=netBotzTVSignalSensorTraps, netBotzAmpDetectTooLowTooLong=netBotzAmpDetectTooLowTooLong, netBotzTrapPortID=netBotzTrapPortID, netBotzMultiRAESensorIncreasingTooQuickly=netBotzMultiRAESensorIncreasingTooQuickly, netBotzAmpDetectTooHighTooLong=netBotzAmpDetectTooHighTooLong, netBotzAmpDetectReplugged=netBotzAmpDetectReplugged, enclosureTable=enclosureTable, netBotzHumidityTooHighRTN=netBotzHumidityTooHighRTN, doorSwitchSensorIndex=doorSwitchSensorIndex, dryContactSensorErrorStatus=dryContactSensorErrorStatus, errorCondEntry=errorCondEntry, netBotzGenericTraps=netBotzGenericTraps, netBotzMultiRAESensorTooLowRTN=netBotzMultiRAESensorTooLowRTN, otherPortTable=otherPortTable, netBotzMultiRAESensorStatusError=netBotzMultiRAESensorStatusError, BoolValue=BoolValue, netBotzLoopVoltageSensorTraps=netBotzLoopVoltageSensorTraps, netBotzDriveNotFoundErrorResolved=netBotzDriveNotFoundErrorResolved, audioSensorEntry=audioSensorEntry, netBotzGPSMovementReplugged=netBotzGPSMovementReplugged, airFlowSensorEntry=airFlowSensorEntry, 
ampDetectSensorLabel=ampDetectSensorLabel, errorCondResolvedTime=errorCondResolvedTime, dinPortTable=dinPortTable, airFlowSensorEncId=airFlowSensorEncId, netBotzGPSMovementErrorRTN=netBotzGPSMovementErrorRTN, netBotzOutputControlSensorValueError=netBotzOutputControlSensorValueError, netBotzPortTraps=netBotzPortTraps, netBotzAirFlowNotIncreasingTooQuickly=netBotzAirFlowNotIncreasingTooQuickly, netBotzTVSignalSensorValueErrorRTN=netBotzTVSignalSensorValueErrorRTN, errorCondSensorId=errorCondSensorId, netBotzTempIncreasingTooQuickly=netBotzTempIncreasingTooQuickly, netBotzAmpDetectTooHighTooLongRTN=netBotzAmpDetectTooHighTooLongRTN, netBotzDewPointTooHigh=netBotzDewPointTooHigh, ampDetectSensorValue=ampDetectSensorValue, netBotzGPSPositionNotDecreasingTooQuickly=netBotzGPSPositionNotDecreasingTooQuickly, netBotzTrapSensorValueInt=netBotzTrapSensorValueInt, netBotzDewPointUnplugged=netBotzDewPointUnplugged, netBotzSpeakerPlugSensorValueErrorRTN=netBotzSpeakerPlugSensorValueErrorRTN, netBotzTempTooHighTooLongRTN=netBotzTempTooHighTooLongRTN, dewPointSensorErrorStatus=dewPointSensorErrorStatus, dinPortSensorIdSuffix=dinPortSensorIdSuffix, netBotzDewPointDecreasingTooQuickly=netBotzDewPointDecreasingTooQuickly, netBotzHumidityTooHigh=netBotzHumidityTooHigh, netBotzMultiRAEDeviceStatusError=netBotzMultiRAEDeviceStatusError, otherNumericSensorPortId=otherNumericSensorPortId, netBotzAPC=netBotzAPC, netBotzTrapParms=netBotzTrapParms, netBotz320Wall=netBotz320Wall, netBotzLinkStatusSensorUnplugged=netBotzLinkStatusSensorUnplugged, netBotzGPSPositionTooHighTooLong=netBotzGPSPositionTooHighTooLong, cameraMotionSensorValueStr=cameraMotionSensorValueStr, netBotzLinkStatusSensorErrorRTN=netBotzLinkStatusSensorErrorRTN, netBotzTrapPodLabel=netBotzTrapPodLabel, ErrorStatus=ErrorStatus, otherPortId=otherPortId, humiSensorEncId=humiSensorEncId, audioSensorIndex=audioSensorIndex, netBotzGPSPositionErrorRTN=netBotzGPSPositionErrorRTN, 
netBotzLinkStatusSensorError=netBotzLinkStatusSensorError, netBotzDryContactErrorRTN=netBotzDryContactErrorRTN, netBotzLoopVoltageNotIncreasingTooQuickly=netBotzLoopVoltageNotIncreasingTooQuickly, netBotzPacketDropErrorRTN=netBotzPacketDropErrorRTN, netBotzEnclosures=netBotzEnclosures, netBotzAudioUnplugged=netBotzAudioUnplugged, netBotzAirFlowDecreasingTooQuickly=netBotzAirFlowDecreasingTooQuickly, netBotzLoopVoltageTooHighTooLongRTN=netBotzLoopVoltageTooHighTooLongRTN, netBotzGPSMovementTooLowForTooLongRTN=netBotzGPSMovementTooLowForTooLongRTN, netBotzCameraMotionSensorErrorRTN=netBotzCameraMotionSensorErrorRTN, netBotzDryContactError=netBotzDryContactError, netBotz420EWall=netBotz420EWall, netBotzDewPointTooLowRTN=netBotzDewPointTooLowRTN, netBotzAirFlowTooHighTooLongRTN=netBotzAirFlowTooHighTooLongRTN, netBotzTVSignalSensorReplugged=netBotzTVSignalSensorReplugged, netBotzAudioTooLowTooLong=netBotzAudioTooLowTooLong, netBotzPacketDropIncreasingTooQuickly=netBotzPacketDropIncreasingTooQuickly, netBotzAmpDetectTooLowForTooLongRTN=netBotzAmpDetectTooLowForTooLongRTN, netBotzAudioErrorRTN=netBotzAudioErrorRTN, tempSensorValueStr=tempSensorValueStr, netBotzWirelessStatusSensorUnplugged=netBotzWirelessStatusSensorUnplugged, errorCondTable=errorCondTable, netBotzSNMPCrawlerTooHighTooLong=netBotzSNMPCrawlerTooHighTooLong, netBotzPacketDropTooHighRTN=netBotzPacketDropTooHighRTN, netBotzStateSensors=netBotzStateSensors, netBotzProducts=netBotzProducts, tempSensorTable=tempSensorTable, errorCondIndex=errorCondIndex, netBotzSNMPCrawlerTooLowTooLong=netBotzSNMPCrawlerTooLowTooLong, dryContactSensorIndex=dryContactSensorIndex, netBotzBasePodTraps=netBotzBasePodTraps, netBotzDewPointErrorRTN=netBotzDewPointErrorRTN, netBotzCCTVPodUnplugged=netBotzCCTVPodUnplugged, enclosureStatus=enclosureStatus, netBotzTempNotIncreasingTooQuickly=netBotzTempNotIncreasingTooQuickly, netBotzTempTooHighRTN=netBotzTempTooHighRTN, netBotzAirFlowTooLowRTN=netBotzAirFlowTooLowRTN, 
netBotzTempTooHigh=netBotzTempTooHigh, netBotzLogonErrorResolved=netBotzLogonErrorResolved, humiSensorId=humiSensorId, netBotzSpeakerPlugSensorError=netBotzSpeakerPlugSensorError, otherNumericSensorValueIntX1000=otherNumericSensorValueIntX1000, dryContactSensorPortId=dryContactSensorPortId, netBotzAmpDetectIncreasingTooQuickly=netBotzAmpDetectIncreasingTooQuickly, netBotzAirFlowNotDecreasingTooQuickly=netBotzAirFlowNotDecreasingTooQuickly, netBotzCameraMotionSensorTraps=netBotzCameraMotionSensorTraps, netBotzDewPointTooHighTooLong=netBotzDewPointTooHighTooLong, netBotzSNMPCrawlerNotIncreasingTooQuickly=netBotzSNMPCrawlerNotIncreasingTooQuickly, netBotzSNMPCrawlerTooHighTooLongRTN=netBotzSNMPCrawlerTooHighTooLongRTN, netBotzSensors=netBotzSensors, netBotzTrapErrorTypeLabel=netBotzTrapErrorTypeLabel, netBotzMicPlugSensorTraps=netBotzMicPlugSensorTraps, netBotzTrapSensorValue=netBotzTrapSensorValue, netBotzAirFlowTooHighRTN=netBotzAirFlowTooHighRTN, netBotzAudioTooLowRTN=netBotzAudioTooLowRTN, netBotzMultiRAESensorTooLowTooLong=netBotzMultiRAESensorTooLowTooLong, tempSensorId=tempSensorId, netBotzWallBotz500=netBotzWallBotz500, netBotzSNMPCrawlerTooLowForTooLongRTN=netBotzSNMPCrawlerTooLowForTooLongRTN, netBotzLoopVoltageDecreasingTooQuickly=netBotzLoopVoltageDecreasingTooQuickly, dewPointSensorIndex=dewPointSensorIndex, netBotzGPSStatusSensorTraps=netBotzGPSStatusSensorTraps, netBotzGPSPositionTooLowRTN=netBotzGPSPositionTooLowRTN, netBotzAirFlowReplugged=netBotzAirFlowReplugged, otherPortIndex=otherPortIndex, tempSensorValue=tempSensorValue, netBotzTrapErrorType=netBotzTrapErrorType, otherStateSensorErrorStatus=otherStateSensorErrorStatus, dewPointSensorEntry=dewPointSensorEntry, netBotzAudioSensorTraps=netBotzAudioSensorTraps, netBotzSensorPodTraps=netBotzSensorPodTraps)
mibBuilder.exportSymbols("NETBOTZ320-MIB", netBotzAudioTooLow=netBotzAudioTooLow, netBotzAmpDetectDecreasingTooQuickly=netBotzAmpDetectDecreasingTooQuickly, netBotzRmtLinkErrorResolved=netBotzRmtLinkErrorResolved, doorSwitchSensorErrorStatus=doorSwitchSensorErrorStatus, netBotzGPSPositionTooLowTooLong=netBotzGPSPositionTooLowTooLong, airFlowSensorId=airFlowSensorId, netBotzHumidityTooLowRTN=netBotzHumidityTooLowRTN, netBotz420Rack=netBotz420Rack, PYSNMP_MODULE_ID=netBotz_APC, netBotzLoopVoltageErrorRTN=netBotzLoopVoltageErrorRTN, netBotzMultiRAESensorSensorValueError=netBotzMultiRAESensorSensorValueError, netBotzMultiRAESensorStatusReplugged=netBotzMultiRAESensorStatusReplugged, dinPortId=dinPortId, errorCondPortId=errorCondPortId, netBotzOutputControlSensorReplugged=netBotzOutputControlSensorReplugged, netBotzErrorStatus=netBotzErrorStatus, netBotzSNMPCrawlerUnplugged=netBotzSNMPCrawlerUnplugged, netBotz4to20mAPodTraps=netBotz4to20mAPodTraps, netBotzTrapErrorID=netBotzTrapErrorID, netBotzMultiRAESensorSensorValueErrorRTN=netBotzMultiRAESensorSensorValueErrorRTN, netBotzDoorSensorTraps=netBotzDoorSensorTraps, netBotzDewPointNotDecreasingTooQuickly=netBotzDewPointNotDecreasingTooQuickly, audioSensorValueStr=audioSensorValueStr, netBotzTempNotDecreasingTooQuickly=netBotzTempNotDecreasingTooQuickly, netBotzLinkStatusSensorValueErrorRTN=netBotzLinkStatusSensorValueErrorRTN, airFlowSensorLabel=airFlowSensorLabel, otherStateSensorValueStr=otherStateSensorValueStr, netBotz420ERack=netBotz420ERack, audioSensorErrorStatus=audioSensorErrorStatus, ampDetectSensorIndex=ampDetectSensorIndex)
| 323.330125
| 13,638
| 0.770599
| 26,987
| 335,940
| 9.592248
| 0.026865
| 0.234021
| 0.006304
| 0.008251
| 0.759416
| 0.756917
| 0.752598
| 0.749759
| 0.743725
| 0.736736
| 0
| 0.078931
| 0.057138
| 335,940
| 1,038
| 13,639
| 323.641619
| 0.738338
| 0.000959
| 0
| 0.004864
| 0
| 0
| 0.500064
| 0.144761
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.005837
| 0
| 0.01751
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
09bf0169966e07b0d51904c218dd1b417423a493
| 108
|
py
|
Python
|
language_acts/cms/management/commands/wt_update_index.py
|
kingsdigitallab/language-acts-docker
|
5597804d16604b525795d3cde3d968028807d7e0
|
[
"MIT"
] | null | null | null |
language_acts/cms/management/commands/wt_update_index.py
|
kingsdigitallab/language-acts-docker
|
5597804d16604b525795d3cde3d968028807d7e0
|
[
"MIT"
] | null | null | null |
language_acts/cms/management/commands/wt_update_index.py
|
kingsdigitallab/language-acts-docker
|
5597804d16604b525795d3cde3d968028807d7e0
|
[
"MIT"
] | null | null | null |
from wagtail.search.management.commands import update_index
class Command(update_index.Command):
    """Project alias for Wagtail's ``update_index`` management command.

    Inherits all behavior unchanged; exists so the command is exposed
    under this project's management namespace.
    """
| 18
| 59
| 0.814815
| 14
| 108
| 6.142857
| 0.785714
| 0.255814
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.12037
| 108
| 5
| 60
| 21.6
| 0.905263
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
09c63973dc0745d764542a77c3b87c595ce0ecba
| 38,034
|
py
|
Python
|
SZR/apps/groups/tests/test_view.py
|
Alek96/SZR
|
6c736cded0c6de88b6e4fc5a207273ec1024365b
|
[
"MIT"
] | 1
|
2019-04-04T17:02:24.000Z
|
2019-04-04T17:02:24.000Z
|
SZR/apps/groups/tests/test_view.py
|
Alek96/SZR
|
6c736cded0c6de88b6e4fc5a207273ec1024365b
|
[
"MIT"
] | 1
|
2019-03-30T13:32:14.000Z
|
2019-03-30T13:32:14.000Z
|
SZR/apps/groups/tests/test_view.py
|
Alek96/SZR
|
6c736cded0c6de88b6e4fc5a207273ec1024365b
|
[
"MIT"
] | null | null | null |
from GitLabApi import objects
from core.tests.test_view import LoginMethods
from core.tests.test_view import SimpleUrlsTestsCases
from django.db.models import QuerySet
from django.urls import reverse
from groups import models
from groups.sidebar import GroupSidebar, FutureGroupSidebar
from groups.tests import test_forms
from groups.tests import models as test_models
class GitlabWrapperAppNameCase:
    """Namespace holder for the shared base test case of the 'groups' app.

    Nesting the base class inside a plain (non-TestCase) wrapper keeps the
    test runner from collecting it directly (presumably -- TODO confirm
    against how SimpleUrlsTestsCases is structured).
    """

    class GitlabWrapperAppNameTest(SimpleUrlsTestsCases.SimpleUrlsTests):
        # All URL names resolved by the subclasses below live in 'groups'.
        app_name = 'groups'
class InitSidebarPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """GET of the sidebar-init page renders sidebar.html with group context."""

    name = 'init_sidebar'
    args = {'group_id': '1'}

    @LoginMethods.login_wrapper
    def test_page_found(self):
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'sidebar.html')
        # Same key/type checks as before, expressed as a table.
        expected = (('group', objects.Group), ('sidebar', GroupSidebar))
        for key, _ in expected:
            self.assertIn(key, response.context)
        for key, kind in expected:
            self.assertIsInstance(response.context[key], kind)
class IndexPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Index page lists the user's GitLab groups."""

    name = 'index'

    @LoginMethods.login_wrapper
    def test_page_found(self):
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/index.html')
        self.assertIn('group_list', response.context)
        # BUG FIX: the original wrapped these assertions in all(...).
        # assertIsInstance() returns None (falsy), so all() stopped after
        # the first element and its (discarded) result was never checked --
        # only the first group was ever validated.  Assert each explicitly.
        for group in response.context['group_list']:
            self.assertIsInstance(group, objects.Group)
class DetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Group detail page exposes group, sidebar and unfinished task lists."""

    name = 'detail'
    args = {'group_id': '1'}

    # context key -> expected type, in the original assertion order.
    _expected_context = (
        ('group', objects.Group),
        ('sidebar', GroupSidebar),
        ('unfinished_add_subgroup_list', QuerySet),
        ('unfinished_add_project_list', QuerySet),
    )

    @LoginMethods.login_wrapper
    def test_page_found(self):
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/detail.html')
        for key, _ in self._expected_context:
            self.assertIn(key, response.context)
        for key, kind in self._expected_context:
            self.assertIsInstance(response.context[key], kind)
class MembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Members page shows the group, its sidebar and pending task list."""

    name = 'members'
    args = {'group_id': '1'}

    @LoginMethods.login_wrapper
    def test_page_found(self):
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/members.html')
        checks = (
            ('group', objects.Group),
            ('sidebar', GroupSidebar),
            ('unfinished_task_list', QuerySet),
        )
        for key, _ in checks:
            self.assertIn(key, response.context)
        for key, kind in checks:
            self.assertIsInstance(response.context[key], kind)
class TasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Tasks page lists finished/unfinished tasks plus 'new ...' links."""

    name = 'tasks'
    args = {'group_id': '1'}

    @LoginMethods.login_wrapper
    def test_page_found(self):
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/tasks.html')
        checks = (
            ('group', objects.Group),
            ('sidebar', GroupSidebar),
            ('unfinished_task_list', list),
            ('finished_task_list', list),
            ('new_group_links', list),
        )
        for key, _ in checks:
            self.assertIn(key, response.context)
        for key, kind in checks:
            self.assertIsInstance(response.context[key], kind)
        # Every link offered on the page must be one of the known
        # task-creation URLs for this group.
        allowed_links = [
            (label, reverse('groups:' + url_name, kwargs=self.args))
            for label, url_name in (
                ('New Task Group', 'new_task_group'),
                ('New Subgroup', 'new_subgroup_task'),
                ('New Project', 'new_project_task'),
                ('New Member', 'new_member_task'),
            )
        ]
        for group_link in response.context['new_group_links']:
            self.assertIn(group_link, allowed_links)
class NewGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Form page for creating a new top-level group."""

    name = 'new_group'

    def _assert_form_page(self, response):
        # GET and invalid POST both re-render the shared form template.
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/form_base_site.html')

    @LoginMethods.login_wrapper
    def test_page_get(self):
        self._assert_form_page(self.client.get(self.get_url()))

    @LoginMethods.login_wrapper
    def test_page_post_not_valid_data(self):
        self._assert_form_page(self.client.post(self.get_url()))

    @LoginMethods.login_wrapper
    def test_page_post_valid_data(self):
        response = self.client.post(
            self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, reverse('groups:index'))
class NewSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Form page for creating a subgroup directly under an existing group."""

    name = 'new_subgroup'
    args = {'group_id': '1'}

    def _assert_form_page(self, response):
        # GET and invalid POST both re-render the shared form template.
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/form_base_site.html')

    @LoginMethods.login_wrapper
    def test_page_get(self):
        self._assert_form_page(self.client.get(self.get_url()))

    @LoginMethods.login_wrapper
    def test_page_post_not_valid_data(self):
        self._assert_form_page(self.client.post(self.get_url()))

    @LoginMethods.login_wrapper
    def test_page_post_valid_data(self):
        response = self.client.post(
            self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args))
class NewTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Form page creating a TaskGroup attached to a gitlab group."""

    name = 'new_task_group'
    args = {'group_id': '1'}

    def _assert_form_page(self, response):
        # GET and invalid POST both re-render the shared form template.
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/form_base_site.html')

    @LoginMethods.login_wrapper
    def test_page_get(self):
        self._assert_form_page(self.client.get(self.get_url()))

    @LoginMethods.login_wrapper
    def test_page_post_not_valid_data(self):
        self._assert_form_page(self.client.post(self.get_url()))

    @LoginMethods.login_wrapper
    def test_page_post_valid_data(self):
        form_data = test_forms.TaskGroupFormTests.valid_form_data
        response = self.client.post(self.get_url(), form_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args))
        # The posted values must have been persisted on the new TaskGroup.
        model = models.TaskGroup.objects.get(
            gitlab_group=models.GitlabGroup.objects.get(
                gitlab_id=self.args['group_id']))
        for key, value in form_data.items():
            self.assertEqual(getattr(model, key), value)
class FutureTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """'new_task_group' addressed by parent-task id (a "future" task group)."""

    name = 'new_task_group'
    # NOTE(review): class-level dict is mutated in setUp()/tests below;
    # setUp() always overwrites 'task_id', so tests stay independent.
    args = {'task_id': None}

    def setUp(self):
        super().setUp()
        # One parent task per test; its id drives URL resolution.
        self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task()
        self.args['task_id'] = self.parent_task.id

    @LoginMethods.login_wrapper
    def test_page_not_found(self):
        # An id one past the only existing parent task must 404.
        self.args['task_id'] += 1
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 404)

    @LoginMethods.login_wrapper
    def test_page_get(self):
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/form_base_site.html')

    @LoginMethods.login_wrapper
    def test_page_post_not_valid_data(self):
        # Empty POST is invalid; the form page is re-rendered.
        response = self.client.post(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/form_base_site.html')

    @LoginMethods.login_wrapper
    def test_page_post_valid_data(self):
        response = self.client.post(self.get_url(), test_forms.TaskGroupFormTests.valid_form_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url,
                         reverse('groups:future_group_tasks', kwargs=self.args))
        # The created TaskGroup hangs off the parent task and carries the
        # posted form values.
        model = models.TaskGroup.objects.get(parent_task=self.parent_task)
        for key, value in test_forms.TaskGroupFormTests.valid_form_data.items():
            self.assertEqual(getattr(model, key), value)
class EditTaskGroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Editing a TaskGroup; redirect target depends on its gitlab group."""

    name = 'edit_task_group'
    # Overwritten per test in setUp() with the real task group id.
    args = {'task_group_id': 1}

    def setUp(self):
        super().setUp()
        self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task()
        self.task_group = test_models.AddSubgroupCreateMethods().create_task_group(
            parent_task=self.parent_task
        )
        self.args['task_group_id'] = self.task_group.id

    @LoginMethods.login_wrapper
    def test_page_not_found(self):
        # A nonexistent task group id must 404.
        self.args['task_group_id'] += 1
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 404)

    @LoginMethods.login_wrapper
    def test_page_get(self):
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/form_base_site.html')

    def _test_page_post_valid_data(self):
        # Shared body: edit the name via the form and confirm persistence;
        # the redirect URL is asserted by the callers below.
        data = self.get_initial_form_data()
        self.assertEqual(data['name'], self.task_group.name)
        data['name'] = 'Another Name'
        response = self.client.post(self.get_url(), data)
        self.assertEqual(response.status_code, 302)
        self.task_group.refresh_from_db()
        self.assertEqual(self.task_group.name, data['name'])
        return response

    @LoginMethods.login_wrapper
    def test_page_post_valid_data_redirect_to_tasks(self):
        # With a concrete gitlab id the edit redirects to the group tasks page.
        self.task_group.gitlab_group.gitlab_id = 42
        self.task_group.gitlab_group.save()
        response = self._test_page_post_valid_data()
        self.assertEqual(response.url,
                         reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id}))

    @LoginMethods.login_wrapper
    def test_page_post_valid_data_redirect_to_future_tasks(self):
        # Without a gitlab id it redirects to the parent task's future tasks.
        response = self._test_page_post_valid_data()
        self.assertEqual(response.url,
                         reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id}))
class NewSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Form page creating an AddSubgroup task inside a task group."""

    name = 'new_subgroup_task'
    # Overwritten per test in setUp() with the real task group id.
    args = {'task_group_id': 1}

    def setUp(self):
        super().setUp()
        self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task()
        self.task_group = test_models.AddSubgroupCreateMethods().create_task_group(
            parent_task=self.parent_task
        )
        self.args['task_group_id'] = self.task_group.id

    @LoginMethods.login_wrapper
    def test_page_not_found(self):
        # A nonexistent task group id must 404.
        self.args['task_group_id'] += 1
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 404)

    @LoginMethods.login_wrapper
    def test_page_get(self):
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/form_base_site.html')

    @LoginMethods.login_wrapper
    def test_page_post_not_valid_data(self):
        # Empty POST is invalid; the form page is re-rendered.
        response = self.client.post(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/form_base_site.html')

    def _test_page_post_valid_data(self):
        # Shared body: POST valid data and confirm the AddSubgroup task was
        # persisted; redirect URL asserted by the callers below.
        response = self.client.post(self.get_url(), test_forms.AddSubgroupFormTests.valid_form_data)
        self.assertEqual(response.status_code, 302)
        model = models.AddSubgroup.objects.get(task_group=self.task_group)
        for key, value in test_forms.AddSubgroupFormTests.valid_form_data.items():
            self.assertEqual(getattr(model, key), value)
        return response

    @LoginMethods.login_wrapper
    def test_page_post_valid_data_redirect_to_tasks(self):
        # With a concrete gitlab id the POST redirects to the group tasks page.
        self.task_group.gitlab_group.gitlab_id = 42
        self.task_group.gitlab_group.save()
        response = self._test_page_post_valid_data()
        self.assertEqual(response.url,
                         reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id}))

    @LoginMethods.login_wrapper
    def test_page_post_valid_data_redirect_to_future_tasks(self):
        # BUG FIX: this method was also named ..._redirect_to_tasks, which
        # silently replaced the method above at class-creation time, so the
        # tasks-redirect test never ran.  Renamed to match the
        # Edit*TaskPageTest siblings (it asserts the future-tasks redirect).
        response = self._test_page_post_valid_data()
        self.assertEqual(response.url,
                         reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id}))
class EditSubgroupTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Editing an AddSubgroup task; redirect depends on its gitlab group."""

    name = 'edit_subgroup_task'
    # Overwritten per test in setUp() with the real task id.
    args = {'task_id': 1}

    def setUp(self):
        super().setUp()
        self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task()
        self.task = test_models.AddSubgroupCreateMethods().create_task(
            parent_task=self.parent_task)
        self.args['task_id'] = self.task.id

    @LoginMethods.login_wrapper
    def test_page_not_found(self):
        # A nonexistent task id must 404.
        self.args['task_id'] += 1
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 404)

    @LoginMethods.login_wrapper
    def test_page_get(self):
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/form_base_site.html')

    def _test_page_post_valid_data(self):
        # Shared body: edit name/description via the form and confirm the
        # changes persisted; redirect URL asserted by the callers below.
        data = self.get_initial_form_data()
        self.assertEqual(data['name'], self.task.name)
        data['name'] = 'Another Name'
        data['description'] = 'Description'
        response = self.client.post(self.get_url(), data)
        self.assertEqual(response.status_code, 302)
        self.task.refresh_from_db()
        self.assertEqual(self.task.name, data['name'])
        self.assertEqual(self.task.description, data['description'])
        return response

    @LoginMethods.login_wrapper
    def test_page_post_valid_data_redirect_to_tasks(self):
        # With a concrete gitlab id the edit redirects to the group tasks page.
        self.task.gitlab_group.gitlab_id = 42
        self.task.gitlab_group.save()
        response = self._test_page_post_valid_data()
        self.assertEqual(response.url,
                         reverse('groups:tasks', kwargs={'group_id': self.task.gitlab_group.gitlab_id}))

    @LoginMethods.login_wrapper
    def test_page_post_valid_data_redirect_to_future_tasks(self):
        # Without a gitlab id it redirects to the parent task's future tasks.
        response = self._test_page_post_valid_data()
        self.assertEqual(response.url,
                         reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id}))
class NewProjectPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Form page for creating a project directly under an existing group."""

    name = 'new_project'
    args = {'group_id': '1'}

    def _assert_form_page(self, response):
        # GET and invalid POST both re-render the shared form template.
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/form_base_site.html')

    @LoginMethods.login_wrapper
    def test_page_get(self):
        self._assert_form_page(self.client.get(self.get_url()))

    @LoginMethods.login_wrapper
    def test_page_post_not_valid_data(self):
        self._assert_form_page(self.client.post(self.get_url()))

    @LoginMethods.login_wrapper
    def test_page_post_valid_data(self):
        response = self.client.post(
            self.get_url(), test_forms.AddProjectFormTests.valid_form_data)
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, reverse('groups:detail', kwargs=self.args))
class NewProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Form page creating an AddProject task inside a task group."""

    name = 'new_project_task'
    # Overwritten per test in setUp() with the real task group id.
    args = {'task_group_id': 1}

    def setUp(self):
        super().setUp()
        self.parent_task = test_models.AddProjectCreateMethods().create_parent_task()
        self.task_group = test_models.AddProjectCreateMethods().create_task_group(
            parent_task=self.parent_task
        )
        self.args['task_group_id'] = self.task_group.id

    @LoginMethods.login_wrapper
    def test_page_not_found(self):
        # A nonexistent task group id must 404.
        self.args['task_group_id'] += 1
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 404)

    @LoginMethods.login_wrapper
    def test_page_get(self):
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/form_base_site.html')

    @LoginMethods.login_wrapper
    def test_page_post_not_valid_data(self):
        # Empty POST is invalid; the form page is re-rendered.
        response = self.client.post(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/form_base_site.html')

    def _test_page_post_valid_data(self):
        # Shared body: POST valid data and confirm the AddProject task was
        # persisted; redirect URL asserted by the callers below.
        response = self.client.post(self.get_url(), test_forms.AddProjectFormTests.valid_form_data)
        self.assertEqual(response.status_code, 302)
        model = models.AddProject.objects.get(task_group=self.task_group)
        for key, value in test_forms.AddProjectFormTests.valid_form_data.items():
            self.assertEqual(getattr(model, key), value)
        return response

    @LoginMethods.login_wrapper
    def test_page_post_valid_data_redirect_to_tasks(self):
        # With a concrete gitlab id the POST redirects to the group tasks page.
        self.task_group.gitlab_group.gitlab_id = 42
        self.task_group.gitlab_group.save()
        response = self._test_page_post_valid_data()
        self.assertEqual(response.url,
                         reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id}))

    @LoginMethods.login_wrapper
    def test_page_post_valid_data_redirect_to_future_tasks(self):
        # BUG FIX: this method was also named ..._redirect_to_tasks, which
        # silently replaced the method above at class-creation time, so the
        # tasks-redirect test never ran.  Renamed to match the
        # Edit*TaskPageTest siblings (it asserts the future-tasks redirect).
        response = self._test_page_post_valid_data()
        self.assertEqual(response.url,
                         reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id}))
class EditProjectTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Editing an AddProject task; redirect depends on its gitlab group."""

    name = 'edit_project_task'
    # Overwritten per test in setUp() with the real task id.
    args = {'task_id': 1}

    def setUp(self):
        super().setUp()
        self.parent_task = test_models.AddProjectCreateMethods().create_parent_task()
        self.task = test_models.AddProjectCreateMethods().create_task(
            parent_task=self.parent_task)
        self.args['task_id'] = self.task.id

    @LoginMethods.login_wrapper
    def test_page_not_found(self):
        # A nonexistent task id must 404.
        self.args['task_id'] += 1
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 404)

    @LoginMethods.login_wrapper
    def test_page_get(self):
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/form_base_site.html')

    def _test_page_post_valid_data(self):
        # Shared body: edit name/description via the form and confirm the
        # changes persisted; redirect URL asserted by the callers below.
        data = self.get_initial_form_data()
        self.assertEqual(data['name'], self.task.name)
        data['name'] = 'Another Name'
        data['description'] = 'Description'
        response = self.client.post(self.get_url(), data)
        self.assertEqual(response.status_code, 302)
        self.task.refresh_from_db()
        self.assertEqual(self.task.name, data['name'])
        self.assertEqual(self.task.description, data['description'])
        return response

    @LoginMethods.login_wrapper
    def test_page_post_valid_data_redirect_to_tasks(self):
        # With a concrete gitlab id the edit redirects to the group tasks page.
        self.task.gitlab_group.gitlab_id = 42
        self.task.gitlab_group.save()
        response = self._test_page_post_valid_data()
        self.assertEqual(response.url,
                         reverse('groups:tasks', kwargs={'group_id': self.task.gitlab_group.gitlab_id}))

    @LoginMethods.login_wrapper
    def test_page_post_valid_data_redirect_to_future_tasks(self):
        # Without a gitlab id it redirects to the parent task's future tasks.
        response = self._test_page_post_valid_data()
        self.assertEqual(response.url,
                         reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id}))
class NewMemberPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Tests for the 'new_member' page of an existing GitLab group."""
    name = 'new_member'
    args = {'group_id': '1'}

    @LoginMethods.login_wrapper
    def test_page_get(self):
        page = self.client.get(self.get_url())
        self.assertEqual(page.status_code, 200)
        self.assertTemplateUsed(page, 'groups/form_base_site.html')

    @LoginMethods.login_wrapper
    def test_page_post_not_valid_data(self):
        # An empty POST re-renders the form page.
        page = self.client.post(self.get_url())
        self.assertEqual(page.status_code, 200)
        self.assertTemplateUsed(page, 'groups/form_base_site.html')

    @LoginMethods.login_wrapper
    def test_page_post_valid_data(self):
        page = self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data)
        self.assertEqual(page.status_code, 302)
        expected = reverse('groups:members', kwargs=self.args)
        self.assertEqual(page.url, expected)
class NewMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Tests for the 'new_member_task' page (creating an AddMember task in a task group)."""
    name = 'new_member_task'
    args = {'task_group_id': 1}

    def setUp(self):
        super().setUp()
        self.parent_task = test_models.AddMemberCreateMethods().create_parent_task()
        self.task_group = test_models.AddMemberCreateMethods().create_task_group(
            parent_task=self.parent_task
        )
        self.args['task_group_id'] = self.task_group.id

    @LoginMethods.login_wrapper
    def test_page_not_found(self):
        # An id that no task group has should yield a 404.
        self.args['task_group_id'] += 1
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 404)

    @LoginMethods.login_wrapper
    def test_page_get(self):
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/form_base_site.html')

    @LoginMethods.login_wrapper
    def test_page_post_not_valid_data(self):
        # An empty POST re-renders the form page instead of redirecting.
        response = self.client.post(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/form_base_site.html')

    def _test_page_post_valid_data(self):
        # Shared helper: POST valid form data, verify the AddMember model was
        # created with the submitted values, and return the redirect response.
        response = self.client.post(self.get_url(), test_forms.AddMemberFormTests.valid_form_data)
        self.assertEqual(response.status_code, 302)
        model = models.AddMember.objects.get(task_group=self.task_group)
        for key, value in test_forms.AddMemberFormTests.valid_form_data.items():
            self.assertEqual(getattr(model, key), value)
        return response

    @LoginMethods.login_wrapper
    def test_page_post_valid_data_redirect_to_tasks(self):
        # When the group already exists on GitLab, redirect to its task list.
        self.task_group.gitlab_group.gitlab_id = 42
        self.task_group.gitlab_group.save()
        response = self._test_page_post_valid_data()
        self.assertEqual(response.url,
                         reverse('groups:tasks', kwargs={'group_id': self.task_group.gitlab_group.gitlab_id}))

    @LoginMethods.login_wrapper
    def test_page_post_valid_data_redirect_to_future_tasks(self):
        # BUG FIX: this method previously reused the name
        # test_page_post_valid_data_redirect_to_tasks, shadowing the method
        # above so only one of the two tests ever ran. Renamed to match the
        # sibling Edit*TaskPageTest classes.
        response = self._test_page_post_valid_data()
        self.assertEqual(response.url,
                         reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id}))
class EditMemberTaskPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Tests for the 'edit_member_task' page (editing an existing AddMember task)."""
    name = 'edit_member_task'
    args = {'task_id': 1}

    def setUp(self):
        super().setUp()
        self.parent_task = test_models.AddMemberCreateMethods().create_parent_task()
        self.task = test_models.AddMemberCreateMethods().create_task(
            parent_task=self.parent_task)
        self.args['task_id'] = self.task.id

    @LoginMethods.login_wrapper
    def test_page_not_found(self):
        # Bump the id past the created task so the lookup fails.
        self.args['task_id'] += 1
        page = self.client.get(self.get_url())
        self.assertEqual(page.status_code, 404)

    @LoginMethods.login_wrapper
    def test_page_get(self):
        page = self.client.get(self.get_url())
        self.assertEqual(page.status_code, 200)
        self.assertTemplateUsed(page, 'groups/form_base_site.html')

    def _test_page_post_valid_data(self):
        # Shared helper: edit the task via POST and verify the change persisted.
        form_data = self.get_initial_form_data()
        self.assertEqual(form_data['username'], self.task.username)
        form_data['username'] = 'Another username'
        redirect = self.client.post(self.get_url(), form_data)
        self.assertEqual(redirect.status_code, 302)
        self.task.refresh_from_db()
        self.assertEqual(self.task.username, form_data['username'])
        return redirect

    @LoginMethods.login_wrapper
    def test_page_post_valid_data_redirect_to_tasks(self):
        # Group exists on GitLab -> redirect to the group's task list.
        self.task.gitlab_group.gitlab_id = 42
        self.task.gitlab_group.save()
        redirect = self._test_page_post_valid_data()
        expected = reverse('groups:tasks', kwargs={'group_id': self.task.gitlab_group.gitlab_id})
        self.assertEqual(redirect.url, expected)

    @LoginMethods.login_wrapper
    def test_page_post_valid_data_redirect_to_future_tasks(self):
        # Group not yet on GitLab -> redirect to the future-group task list.
        redirect = self._test_page_post_valid_data()
        expected = reverse('groups:future_group_tasks', kwargs={'task_id': self.parent_task.id})
        self.assertEqual(redirect.url, expected)
class NewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Tests for the 'new_members_from_file' page of an existing GitLab group."""
    name = 'new_members_from_file'
    args = {'group_id': '1'}

    def setUp(self):
        super().setUp()
        # Rewind the shared upload fixtures so each test reads them from the
        # start. Iterate .values() directly: the keys were unused (PERF102).
        for value in test_forms.MembersFromFileFormTests.valid_file_data.values():
            value.file.seek(0)

    @LoginMethods.login_wrapper
    def test_page_get(self):
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/form_base_site.html')

    @LoginMethods.login_wrapper
    def test_page_post_not_valid_data(self):
        # An empty POST re-renders the form page instead of redirecting.
        response = self.client.post(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/form_base_site.html')

    @LoginMethods.login_wrapper
    def test_page_post_valid_data(self):
        response = self.client.post(self.get_url(), {**test_forms.MembersFromFileFormTests.valid_form_data,
                                                     **test_forms.MembersFromFileFormTests.valid_file_data})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args))
class FutureNewMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Tests for the 'new_members_from_file' page of a not-yet-created (future) group."""
    name = 'new_members_from_file'
    args = {'task_id': None}

    def setUp(self):
        super().setUp()
        self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task()
        self.args['task_id'] = self.parent_task.id
        # Rewind the shared upload fixtures so each test reads them from the
        # start. Iterate .values() directly: the keys were unused (PERF102).
        for value in test_forms.MembersFromFileFormTests.valid_file_data.values():
            value.file.seek(0)

    @LoginMethods.login_wrapper
    def test_page_not_found(self):
        # An id that no parent task has should yield a 404.
        self.args['task_id'] += 1
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 404)

    @LoginMethods.login_wrapper
    def test_page_get(self):
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/form_base_site.html')

    @LoginMethods.login_wrapper
    def test_page_post_not_valid_data(self):
        # An empty POST re-renders the form page instead of redirecting.
        response = self.client.post(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/form_base_site.html')

    @LoginMethods.login_wrapper
    def test_page_post_valid_data(self):
        response = self.client.post(self.get_url(), {**test_forms.MembersFromFileFormTests.valid_form_data,
                                                     **test_forms.MembersFromFileFormTests.valid_file_data})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url,
                         reverse('groups:future_group_tasks', kwargs=self.args))
class NewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Tests for the 'new_subgroup_and_members_from_file' page of an existing group."""
    name = 'new_subgroup_and_members_from_file'
    args = {'group_id': '1'}

    def setUp(self):
        super().setUp()
        # Rewind the shared upload fixtures so each test reads them from the
        # start. Iterate .values() directly: the keys were unused (PERF102).
        for value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.values():
            value.file.seek(0)

    @LoginMethods.login_wrapper
    def test_page_get(self):
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/form_base_site.html')

    @LoginMethods.login_wrapper
    def test_page_post_not_valid_data(self):
        # An empty POST re-renders the form page instead of redirecting.
        response = self.client.post(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/form_base_site.html')

    @LoginMethods.login_wrapper
    def test_page_post_valid_data(self):
        response = self.client.post(self.get_url(), {**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data,
                                                     **test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url, reverse('groups:tasks', kwargs=self.args))
class FutureNewSubgroupsAndMembersFromFilePageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Tests for the 'new_subgroup_and_members_from_file' page of a future group."""
    name = 'new_subgroup_and_members_from_file'
    args = {'task_id': None}

    def setUp(self):
        super().setUp()
        self.parent_task = test_models.AddSubgroupCreateMethods().create_parent_task()
        self.args['task_id'] = self.parent_task.id
        # Rewind the shared upload fixtures so each test reads them from the
        # start. Iterate .values() directly: the keys were unused (PERF102).
        for value in test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data.values():
            value.file.seek(0)

    @LoginMethods.login_wrapper
    def test_page_not_found(self):
        # An id that no parent task has should yield a 404.
        self.args['task_id'] += 1
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 404)

    @LoginMethods.login_wrapper
    def test_page_get(self):
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/form_base_site.html')

    @LoginMethods.login_wrapper
    def test_page_post_not_valid_data(self):
        # An empty POST re-renders the form page instead of redirecting.
        response = self.client.post(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/form_base_site.html')

    @LoginMethods.login_wrapper
    def test_page_post_valid_data(self):
        response = self.client.post(self.get_url(), {**test_forms.SubgroupAndMembersFromFileFormTests.valid_form_data,
                                                     **test_forms.SubgroupAndMembersFromFileFormTests.valid_file_data})
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response.url,
                         reverse('groups:future_group_tasks', kwargs=self.args))
class FutureGroupDetailPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Tests for the 'future_group_detail' page of a not-yet-created group."""
    name = 'future_group_detail'
    args = {'task_id': None}

    def setUp(self):
        super().setUp()
        self.task = test_models.AddSubgroupCreateMethods().create_task()
        self.args['task_id'] = self.task.id

    @LoginMethods.login_wrapper
    def test_page_not_found(self):
        # Bump the id past the created task so the lookup fails.
        self.args['task_id'] += 1
        page = self.client.get(self.get_url())
        self.assertEqual(page.status_code, 404)

    @LoginMethods.login_wrapper
    def test_page_found(self):
        page = self.client.get(self.get_url())
        self.assertEqual(page.status_code, 200)
        self.assertTemplateUsed(page, 'groups/tasks/detail.html')
        # Each required context key must be present and of the expected type.
        expected_context = [
            ('task', models.AddSubgroup),
            ('sidebar', FutureGroupSidebar),
            ('unfinished_add_subgroup_list', QuerySet),
            ('unfinished_add_project_list', QuerySet),
        ]
        for key, expected_type in expected_context:
            self.assertIn(key, page.context)
            self.assertIsInstance(page.context[key], expected_type)
class FutureGroupMembersPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Tests for the 'future_group_members' page of a not-yet-created group."""
    name = 'future_group_members'
    args = {'task_id': None}

    def setUp(self):
        super().setUp()
        self.task = test_models.AddSubgroupCreateMethods().create_task()
        self.args['task_id'] = self.task.id

    @LoginMethods.login_wrapper
    def test_page_not_found(self):
        # Bump the id past the created task so the lookup fails.
        self.args['task_id'] += 1
        page = self.client.get(self.get_url())
        self.assertEqual(page.status_code, 404)

    @LoginMethods.login_wrapper
    def test_page_found(self):
        page = self.client.get(self.get_url())
        self.assertEqual(page.status_code, 200)
        self.assertTemplateUsed(page, 'groups/tasks/members.html')
        # Each required context key must be present and of the expected type.
        expected_context = [
            ('task', models.AddSubgroup),
            ('sidebar', FutureGroupSidebar),
            ('unfinished_task_list', QuerySet),
        ]
        for key, expected_type in expected_context:
            self.assertIn(key, page.context)
            self.assertIsInstance(page.context[key], expected_type)
class FutureGroupTasksPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Tests for the 'future_group_tasks' page of a not-yet-created group."""
    name = 'future_group_tasks'
    args = {'task_id': None}

    def setUp(self):
        super().setUp()
        self.task = test_models.AddSubgroupCreateMethods().create_task()
        self.args['task_id'] = self.task.id

    @LoginMethods.login_wrapper
    def test_page_not_found(self):
        # Bump the id past the created task so the lookup fails.
        self.args['task_id'] += 1
        page = self.client.get(self.get_url())
        self.assertEqual(page.status_code, 404)

    @LoginMethods.login_wrapper
    def test_page_found(self):
        page = self.client.get(self.get_url())
        self.assertEqual(page.status_code, 200)
        self.assertTemplateUsed(page, 'groups/tasks/tasks.html')
        # Each required context key must be present and of the expected type.
        expected_context = [
            ('task', models.AddSubgroup),
            ('sidebar', FutureGroupSidebar),
            ('unfinished_task_list', list),
            ('finished_task_list', list),
            ('new_group_links', list),
        ]
        for key, expected_type in expected_context:
            self.assertIn(key, page.context)
            self.assertIsInstance(page.context[key], expected_type)
        # Every link offered by the page must come from the allowed set.
        allowed_links = [
            ('New Task Group', reverse('groups:new_task_group', kwargs=self.args)),
            ('New Subgroup', reverse('groups:new_subgroup_task', kwargs=self.args)),
            ('New Project', reverse('groups:new_project_task', kwargs=self.args)),
            ('New Member', reverse('groups:new_member_task', kwargs=self.args))
        ]
        for link in page.context['new_group_links']:
            self.assertIn(link, allowed_links)
class AjaxLoadSubgroupPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Tests for the 'ajax_load_subgroups' partial (subgroups only, empty project list)."""
    name = 'ajax_load_subgroups'
    args = {'group_id': '1'}

    @LoginMethods.login_wrapper
    def test_page_found(self):
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/ajax/load_subgroups_and_projects.html')
        self.assertIn('group_list', response.context)
        self.assertIsInstance(response.context['group_list'], list)
        # BUG FIX: the old ``all(self.assertIsInstance(...) for ...)`` pattern
        # short-circuited after the first element because assertIsInstance
        # returns None (falsy), so only the first group was ever checked.
        for group in response.context['group_list']:
            self.assertIsInstance(group, objects.GroupSubgroup)
        self.assertIn('project_list', response.context)
        self.assertEqual(response.context['project_list'], [])
class AjaxLoadSubgroupAndProjectsPageTest(GitlabWrapperAppNameCase.GitlabWrapperAppNameTest):
    """Tests for the 'ajax_load_subgroups_and_projects' partial (subgroups and projects)."""
    name = 'ajax_load_subgroups_and_projects'
    args = {'group_id': '1'}

    @LoginMethods.login_wrapper
    def test_page_found(self):
        response = self.client.get(self.get_url())
        self.assertEqual(response.status_code, 200)
        self.assertTemplateUsed(response, 'groups/ajax/load_subgroups_and_projects.html')
        self.assertIn('group_list', response.context)
        self.assertIsInstance(response.context['group_list'], list)
        # BUG FIX: the old ``all(self.assertIsInstance(...) for ...)`` pattern
        # short-circuited after the first element because assertIsInstance
        # returns None (falsy), so only the first item of each list was checked.
        for group in response.context['group_list']:
            self.assertIsInstance(group, objects.GroupSubgroup)
        self.assertIn('project_list', response.context)
        for project in response.context['project_list']:
            self.assertIsInstance(project, objects.GroupProject)
| 40.504792
| 119
| 0.705474
| 4,412
| 38,034
| 5.810517
| 0.034905
| 0.064363
| 0.085232
| 0.081097
| 0.924637
| 0.894952
| 0.882119
| 0.870651
| 0.86519
| 0.861835
| 0
| 0.008401
| 0.186254
| 38,034
| 938
| 120
| 40.547974
| 0.819903
| 0
| 0
| 0.862637
| 0
| 0
| 0.09828
| 0.045538
| 0
| 0
| 0
| 0
| 0.29533
| 1
| 0.135989
| false
| 0
| 0.012363
| 0
| 0.269231
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
09d4a317e5ecf2de1ddd57bbb2cdc084d1b51abd
| 79
|
py
|
Python
|
problem/01000~09999/01598/1598.py3.py
|
njw1204/BOJ-AC
|
1de41685725ae4657a7ff94e413febd97a888567
|
[
"MIT"
] | 1
|
2019-04-19T16:37:44.000Z
|
2019-04-19T16:37:44.000Z
|
problem/01000~09999/01598/1598.py3.py
|
njw1204/BOJ-AC
|
1de41685725ae4657a7ff94e413febd97a888567
|
[
"MIT"
] | 1
|
2019-04-20T11:42:44.000Z
|
2019-04-20T11:42:44.000Z
|
problem/01000~09999/01598/1598.py3.py
|
njw1204/BOJ-AC
|
1de41685725ae4657a7ff94e413febd97a888567
|
[
"MIT"
] | 3
|
2019-04-19T16:37:47.000Z
|
2021-10-25T00:45:00.000Z
|
def grid_distance(a, b):
    """Return the Manhattan distance between cells *a* and *b* of a 4-row grid.

    Cells are numbered 1..N column-wise: 1-4 fill the first column top to
    bottom, 5-8 the second, and so on. Cell k therefore sits at
    row (k-1) % 4, column (k-1) // 4.
    """
    row_a, col_a = (a - 1) % 4, (a - 1) // 4
    row_b, col_b = (b - 1) % 4, (b - 1) // 4
    return abs(row_a - row_b) + abs(col_a - col_b)


if __name__ == "__main__":
    # Read the two cell numbers from stdin and print their distance,
    # exactly as the original one-liner did.
    A, B = map(int, input().split())
    print(grid_distance(A, B))
| 39.5
| 50
| 0.518987
| 21
| 79
| 1.952381
| 0.47619
| 0.195122
| 0.243902
| 0.292683
| 0.439024
| 0.439024
| 0.439024
| 0
| 0
| 0
| 0
| 0.102564
| 0.012658
| 79
| 2
| 50
| 39.5
| 0.423077
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0.5
| 1
| 0
| 1
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
111e7a9e76483b45ecc6087e052c71e993ec724c
| 5,051
|
py
|
Python
|
tests/test_bfs.py
|
Akards/Parallel-Python
|
00f75e4baa6953e207e7766747a6dc6750c2796c
|
[
"MIT"
] | 1
|
2020-01-21T22:34:18.000Z
|
2020-01-21T22:34:18.000Z
|
tests/test_bfs.py
|
Akards/Parallel-Python
|
00f75e4baa6953e207e7766747a6dc6750c2796c
|
[
"MIT"
] | 1
|
2020-05-06T02:19:42.000Z
|
2020-05-06T02:19:42.000Z
|
tests/test_bfs.py
|
Akards/Medusa
|
00f75e4baa6953e207e7766747a6dc6750c2796c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import unittest
import networkx as nx
from Medusa.graphs import bfs
class TestBFS(unittest.TestCase):
    """Tests for bfs.breadth_first_search distance labelling.

    The six thread-count tests previously contained byte-identical bodies
    differing only in the thread count; they now share one fixture and one
    helper while keeping every public test method name unchanged.
    """

    # Shared fixture: nodes and edges of a small connected graph.
    NODES = ['A', 'B', 'C', 'D', 'E', 'F']
    EDGES = [('A', 'C'), ('A', 'B'), ('C', 'E'), ('B', 'D'), ('D', 'F')]
    # BFS distances from 'A' on the connected fixture graph.
    EXPECTED = {'A': 0, 'B': 1, 'C': 1, 'D': 2, 'E': 2, 'F': 3}

    def _make_graph(self, with_edges=True):
        """Build the fixture graph; with_edges=False leaves it disconnected."""
        G = nx.Graph()
        G.add_nodes_from(self.NODES)
        if with_edges:
            G.add_edges_from(self.EDGES)
        return G

    def _assert_distances(self, G, expected):
        """Assert every node carries the expected 'distance' attribute."""
        for node, distance in expected.items():
            self.assertEqual(G.nodes[node]['distance'], distance)

    def _run_connected(self, num_threads):
        """Run BFS from 'A' with the given thread count and check distances."""
        G = self._make_graph()
        bfs.breadth_first_search(G, 'A', num_threads)
        self._assert_distances(G, self.EXPECTED)

    def test_disconnected_graph(self):
        G = self._make_graph(with_edges=False)
        self.assertEqual(list(G.nodes), self.NODES)
        bfs.breadth_first_search(G, 'A', 1)
        # Unreachable nodes keep the sentinel distance -1.
        self._assert_distances(
            G, {'A': 0, 'B': -1, 'C': -1, 'D': -1, 'E': -1, 'F': -1})

    def test_sequential(self):
        self._run_connected(1)

    def test_parallel_2(self):
        self._run_connected(2)

    def test_parallel_3(self):
        self._run_connected(3)

    def test_parallel_4(self):
        self._run_connected(4)

    def test_parallel_5(self):
        self._run_connected(5)

    def test_parallel_6(self):
        self._run_connected(6)

    def test_parallel_7(self):
        self._run_connected(7)
| 39.155039
| 73
| 0.532964
| 719
| 5,051
| 3.616134
| 0.06815
| 0.282692
| 0.295385
| 0.387692
| 0.929615
| 0.929231
| 0.905385
| 0.905385
| 0.905385
| 0.905385
| 0
| 0.015953
| 0.218175
| 5,051
| 128
| 74
| 39.460938
| 0.642441
| 0.004158
| 0
| 0.775701
| 0
| 0
| 0.111222
| 0
| 0
| 0
| 0
| 0
| 0.457944
| 1
| 0.074766
| false
| 0
| 0.028037
| 0
| 0.11215
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
1147cac2d45b8619099a15ce365744ccd24a7534
| 9,785
|
py
|
Python
|
jsonconf/tests/commandLineTests.py
|
bponsler/jsonconf
|
a620a17a7e69479809b6878eb323086c76509d33
|
[
"MIT"
] | null | null | null |
jsonconf/tests/commandLineTests.py
|
bponsler/jsonconf
|
a620a17a7e69479809b6878eb323086c76509d33
|
[
"MIT"
] | null | null | null |
jsonconf/tests/commandLineTests.py
|
bponsler/jsonconf
|
a620a17a7e69479809b6878eb323086c76509d33
|
[
"MIT"
] | null | null | null |
from unittest import TestCase
from jsonconf import CommandLineParser
class ConfigTests(TestCase):
    """Tests for jsonconf.CommandLineParser argument parsing.

    Argv lists are built by :meth:`_make_args` instead of the previously
    repeated four-line boilerplate, and the duplicated test method name
    ``test_invalidConverter`` is fixed (the second copy — which actually
    tested the *valid* case — used to shadow the first, so the invalid
    converter path was never exercised).
    """

    PROGRAM = "/usr/bin/whatever"

    def setUp(self):
        pass

    @classmethod
    def _make_args(cls, kwargs=None, extraArgs=None):
        """Build an argv list: program name, then key=value pairs, then extras."""
        args = [cls.PROGRAM]
        if kwargs:
            args.extend("%s=%s" % (key, value) for key, value in kwargs.items())
        if extraArgs:
            args.extend(extraArgs)
        return args

    def test_constructor(self):
        parser = CommandLineParser()
        self.assertTrue(parser is not None)
        self.assertEqual(parser.getKeywordArguments(), {})
        self.assertEqual(parser.getExtraArguments(), [])
        self.assertEqual(parser.getProgram(), None)

    def test_emptyArgs(self):
        parser = CommandLineParser()
        parser.parse([])
        self.assertEqual(parser.getKeywordArguments(), {})
        self.assertEqual(parser.getExtraArguments(), [])
        self.assertEqual(parser.getProgram(), None)

    def test_singleArg(self):
        parser = CommandLineParser()
        parser.parse([self.PROGRAM])
        self.assertEqual(parser.getKeywordArguments(), {})
        self.assertEqual(parser.getExtraArguments(), [])
        self.assertEqual(parser.getProgram(), self.PROGRAM)

    def test_extraArgs(self):
        extraArgs = ["one", "two", "-d", "--ignore"]
        parser = CommandLineParser()
        parser.parse(self._make_args(extraArgs=extraArgs))
        self.assertEqual(parser.getKeywordArguments(), {})
        self.assertEqual(parser.getExtraArguments(), extraArgs)
        self.assertEqual(parser.getProgram(), self.PROGRAM)

    def test_keyArgs(self):
        kwargs = {
            "one": '1',
            "two": "2",
            "-d": "hello",
            "--ignore": '5',
        }
        parser = CommandLineParser()
        parser.parse(self._make_args(kwargs))
        self.assertEqual(parser.getKeywordArguments(), kwargs)
        self.assertEqual(parser.getExtraArguments(), [])
        self.assertEqual(parser.getProgram(), self.PROGRAM)

    def test_complexKey(self):
        # Dotted keys must survive parsing unchanged.
        kwargs = {
            "one.two.three": '1',
        }
        parser = CommandLineParser()
        parser.parse(self._make_args(kwargs))
        self.assertEqual(parser.getKeywordArguments(), kwargs)
        self.assertEqual(parser.getExtraArguments(), [])
        self.assertEqual(parser.getProgram(), self.PROGRAM)

    def test_both(self):
        kwargs = {
            "one": '1',
            "two.three": '1',
        }
        extraArgs = ["--test", "-v"]
        parser = CommandLineParser()
        parser.parse(self._make_args(kwargs, extraArgs))
        self.assertEqual(parser.getKeywordArguments(), kwargs)
        self.assertEqual(parser.getExtraArguments(), extraArgs)
        self.assertEqual(parser.getProgram(), self.PROGRAM)

    def test_requiredTest(self):
        # A required key that is absent makes parse() raise.
        parser = CommandLineParser()
        parser.requireKey("verbose")
        self.assertRaises(Exception, parser.parse, self._make_args())

    def test_requiredTest2(self):
        # A required key that is present parses cleanly.
        parser = CommandLineParser()
        parser.requireKey("--verbose")
        parser.parse(self._make_args({"--verbose": 1}))

    def test_invalidConverter(self):
        # Cannot parse the string "hello" to int -> parse() raises.
        parser = CommandLineParser()
        parser.requireKey("--verbose", int)
        self.assertRaises(Exception, parser.parse,
                          self._make_args({"--verbose": "hello"}))

    def test_validConverter(self):
        # BUG FIX: this was a second ``test_invalidConverter`` that shadowed
        # the one above; renamed so both paths actually run.
        parser = CommandLineParser()
        parser.requireKey("--verbose", int)
        parser.parse(self._make_args({"--verbose": "1"}))

    def test_renameKeywordArguments(self):
        # Every alias (and any combination of them) must collapse to "verbose".
        aliases = ["-v", "--verbose", "verbose", "verb"]
        cases = [
            {"--verbose": "1"},
            {"-v": "1"},
            {"verbose": "1"},
            {"verb": "1"},
            {"verbose": "1", "--verbose": "1", "-v": "1", "verb": "1"},
        ]
        for kwargs in cases:
            parser = CommandLineParser()
            parser.renameKeys("verbose", aliases)
            parser.parse(self._make_args(kwargs))
            self.assertEqual(parser.get("verbose"), "1")
            self.assertEqual(parser.getExtraArguments(), [])

    def test_renameExtraArguments(self):
        # Aliases appearing as bare extra arguments are renamed and deduplicated.
        aliases = ["-v", "--verbose", "verbose", "verb"]
        cases = [
            ["-v"],
            ["--verbose"],
            ["verbose"],
            ["verb"],
            ["-v", "--verbose", "verb", "verbose"],
        ]
        for extraArgs in cases:
            parser = CommandLineParser()
            parser.renameKeys("verbose", aliases)
            parser.parse(self._make_args(extraArgs=extraArgs))
            self.assertEqual(parser.getKeywordArguments(), {})
            self.assertEqual(parser.getExtraArguments(), ["verbose"])

    def test_renameOtherArgs(self):
        # Renaming must leave unrelated keyword and extra arguments untouched.
        parser = CommandLineParser()
        parser.renameKeys("verbose", ["-v", "--verbose", "verbose", "verb"])
        parser.parse(self._make_args({"test": "255"}, ["--verbose", "otherArg"]))
        self.assertEqual(parser.getKeywordArguments(), {"test": "255"})
        self.assertEqual(parser.getExtraArguments(), ["verbose", "otherArg"])
| 31.16242
| 77
| 0.565968
| 963
| 9,785
| 5.736241
| 0.073728
| 0.116763
| 0.163469
| 0.06517
| 0.891926
| 0.873642
| 0.870384
| 0.870384
| 0.85735
| 0.836713
| 0
| 0.008747
| 0.252223
| 9,785
| 313
| 78
| 31.261981
| 0.746207
| 0.002657
| 0
| 0.792793
| 0
| 0
| 0.122374
| 0
| 0
| 0
| 0
| 0
| 0.207207
| 1
| 0.067568
| false
| 0.004505
| 0.009009
| 0
| 0.081081
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
fefddd9980e1a08dba069a50445eaacb2d332cd6
| 13,200
|
py
|
Python
|
haas/tests/test_text_test_result.py
|
itziakos/haas
|
93a2e886f66d7fb40f39305032cad6614fcc52a1
|
[
"BSD-3-Clause"
] | 4
|
2017-10-10T06:45:35.000Z
|
2021-02-27T09:44:16.000Z
|
haas/tests/test_text_test_result.py
|
itziakos/haas
|
93a2e886f66d7fb40f39305032cad6614fcc52a1
|
[
"BSD-3-Clause"
] | 34
|
2015-02-24T17:04:15.000Z
|
2017-01-05T12:35:14.000Z
|
haas/tests/test_text_test_result.py
|
itziakos/haas
|
93a2e886f66d7fb40f39305032cad6614fcc52a1
|
[
"BSD-3-Clause"
] | 4
|
2018-03-05T19:05:19.000Z
|
2019-12-11T08:42:22.000Z
|
# -*- coding: utf-8 -*-
# Copyright (c) 2013-2019 Simon Jagoe
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the 3-clause BSD license. See the LICENSE.txt file for details.
from __future__ import absolute_import, unicode_literals
from datetime import datetime, timedelta
from ..plugins.i_result_handler_plugin import IResultHandlerPlugin
from ..result import (
ResultCollector, TestResult, TestCompletionStatus, TestDuration
)
from ..testing import unittest
from . import _test_cases, _test_case_data
from .fixtures import ExcInfoFixture, MockDateTime
from .compat import mock
class TestTextTestResult(ExcInfoFixture, unittest.TestCase):
    """Tests that ``ResultCollector`` forwards each unittest event to the
    registered ``IResultHandlerPlugin`` with exactly one matching hook call
    (and no spurious calls to the other hooks), and that success/failure
    bookkeeping (``wasSuccessful``, ``shouldStop``, ``errors``) is correct.

    ``haas.result.datetime`` is patched with ``MockDateTime`` so that the
    start/end timestamps recorded in ``TestDuration`` are deterministic.
    """
    def test_result_collector_calls_handlers_start_stop_methods(self):
        """Each run/test start/stop event triggers only its own handler hook."""
        # Given
        handler = mock.Mock(spec=IResultHandlerPlugin)
        collector = ResultCollector()
        collector.add_result_handler(handler)
        case = _test_cases.TestCase('test_method')
        # When
        handler.reset_mock()
        collector.startTestRun()
        # Then
        handler.start_test_run.assert_called_once_with()
        # handler itself (the plugin __call__) must not fire on lifecycle events
        self.assertFalse(handler.called)
        self.assertFalse(handler.stop_test_run.called)
        self.assertFalse(handler.start_test.called)
        self.assertFalse(handler.stop_test.called)
        # When
        handler.reset_mock()
        collector.stopTestRun()
        # Then
        handler.stop_test_run.assert_called_once_with()
        self.assertFalse(handler.called)
        self.assertFalse(handler.start_test_run.called)
        self.assertFalse(handler.start_test.called)
        self.assertFalse(handler.stop_test.called)
        # When
        handler.reset_mock()
        collector.startTest(case)
        # Then
        handler.start_test.assert_called_once_with(case)
        self.assertFalse(handler.called)
        self.assertFalse(handler.start_test_run.called)
        self.assertFalse(handler.stop_test_run.called)
        self.assertFalse(handler.stop_test.called)
        # When
        handler.reset_mock()
        collector.stopTest(case)
        # Then
        handler.stop_test.assert_called_once_with(case)
        self.assertFalse(handler.called)
        self.assertFalse(handler.start_test_run.called)
        self.assertFalse(handler.stop_test_run.called)
        self.assertFalse(handler.start_test.called)
    def test_unicode_traceback(self):
        """An error whose message is non-ASCII UTF-8 bytes is still reported."""
        # Given
        handler = mock.Mock(spec=IResultHandlerPlugin)
        collector = ResultCollector()
        collector.add_result_handler(handler)
        start_time = datetime(2015, 12, 23, 8, 14, 12)
        duration = timedelta(seconds=10)
        end_time = start_time + duration
        expected_duration = TestDuration(start_time, end_time)
        case = _test_cases.TestCase('test_method')
        # When
        with mock.patch('haas.result.datetime', new=MockDateTime(start_time)):
            collector.startTest(case)
        # Then
        self.assertTrue(handler.start_test.called)
        handler.start_test.reset_mock()
        # Given: an encoded (bytes) non-ASCII failure message
        msg = '\N{GREEK SMALL LETTER PHI}'.encode('utf-8')
        with self.failure_exc_info(msg) as exc_info:
            expected_result = TestResult.from_test_case(
                case, TestCompletionStatus.error, expected_duration,
                exception=exc_info)
            # When
            with mock.patch(
                    'haas.result.datetime', new=MockDateTime(end_time)):
                collector.addError(case, exc_info)
        # Then
        handler.assert_called_once_with(expected_result)
        self.assertFalse(handler.start_test_run.called)
        self.assertFalse(handler.stop_test_run.called)
        self.assertFalse(handler.start_test.called)
        self.assertFalse(handler.stop_test.called)
        self.assertFalse(collector.wasSuccessful())
    def test_result_collector_calls_handlers_on_error(self):
        """addError produces one handler call with an error TestResult."""
        # Given
        handler = mock.Mock(spec=IResultHandlerPlugin)
        collector = ResultCollector()
        collector.add_result_handler(handler)
        start_time = datetime(2015, 12, 23, 8, 14, 12)
        duration = timedelta(seconds=10)
        end_time = start_time + duration
        expected_duration = TestDuration(start_time, end_time)
        case = _test_cases.TestCase('test_method')
        # When
        with mock.patch('haas.result.datetime', new=MockDateTime(start_time)):
            collector.startTest(case)
        # Then
        self.assertTrue(handler.start_test.called)
        handler.start_test.reset_mock()
        # When
        with self.exc_info(RuntimeError) as exc_info:
            # Given
            expected_result = TestResult.from_test_case(
                case, TestCompletionStatus.error, expected_duration,
                exception=exc_info)
            # When
            with mock.patch(
                    'haas.result.datetime', new=MockDateTime(end_time)):
                collector.addError(case, exc_info)
        # Then
        handler.assert_called_once_with(expected_result)
        self.assertFalse(handler.start_test_run.called)
        self.assertFalse(handler.stop_test_run.called)
        self.assertFalse(handler.start_test.called)
        self.assertFalse(handler.stop_test.called)
        self.assertFalse(collector.wasSuccessful())
    def test_result_collector_calls_handlers_on_failure(self):
        """addFailure produces one handler call with a failure TestResult."""
        # Given
        handler = mock.Mock(spec=IResultHandlerPlugin)
        collector = ResultCollector()
        collector.add_result_handler(handler)
        start_time = datetime(2015, 12, 23, 8, 14, 12)
        duration = timedelta(seconds=10)
        end_time = start_time + duration
        expected_duration = TestDuration(start_time, end_time)
        case = _test_cases.TestCase('test_method')
        # When
        with mock.patch('haas.result.datetime', new=MockDateTime(start_time)):
            collector.startTest(case)
        # Then
        self.assertTrue(handler.start_test.called)
        handler.start_test.reset_mock()
        # Given
        with self.failure_exc_info() as exc_info:
            expected_result = TestResult.from_test_case(
                case, TestCompletionStatus.failure, expected_duration,
                exception=exc_info)
            # When
            with mock.patch(
                    'haas.result.datetime', new=MockDateTime(end_time)):
                collector.addFailure(case, exc_info)
        # Then
        handler.assert_called_once_with(expected_result)
        self.assertFalse(handler.start_test_run.called)
        self.assertFalse(handler.stop_test_run.called)
        self.assertFalse(handler.start_test.called)
        self.assertFalse(handler.stop_test.called)
        self.assertFalse(collector.wasSuccessful())
    def test_result_collector_calls_handlers_on_success(self):
        """addSuccess produces one handler call; run remains successful."""
        # Given
        handler = mock.Mock(spec=IResultHandlerPlugin)
        collector = ResultCollector()
        collector.add_result_handler(handler)
        start_time = datetime(2015, 12, 23, 8, 14, 12)
        duration = timedelta(seconds=10)
        end_time = start_time + duration
        expected_duration = TestDuration(start_time, end_time)
        case = _test_cases.TestCase('test_method')
        # When
        with mock.patch('haas.result.datetime', new=MockDateTime(start_time)):
            collector.startTest(case)
        # Then
        self.assertTrue(handler.start_test.called)
        handler.start_test.reset_mock()
        # Given
        expected_result = TestResult.from_test_case(
            case, TestCompletionStatus.success, expected_duration)
        # When
        with mock.patch('haas.result.datetime', new=MockDateTime(end_time)):
            collector.addSuccess(case)
        # Then
        handler.assert_called_once_with(expected_result)
        self.assertFalse(handler.start_test_run.called)
        self.assertFalse(handler.stop_test_run.called)
        self.assertFalse(handler.start_test.called)
        self.assertFalse(handler.stop_test.called)
        self.assertTrue(collector.wasSuccessful())
    def test_result_collector_calls_handlers_on_skip(self):
        """addSkip records a skipped result; run remains successful."""
        # Given
        handler = mock.Mock(spec=IResultHandlerPlugin)
        collector = ResultCollector()
        collector.add_result_handler(handler)
        start_time = datetime(2015, 12, 23, 8, 14, 12)
        duration = timedelta(seconds=10)
        end_time = start_time + duration
        expected_duration = TestDuration(start_time, end_time)
        case = _test_cases.TestCase('test_method')
        # When
        with mock.patch('haas.result.datetime', new=MockDateTime(start_time)):
            collector.startTest(case)
        # Then
        self.assertTrue(handler.start_test.called)
        handler.start_test.reset_mock()
        # Given
        expected_result = TestResult.from_test_case(
            case, TestCompletionStatus.skipped, expected_duration,
            message='reason')
        # When
        with mock.patch('haas.result.datetime', new=MockDateTime(end_time)):
            collector.addSkip(case, 'reason')
        # Then
        handler.assert_called_once_with(expected_result)
        self.assertFalse(handler.start_test_run.called)
        self.assertFalse(handler.stop_test_run.called)
        self.assertFalse(handler.start_test.called)
        self.assertFalse(handler.stop_test.called)
        self.assertTrue(collector.wasSuccessful())
    def test_result_collector_calls_handlers_on_expected_fail(self):
        """addExpectedFailure records expected_failure; run stays successful."""
        # Given
        handler = mock.Mock(spec=IResultHandlerPlugin)
        collector = ResultCollector()
        collector.add_result_handler(handler)
        start_time = datetime(2015, 12, 23, 8, 14, 12)
        duration = timedelta(seconds=10)
        end_time = start_time + duration
        expected_duration = TestDuration(start_time, end_time)
        case = _test_cases.TestCase('test_method')
        # When
        with mock.patch('haas.result.datetime', new=MockDateTime(start_time)):
            collector.startTest(case)
        # Then
        self.assertTrue(handler.start_test.called)
        handler.start_test.reset_mock()
        # Given
        with self.exc_info(RuntimeError) as exc_info:
            expected_result = TestResult.from_test_case(
                case, TestCompletionStatus.expected_failure, expected_duration,
                exception=exc_info)
            # When
            with mock.patch(
                    'haas.result.datetime', new=MockDateTime(end_time)):
                collector.addExpectedFailure(case, exc_info)
        # Then
        handler.assert_called_once_with(expected_result)
        self.assertFalse(handler.start_test_run.called)
        self.assertFalse(handler.stop_test_run.called)
        self.assertFalse(handler.start_test.called)
        self.assertFalse(handler.stop_test.called)
        self.assertTrue(collector.wasSuccessful())
    def test_result_collector_calls_handlers_on_unexpected_success(self):
        """addUnexpectedSuccess is reported and marks the run unsuccessful."""
        # Given
        handler = mock.Mock(spec=IResultHandlerPlugin)
        collector = ResultCollector()
        collector.add_result_handler(handler)
        start_time = datetime(2015, 12, 23, 8, 14, 12)
        duration = timedelta(seconds=10)
        end_time = start_time + duration
        expected_duration = TestDuration(start_time, end_time)
        case = _test_cases.TestCase('test_method')
        # When
        with mock.patch('haas.result.datetime', new=MockDateTime(start_time)):
            collector.startTest(case)
        # Then
        self.assertTrue(handler.start_test.called)
        handler.start_test.reset_mock()
        # Given
        expected_result = TestResult.from_test_case(
            case, TestCompletionStatus.unexpected_success, expected_duration)
        # When
        with mock.patch('haas.result.datetime', new=MockDateTime(end_time)):
            collector.addUnexpectedSuccess(case)
        # Then
        handler.assert_called_once_with(expected_result)
        self.assertFalse(handler.start_test_run.called)
        self.assertFalse(handler.stop_test_run.called)
        self.assertFalse(handler.start_test.called)
        self.assertFalse(handler.stop_test.called)
        self.assertFalse(collector.wasSuccessful())
    def test_result_collector_should_stop(self):
        """stop() flips the shouldStop flag."""
        # Given
        collector = ResultCollector()
        # Then
        self.assertFalse(collector.shouldStop)
        # When
        collector.stop()
        # Then
        self.assertTrue(collector.shouldStop)
    def test_multiple_errors_from_one_test(self):
        """A test that errors in both body and tearDown yields two errors."""
        # Given
        collector = ResultCollector()
        case = _test_case_data.TestWithTwoErrors('test_with_two_errors')
        start_time = datetime(2016, 4, 12, 8, 17, 32)
        test_end_time = datetime(2016, 4, 12, 8, 17, 38)
        tear_down_end_time = datetime(2016, 4, 12, 8, 17, 39)
        # When: MockDateTime yields one timestamp per datetime lookup
        with mock.patch(
                'haas.result.datetime',
                new=MockDateTime(
                    [start_time, test_end_time, tear_down_end_time])):
            case.run(collector)
        # Then
        self.assertEqual(len(collector.errors), 2)
| 36.263736
| 79
| 0.663182
| 1,440
| 13,200
| 5.827083
| 0.104861
| 0.087594
| 0.115362
| 0.110118
| 0.850912
| 0.843642
| 0.839471
| 0.832678
| 0.820164
| 0.819569
| 0
| 0.015436
| 0.249091
| 13,200
| 363
| 80
| 36.363636
| 0.831114
| 0.039394
| 0
| 0.771186
| 0
| 0
| 0.035754
| 0
| 0
| 0
| 0
| 0
| 0.305085
| 1
| 0.042373
| false
| 0
| 0.033898
| 0
| 0.080508
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3a01f99953c117fe45a5829d0696fd5dc39fb698
| 12,552
|
py
|
Python
|
src/university/models.py
|
Nikhilgupta18/practice-react_django
|
4226345a10c528308d13629907952e841621badc
|
[
"MIT"
] | null | null | null |
src/university/models.py
|
Nikhilgupta18/practice-react_django
|
4226345a10c528308d13629907952e841621badc
|
[
"MIT"
] | 11
|
2020-09-07T15:48:40.000Z
|
2022-03-08T23:06:16.000Z
|
src/university/models.py
|
Nikhilgupta18/practice-react_django
|
4226345a10c528308d13629907952e841621badc
|
[
"MIT"
] | null | null | null |
from django.db import models
from account.models import Country
from django.contrib import admin
# Graduate streams offered; the plain list is for iteration/membership use.
grad_streams_list = [
    'Engineering',
    'Law',
    'Medicine',
    'Business',
]
# Django-style choices: (stored value, human-readable label) pairs,
# derived from the list so the two stay in sync.
grad_streams = tuple((stream, stream) for stream in grad_streams_list)
class GRE(models.Model):
    """GRE score record: verbal, quantitative and analytical writing (AWA).

    All fields are nullable because score data may be missing.
    """
    verbal = models.IntegerField(default=None, null=True, blank=True)
    quant = models.IntegerField(default=None, null=True, blank=True)
    awa = models.FloatField(default=None, null=True, blank=True)
    def __str__(self):
        # NOTE(review): shows only the verbal score; a null verbal renders
        # as the string 'None'.
        return str(self.verbal)
class MCAT(models.Model):
    """MCAT score record: old/new totals plus the four section scores."""
    old_total = models.IntegerField(default=None, null=True, blank=True)
    new_total = models.IntegerField(default=None, null=True, blank=True)
    chemical_physical = models.IntegerField(default=None, null=True, blank=True)
    critical_analysis = models.IntegerField(default=None, null=True, blank=True)
    biologic_biochemical = models.IntegerField(default=None, null=True, blank=True)
    psycho_social_biological = models.IntegerField(default=None, null=True, blank=True)
    def __str__(self):
        # Displays the new-format total; a null total renders as 'None'.
        return str(self.new_total)
class University(models.Model):
    """Core university record referenced by all per-stream grad models.

    Most fields are nullable text/ints because the scraped source data is
    incomplete; ``country`` is the only required relation.
    """
    name = models.TextField(default=None)
    info_link = models.TextField(default=None, null=True)
    rank = models.IntegerField(default=None, null=True, blank=True)
    country = models.ForeignKey(Country, on_delete=models.CASCADE)
    total_students = models.IntegerField(default=None, null=True, blank=True)
    total_int_students = models.IntegerField(default=None, null=True, blank=True)
    address = models.TextField(default=None, null=True, blank=True)
    website = models.TextField(default=None, null=True, blank=True, max_length=500)
    # NOTE(review): free-text list of schools — schema/format not visible here.
    schools = models.TextField(default=None, null=True, blank=True)
    uni_type = models.TextField(default=None, null=True, blank=True)
    grad_school_link = models.TextField(default=None, null=True, blank=True, max_length=500)
    undergrad_link = models.TextField(default=None, null=True, blank=True, max_length=500)
    business_link = models.TextField(default=None, null=True, blank=True, max_length=500)
    med_link = models.TextField(default=None, null=True, blank=True, max_length=500)
    law_link = models.TextField(default=None, null=True, blank=True, max_length=500)
    engg_link = models.TextField(default=None, null=True, blank=True, max_length=500)
    slug = models.SlugField(default=None, null=True, blank=True, max_length=500)
    # Stored as text — presumably a URL to the logo image; confirm with callers.
    logo = models.TextField(default=None, null=True, blank=True, max_length=500)
    def __str__(self):
        return self.name
class UniversityAdmin(admin.ModelAdmin):
    """Admin config: search universities by name, list ordered by rank."""
    search_fields = ('name',)
    ordering = ('rank',)
class BusinessGrad(models.Model):
    """Business-school graduate programme statistics (one per university).

    Percentage-style fields (international, male, female, acceptance rates,
    employed) are floats; all statistics are nullable for missing data.
    """
    university = models.OneToOneField(University, on_delete=models.CASCADE)
    enrollment = models.IntegerField(default=None, null=True, blank=True)
    international = models.FloatField(default=None, null=True, blank=True)
    male = models.FloatField(default=None, null=True, blank=True)
    female = models.FloatField(default=None, null=True, blank=True)
    acceptance_rate_masters = models.FloatField(default=None, null=True, blank=True) #
    acceptance_rate_phd = models.FloatField(default=None, null=True, blank=True) #
    us_application_fee = models.IntegerField(default=None, null=True, blank=True) #
    int_application_fee = models.IntegerField(default=None, null=True, blank=True) #
    tuition = models.FloatField(default=None, null=True, blank=True)
    us_deadline = models.DateTimeField(default=None, null=True, blank=True)
    int_deadline = models.DateTimeField(default=None, null=True, blank=True)
    rolling = models.BooleanField(default=False)
    gpa = models.FloatField(default=None, null=True, blank=True)
    min_toefl_score = models.IntegerField(default=None, null=True, blank=True)
    mean_toefl_score = models.IntegerField(default=None, null=True, blank=True)
    min_ielts_score = models.FloatField(default=None, null=True, blank=True)
    fin_aid_director_name = models.TextField(default=None, null=True, blank=True)
    fin_aid_director_phone = models.TextField(default=None, null=True, blank=True)
    fellowships = models.IntegerField(default=None, null=True, blank=True)
    teaching_assistantships = models.IntegerField(default=None, null=True, blank=True)
    research_assistantships = models.IntegerField(default=None, null=True, blank=True)
    # look for room and board
    living_expenses = models.IntegerField(default=None, null=True, blank=True)
    # unique to business
    employed = models.FloatField(default=None, null=True, blank=True)
    employed_3_months = models.FloatField(default=None, null=True, blank=True)
    avg_work_ex_months = models.IntegerField(default=None, null=True, blank=True)
    gmat = models.IntegerField(default=None, null=True, blank=True)
    # NOTE(review): required relation — unlike EngineeringGrad.gre this one
    # is not nullable; confirm that is intentional.
    gre = models.OneToOneField(GRE, on_delete=models.CASCADE) #
    avg_salary = models.IntegerField(default=None, null=True, blank=True)
    def __str__(self):
        return self.university.name
class BusinessGradAdmin(admin.ModelAdmin):
    """Admin config: search by related university name, order by its rank."""
    search_fields = ('university__name',)
    ordering = ('university__rank',)
class EngineeringGrad(models.Model):
    """Engineering graduate programme statistics (one per university).

    Mirrors BusinessGrad's common fields; GRE relation is nullable here.
    """
    university = models.OneToOneField(University, on_delete=models.CASCADE) #
    enrollment = models.IntegerField(default=None, null=True, blank=True) #
    us_application_fee = models.IntegerField(default=None, null=True, blank=True) #
    int_application_fee = models.IntegerField(default=None, null=True, blank=True) #
    international = models.FloatField(default=None, null=True, blank=True) #
    male = models.FloatField(default=None, null=True, blank=True) #
    female = models.FloatField(default=None, null=True, blank=True) #
    acceptance_rate_masters = models.FloatField(default=None, null=True, blank=True) #
    acceptance_rate_phd = models.FloatField(default=None, null=True, blank=True) #
    tuition = models.FloatField(default=None, null=True, blank=True) #
    us_deadline = models.DateTimeField(default=None, null=True, blank=True) #
    int_deadline = models.DateTimeField(default=None, null=True, blank=True) #
    rolling = models.BooleanField(default=False) #
    gpa = models.FloatField(default=None, null=True, blank=True) #
    min_toefl_score = models.IntegerField(default=None, null=True, blank=True) #
    mean_toefl_score = models.IntegerField(default=None, null=True, blank=True) #
    min_ielts_score = models.FloatField(default=None, null=True, blank=True) #
    fin_aid_director_name = models.TextField(default=None, null=True, blank=True) #
    fin_aid_director_phone = models.TextField(default=None, null=True, blank=True) #
    fellowships = models.IntegerField(default=None, null=True, blank=True)
    teaching_assistantships = models.IntegerField(default=None, null=True, blank=True) #
    research_assistantships = models.IntegerField(default=None, null=True, blank=True) #
    # look for room and board
    living_expenses = models.IntegerField(default=None, null=True, blank=True) #
    # unique to engineering
    gre = models.OneToOneField(GRE, on_delete=models.CASCADE, null=True, blank=True) #
    def __str__(self):
        return self.university.name
class EngineeringGradAdmin(admin.ModelAdmin):
    """Admin config: search by related university name, order by its rank."""
    search_fields = ('university__name',)
    ordering = ('university__rank',)
class MedicineGrad(models.Model):
    """Medical-school graduate programme statistics (one per university).

    Uses MCAT instead of GRE; the MCAT relation is required.
    """
    university = models.OneToOneField(University, on_delete=models.CASCADE)
    enrollment = models.IntegerField(default=None, null=True, blank=True)
    international = models.FloatField(default=None, null=True, blank=True)
    us_application_fee = models.IntegerField(default=None, null=True, blank=True) #
    int_application_fee = models.IntegerField(default=None, null=True, blank=True) #
    acceptance_rate_masters = models.FloatField(default=None, null=True, blank=True) #
    acceptance_rate_phd = models.FloatField(default=None, null=True, blank=True) #
    male = models.FloatField(default=None, null=True, blank=True)
    female = models.FloatField(default=None, null=True, blank=True)
    tuition = models.FloatField(default=None, null=True, blank=True)
    us_deadline = models.DateTimeField(default=None, null=True, blank=True)
    int_deadline = models.DateTimeField(default=None, null=True, blank=True)
    rolling = models.BooleanField(default=False)
    gpa = models.FloatField(default=None, null=True, blank=True) #
    fin_aid_director_name = models.TextField(default=None, null=True, blank=True)
    fin_aid_director_phone = models.TextField(default=None, null=True, blank=True)
    students_receiving_aid = models.FloatField(default=None, null=True, blank=True)
    # look for room and board
    living_expenses = models.IntegerField(default=None, null=True, blank=True)
    # unique to medicine
    mcat = models.OneToOneField(MCAT, on_delete=models.CASCADE)
    def __str__(self):
        return self.university.name
class MedicineGradAdmin(admin.ModelAdmin):
    """Admin config: search by related university name, order by its rank."""
    search_fields = ('university__name',)
    ordering = ('university__rank',)
class LawGrad(models.Model):
    """Law-school graduate programme statistics (one per university).

    Uses LSAT instead of GRE/MCAT. All statistics are nullable because the
    source data is incomplete.
    """
    university = models.OneToOneField(University, on_delete=models.CASCADE)
    enrollment = models.IntegerField(default=None, null=True, blank=True)
    international = models.FloatField(default=None, null=True, blank=True)
    us_application_fee = models.IntegerField(default=None, null=True, blank=True) #
    int_application_fee = models.IntegerField(default=None, null=True, blank=True) #
    male = models.FloatField(default=None, null=True, blank=True)
    female = models.FloatField(default=None, null=True, blank=True)
    acceptance_rate = models.FloatField(default=None, null=True, blank=True)
    tuition = models.FloatField(default=None, null=True, blank=True)
    us_deadline = models.DateTimeField(default=None, null=True, blank=True)
    int_deadline = models.DateTimeField(default=None, null=True, blank=True)
    rolling = models.BooleanField(default=False)
    int_rolling = models.BooleanField(default=False)
    fin_aid_director_name = models.TextField(default=None, null=True, blank=True)
    fin_aid_director_phone = models.TextField(default=None, null=True, blank=True)
    students_receiving_aid = models.FloatField(default=None, null=True, blank=True)
    gpa = models.FloatField(default=None, null=True, blank=True) #
    # look for room and board
    living_expenses = models.IntegerField(default=None, null=True, blank=True)
    # unique to law
    # look for median lsat
    # FIX: `employed` was declared twice (once above the fin-aid fields and
    # once here); Django keeps the later declaration, so the earlier one was
    # dead code. Only this declaration is retained — effective model and
    # field order are unchanged.
    employed = models.FloatField(default=None, null=True, blank=True)
    bar_passage_rate = models.FloatField(default=None, null=True, blank=True)
    median_grant = models.IntegerField(default=None, null=True, blank=True)
    lsat_score = models.IntegerField(default=None, null=True, blank=True)
    median_public_salary = models.IntegerField(default=None, null=True, blank=True)
    median_private_salary = models.IntegerField(default=None, null=True, blank=True)
    def __str__(self):
        return self.university.name
class LawGradAdmin(admin.ModelAdmin):
    """Admin config: search by related university name, order by its rank."""
    search_fields = ('university__name',)
    ordering = ('university__rank',)
| 55.539823
| 107
| 0.666268
| 1,430
| 12,552
| 5.716084
| 0.1
| 0.149376
| 0.20186
| 0.255689
| 0.892219
| 0.881576
| 0.881576
| 0.876193
| 0.786396
| 0.714093
| 0
| 0.002886
| 0.227135
| 12,552
| 225
| 108
| 55.786667
| 0.839707
| 0.015137
| 0
| 0.582857
| 0
| 0
| 0.018352
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04
| false
| 0.005714
| 0.017143
| 0.04
| 0.931429
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
3a511fa9328d7a9521bf3228806419a4a51edaf6
| 25,003
|
py
|
Python
|
tests/test_forward_backward_tracking.py
|
Megscammell/Estimating-Direction
|
466a6e93b0048510690ac6e4f73ca2dfd3a4d8da
|
[
"MIT"
] | null | null | null |
tests/test_forward_backward_tracking.py
|
Megscammell/Estimating-Direction
|
466a6e93b0048510690ac6e4f73ca2dfd3a4d8da
|
[
"MIT"
] | null | null | null |
tests/test_forward_backward_tracking.py
|
Megscammell/Estimating-Direction
|
466a6e93b0048510690ac6e4f73ca2dfd3a4d8da
|
[
"MIT"
] | null | null | null |
import numpy as np
import est_dir
def test_1():
    """
    Test for compute_forward() - check for flag=True.
    """
    # Fixed seed: the noisy objective and random centre point are reproducible,
    # so the hard-coded expectations below (func_evals == 16, monotone track)
    # are deterministic. Do not reorder any np.random-consuming call.
    np.random.seed(90)
    m = 10
    f = est_dir.quad_f_noise
    const_back = 0.5
    const_forward = (1 / const_back)
    minimizer = np.ones((m,))
    centre_point = np.random.uniform(0, 20, (m, ))
    matrix = est_dir.quad_func_params(1, 10, m)
    func_args = (minimizer, matrix, 0, 5)
    region = 1
    step = 0.17741338024633116
    forward_tol = 1000000
    no_vars = 10
    beta, func_evals = est_dir.compute_direction_LS(m, centre_point, f,
                                                    func_args, no_vars,
                                                    region)
    # NOTE(review): 16 evaluations presumably follows from no_vars=10 inside
    # est_dir.compute_direction_LS — confirm against that implementation.
    assert(func_evals == 16)
    f_old = f(np.copy(centre_point), *func_args)
    f_new = f(np.copy(centre_point) - step * beta, *func_args)
    track = np.array([[0, f_old], [step, f_new]])
    track, count_func_evals, flag = (est_dir.compute_forward
                                     (step, const_forward, forward_tol, track,
                                      centre_point, beta,
                                      f, func_args))
    assert(f_old > f_new)
    assert(count_func_evals == len(track) - 2)
    assert(flag == True)
    assert(track[0][0] == 0)
    # Steps grow by const_forward each iteration; f decreases until the final
    # entry, which must be the first increase (the stopping condition).
    for j in range(1, len(track)):
        assert(track[j][0] == step)
        step = step * const_forward
        if j < len(track) - 1:
            assert(track[j][1] < track[j - 1][1])
        else:
            assert(track[j][1] > track[j - 1][1])
def test_2():
    """
    Test for compute_forward() - check that when flag=False, track is returned.
    """
    np.random.seed(90)
    dim = 2
    f = est_dir.quad_f_noise
    const_back = 0.5
    const_forward = 1 / const_back
    minimizer = np.ones((dim,))
    centre_point = np.array([20, 20])
    matrix = np.identity(dim)
    func_args = (minimizer, matrix, 0, 0.0000001)
    step = 1
    forward_tol = 100000
    beta = np.array([0.0001, 0.0001])
    f_old = f(np.copy(centre_point), *func_args)
    f_new = f(np.copy(centre_point) - step * beta, *func_args)
    track = np.array([[0, f_old], [step, f_new]])
    test_track, count_func_evals, flag = est_dir.compute_forward(
        step, const_forward, forward_tol, track, centre_point, beta, f,
        func_args)
    assert(f_old > f_new)
    assert(flag == False)
    assert(count_func_evals > 0)
    # Every recorded step stays under the tolerance and strictly improves f.
    for idx in range(len(test_track)):
        assert(test_track[idx, 0] < forward_tol)
        if idx >= 1:
            assert(test_track[idx, 1] < test_track[idx - 1, 1])
    # One further enlargement of the final step would exceed the tolerance,
    # which is why the routine bailed out with flag=False.
    assert(test_track[idx, 0] * const_forward > forward_tol)
def test_3():
    """
    Test for forward_tracking - flag=True and f_new >= track[-2][1]
    """
    # Seeded so the random centre point and noisy objective are reproducible.
    np.random.seed(90)
    m = 10
    f = est_dir.quad_f_noise
    const_back = 0.5
    const_forward = (1 / const_back)
    minimizer = np.ones((m,))
    centre_point = np.random.uniform(0, 20, (m, ))
    matrix = est_dir.quad_func_params(1, 10, m)
    func_args = (minimizer, matrix, 0, 5)
    step = 0.05
    forward_tol = 1000000
    no_vars = 10
    region = 1
    beta, func_evals = est_dir.compute_direction_LS(m, centre_point, f,
                                                    func_args, no_vars,
                                                    region)
    # NOTE(review): 16 presumably tied to no_vars=10 in est_dir — confirm.
    assert(func_evals == 16)
    f_old = f(np.copy(centre_point), *func_args)
    f_new = f(np.copy(centre_point) - step * beta, *func_args)
    assert(f_old > f_new)
    track, total_func_evals, flag = (est_dir.forward_tracking
                                     (centre_point, step, f_old, f_new, beta,
                                      const_forward, forward_tol, f,
                                      func_args))
    assert(len(track) - 1 == total_func_evals)
    assert(np.round(track[0][0], 3) == np.round(0, 3))
    assert(np.round(track[1][0], 3) == np.round(step, 3))
    assert(flag == True)
    # From index 2 on, the step doubles each iteration (const_forward == 2);
    # f decreases until the final entry, which must be the first increase.
    for j in range(2, len(track)):
        step = step * 2
        assert(np.round(track[j][0], 3) == step)
        if j == (len(track) - 1):
            assert(track[j][1] > track[j - 1][1])
        else:
            assert(track[j - 1][1] > track[j][1])
def test_4():
    """
    Test for forward_tracking - forward_tol not met and f_new < track[-2][1].
    """
    # Seeded so the noisy objective is reproducible for these assertions.
    np.random.seed(25)
    m = 2
    f = est_dir.quad_f_noise
    const_back = 0.5
    const_forward = (1 / const_back)
    minimizer = np.ones((m,))
    centre_point = np.array([25, 25])
    matrix = est_dir.quad_func_params(1, 10, m)
    func_args = (minimizer, matrix, 0, 10)
    t = 0.005
    forward_tol = 10000
    beta = np.array([1, 1])
    f_old = f(np.copy(centre_point), *func_args)
    f_new = f(np.copy(centre_point) - t * beta, *func_args)
    assert(f_old > f_new)
    track, total_func_evals, flag = (est_dir.forward_tracking
                                     (centre_point, t, f_old, f_new, beta,
                                      const_forward, forward_tol, f,
                                      func_args))
    assert(np.round(track[0][0], 3) == np.round(0, 3))
    assert(np.round(track[1][0], 3) == np.round(t, 3))
    assert(total_func_evals > 0)
    assert(flag == True)
    # f decreases monotonically along the track; only the final entry
    # increases, which is the stopping condition for flag=True.
    for j in range(1, len(track)):
        if j == (len(track) - 1):
            assert(track[j][1] > track[j-1][1])
        else:
            assert(track[j-1][1] > track[j][1])
def test_5():
    """
    Test for forward_tracking - forward_tol not met initially, f_new <
    track[-2][1] and eventually forward_tol is met.
    """
    # Same setup as test_4 except forward_tol is tiny (10), so the step
    # growth hits the tolerance before f ever increases -> flag=False.
    np.random.seed(25)
    m = 2
    f = est_dir.quad_f_noise
    const_back = 0.5
    const_forward = (1 / const_back)
    minimizer = np.ones((m,))
    centre_point = np.array([25, 25])
    matrix = est_dir.quad_func_params(1, 10, m)
    func_args = (minimizer, matrix, 0, 10)
    t = 0.005
    forward_tol = 10
    beta = np.array([1, 1])
    f_old = f(np.copy(centre_point), *func_args)
    f_new = f(np.copy(centre_point) - t * beta, *func_args)
    assert(f_old > f_new)
    track, total_func_evals, flag = (est_dir.forward_tracking
                                     (centre_point, t, f_old, f_new, beta,
                                      const_forward, forward_tol, f,
                                      func_args))
    assert(np.round(track[0][0], 3) == np.round(0, 3))
    assert(np.round(track[1][0], 3) == np.round(t, 3))
    assert(total_func_evals > 0)
    assert(flag == False)
    # With flag=False the whole track is strictly decreasing in f.
    for j in range(1, len(track)):
        assert(track[j-1][1] > track[j][1])
def test_6():
    """
    Test for forward_tracking - forward_tol met.
    """
    np.random.seed(90)
    dim = 2
    f = est_dir.quad_f_noise
    const_back = 0.5
    const_forward = 1 / const_back
    minimizer = np.ones((dim,))
    centre_point = np.array([20, 20])
    matrix = np.identity(dim)
    func_args = (minimizer, matrix, 0, 0.0000001)
    step = 0.5
    forward_tol = 1.5
    beta = np.array([0.0001, 0.0001])
    f_old = f(np.copy(centre_point), *func_args)
    f_new = f(np.copy(centre_point) - step * beta, *func_args)
    track, total_func_evals, flag = est_dir.forward_tracking(
        centre_point, step, f_old, f_new, beta, const_forward, forward_tol,
        f, func_args)
    # The tiny tolerance is exceeded almost immediately: a single extra
    # evaluation is made, f is still decreasing, and flag=False is returned.
    assert(flag == False)
    assert(track[2][1] < track[1][1] < track[0][1])
    assert(total_func_evals == 1)
def test_7():
    """
    Test for compute_backward - check that when flag=True, track is updated.
    """
    # Seeded: minimizer, centre point and noise are reproducible.
    np.random.seed(90)
    m = 100
    f = est_dir.quad_f_noise
    const_back = 0.5
    minimizer = np.random.uniform(0, 1, (m, ))
    centre_point = np.random.uniform(0, 1, (m, ))
    matrix = est_dir.quad_func_params(1, 10, m)
    func_args = (minimizer, matrix, 0, 0.1)
    step = 0.001
    back_tol = 0.000001
    no_vars = 10
    region = 1
    beta, func_evals = est_dir.compute_direction_LS(m, centre_point, f,
                                                    func_args, no_vars,
                                                    region)
    # NOTE(review): 16 presumably tied to no_vars=10 in est_dir — confirm.
    assert(func_evals == 16)
    f_old = f(np.copy(centre_point), *func_args)
    f_new = f(np.copy(centre_point) - step * beta, *func_args)
    assert(f_old > f_new)
    track = np.array([[0, f_old], [step, f_new]])
    track, total_func_evals, flag = (est_dir.compute_backward
                                     (step, const_back, back_tol, track,
                                      centre_point, beta, f, func_args))
    assert(total_func_evals == len(track) - 2)
    assert(flag == True)
    assert(track[0][0] == 0)
    # Steps shrink by const_back each iteration; f decreases until the final
    # entry, which must be the first increase (the stopping condition).
    for j in range(1, len(track)):
        assert(track[j][0] == step)
        step = step * const_back
        if j < len(track) - 1:
            assert(track[j][1] < track[j-1][1])
        else:
            assert(track[j][1] > track[j-1][1])
def test_8():
    """
    Test for compute_backward - check that when flag=False,
    original track is returned.
    """
    # back_tol (0.075) is close to step (0.1): halving the step immediately
    # drops below the tolerance, so no backward move is attempted.
    np.random.seed(90)
    m = 100
    f = est_dir.quad_f_noise
    const_back = 0.5
    minimizer = np.random.uniform(0, 1, (m, ))
    centre_point = np.random.uniform(0, 1, (m, ))
    matrix = est_dir.quad_func_params(1, 10, m)
    func_args = (minimizer, matrix, 0, 0.1)
    step = 0.1
    back_tol = 0.075
    no_vars = 10
    region = 1
    beta, func_evals = est_dir.compute_direction_LS(m, centre_point, f,
                                                    func_args, no_vars,
                                                    region)
    # NOTE(review): 16 presumably tied to no_vars=10 in est_dir — confirm.
    assert(func_evals == 16)
    f_old = f(np.copy(centre_point), *func_args)
    f_new = f(np.copy(centre_point) - step * beta, *func_args)
    assert(f_old > f_new)
    track = np.array([[0, f_old], [step, f_new]])
    track_new, total_func_evals, flag = (est_dir.compute_backward
                                         (step, const_back, back_tol, track,
                                          centre_point, beta, f, func_args))
    # The track comes back unchanged and no evaluations were spent.
    assert(np.all(track == track_new))
    assert(flag == False)
    assert(total_func_evals == 0)
def test_9():
    """
    Test for backward_tracking - back_tol is met.
    """
    np.random.seed(32964)
    dim = 2
    f = est_dir.quad_f_noise
    const_back = 0.5
    minimizer = np.ones((dim,))
    centre_point = np.array([25, 25])
    matrix = est_dir.quad_func_params(1, 10, dim)
    func_args = (minimizer, matrix, 0, 5)
    t = 1
    back_tol = 1
    beta = np.array([200, 200])
    f_old = f(np.copy(centre_point), *func_args)
    f_new = f(np.copy(centre_point) - t * beta, *func_args)
    assert(f_old < f_new)
    track, count_func_evals = est_dir.backward_tracking(
        centre_point, t, f_old, f_new, beta, const_back, back_tol, f,
        func_args)
    # back_tol equals the initial step, so no backward step is attempted:
    # the two-row track is returned untouched and nothing was evaluated.
    assert(track.shape == (2, dim))
    assert(track[0][0] == 0)
    assert(track[1][0] == t)
    assert(track[1][0] < track[1][1])
    assert(count_func_evals == 0)
def test_10():
    """
    Test for backward_tracking - back tol is not met and f_new >
    track[-2][1].
    """
    # Seeded so the random centre point, direction and noise are reproducible;
    # the hard-coded t matches this seed. Do not reorder RNG-consuming calls.
    np.random.seed(32964)
    n = 6
    m = 2
    f = est_dir.quad_f_noise
    const_back = 0.5
    minimizer = np.ones((m,))
    centre_point = np.random.uniform(0, 10, (m,))
    matrix = est_dir.quad_func_params(1, 10, m)
    func_args = (minimizer, matrix, 0, 5)
    t = 97.688932389756
    back_tol = 0.000000001
    no_vars = m
    region = 1
    beta, func_evals = est_dir.compute_direction_XY(n, m, centre_point, f,
                                                    func_args, no_vars,
                                                    region)
    assert(func_evals == n)
    f_old = f(np.copy(centre_point), *func_args)
    f_new = f(np.copy(centre_point) - t * beta, *func_args)
    # Initial step overshoots: f increases, triggering backward tracking.
    assert(f_old < f_new)
    track, total_func_evals = (est_dir.backward_tracking
                               (centre_point, t, f_old, f_new, beta,
                                const_back, back_tol, f, func_args))
    assert(np.round(track[0][0], 3) == np.round(0, 3))
    assert(np.round(track[1][0], 3) == np.round(t, 3))
    assert(total_func_evals > 0)
    # Each subsequent step halves t; some halved step beats the initial f.
    for j in range(1, len(track)):
        assert(np.round(track[j][0], 4) == np.round(t, 4))
        t = t / 2
    assert(np.min(track[:, 1]) < track[1][0])
def test_11():
    """
    Test for backward_tracking - back tol is not met and f_new < track[-2][1]

    High-dimensional, high-noise setting: backward tracking must find at
    least one step whose function value improves on the starting value.
    """
    np.random.seed(329998)
    n = 20
    m = 100
    f = est_dir.quad_f_noise
    const_back = 0.5
    minimizer = np.random.uniform(0, 10, (m,))
    centre_point = np.random.uniform(0, 10, (m,))
    matrix = est_dir.quad_func_params(1, 10, m)
    func_args = (minimizer, matrix, 0, 1000)
    t = 17001.993794080016
    back_tol = 0.000000001
    no_vars = m
    region = 1
    beta, func_evals = est_dir.compute_direction_XY(n, m, centre_point, f,
                                                    func_args, no_vars,
                                                    region)
    assert(func_evals == n)
    f_old = f(np.copy(centre_point), *func_args)
    f_new = f(np.copy(centre_point) - t * beta, *func_args)
    assert(f_old < f_new)
    track, total_func_evals = (est_dir.backward_tracking
                               (centre_point, t, f_old, f_new, beta,
                                const_back, back_tol, f, func_args))
    assert(np.round(track[0][0], 3) == np.round(0, 3))
    assert(np.round(track[1][0], 3) == np.round(t, 3))
    assert(total_func_evals > 0)
    # Some backtracked step achieved a better value than f_old.
    assert(np.min(track[:, 1]) < track[:, 1][0])
def test_12():
    """
    Test for backward_tracking - back tol is not initially met, f_new <
    track[-2][1] and eventually back tol is met.

    Same setting as test_11 but with back_tol = 1, so the halving of the
    step size eventually terminates on the tolerance.
    """
    np.random.seed(329998)
    n = 20
    m = 100
    f = est_dir.quad_f_noise
    const_back = 0.5
    minimizer = np.random.uniform(0, 10, (m,))
    centre_point = np.random.uniform(0, 10, (m,))
    matrix = est_dir.quad_func_params(1, 10, m)
    func_args = (minimizer, matrix, 0, 1000)
    t = 17001.993794080016
    back_tol = 1
    no_vars = m
    region = 1
    beta, func_evals = est_dir.compute_direction_XY(n, m, centre_point, f,
                                                    func_args, no_vars,
                                                    region)
    assert(func_evals == n)
    f_old = f(np.copy(centre_point), *func_args)
    f_new = f(np.copy(centre_point) - t * beta, *func_args)
    assert(f_old < f_new)
    track, total_func_evals = (est_dir.backward_tracking
                               (centre_point, t, f_old, f_new, beta,
                                const_back, back_tol, f, func_args))
    assert(np.round(track[0][0], 3) == np.round(0, 3))
    assert(np.round(track[1][0], 3) == np.round(t, 3))
    assert(total_func_evals > 0)
    # Some backtracked step achieved a better value than f_old.
    assert(np.min(track[:, 1]) < track[:, 1][0])
def test_13():
    """Verify compute_coeffs against a hand-built quadratic OLS fit.

    Fits f(t) ~ c0 + c1*t + c2*t**2 by ordinary least squares and checks
    that compute_coeffs returns the minimizer -c1 / (2*c2).
    """
    track_y = np.array([100, 200, 50])
    track_t = np.array([0, 1, 0.5])
    # Design matrix: constant column (set to f(0)), then t and t**2.
    design = np.column_stack((np.repeat(track_y[0], track_t.size),
                              track_t,
                              track_t ** 2))
    assert np.all(design[0] == np.array([100, 0, 0]))
    assert np.all(design[1] == np.array([100, 1, 1]))
    assert np.all(design[2] == np.array([100, 0.5, 0.25]))
    coeffs = np.linalg.inv(design.T @ design) @ design.T @ track_y
    expected = -coeffs[1] / (2 * coeffs[2])
    opt_t = est_dir.compute_coeffs(track_y, track_t)
    assert np.all(np.round(expected, 5) == np.round(opt_t, 5))
def test_14():
    """
    combine_tracking: a usable step is still returned when forward_tol is
    met (tiny descent direction on a near-noiseless quadratic).
    """
    np.random.seed(90)
    dim = 2
    noisy_quad = est_dir.quad_f_noise
    shrink = 0.5
    grow = 1 / shrink
    minimizer = np.ones((dim,))
    centre_point = np.array([20, 20])
    matrix = np.identity(dim)
    func_args = (minimizer, matrix, 0, 0.0000001)
    step = 1
    forward_tol = 100000
    back_tol = 0.0000001
    direction = np.array([0.0001, 0.0001])
    f_old = noisy_quad(np.copy(centre_point), *func_args)
    upd_point, func_val, total_func_evals = est_dir.combine_tracking(
        centre_point, f_old, direction, step, shrink, back_tol, grow,
        forward_tol, noisy_quad, func_args)
    assert upd_point.shape == (dim, )
    assert type(total_func_evals) is int
    assert func_val < f_old
def test_15():
    """
    Test for combine_tracking - check that correct step size is returned, when
    forward_tol is not met.

    Noisy quadratic with a moderate direction: the chosen step must reduce
    the observed function value.
    """
    np.random.seed(3291)
    m = 2
    f = est_dir.quad_f_noise
    const_back = 0.5
    const_forward = (1 / const_back)
    minimizer = np.ones((m,))
    centre_point = np.array([25, 25])
    matrix = est_dir.quad_func_params(1, 10, m)
    func_args = (minimizer, matrix, 0, 5)
    step = 0.005
    forward_tol = 10000
    back_tol = 0.0000001
    beta = np.array([1, 1])
    f_old = f(np.copy(centre_point), *func_args)
    upd_point, func_val, total_func_evals = (est_dir.combine_tracking
                                             (centre_point, f_old,
                                              beta, step, const_back,
                                              back_tol, const_forward,
                                              forward_tol, f,
                                              func_args))
    assert(upd_point.shape == (m, ))
    assert(type(total_func_evals) is int)
    assert(func_val < f_old)
def test_16():
    """
    combine_tracking: when back_tol stops the backtracking straight away,
    the original function value is handed back unchanged.
    """
    np.random.seed(32964)
    dim = 2
    noisy_quad = est_dir.quad_f_noise
    shrink = 0.5
    grow = 1 / shrink
    minimizer = np.ones((dim,))
    centre_point = np.array([25, 25])
    matrix = est_dir.quad_func_params(1, 10, dim)
    func_args = (minimizer, matrix, 0, 5)
    step = 1
    back_tol = 1
    forward_tol = 100000
    direction = np.array([200, 200])
    f_old = noisy_quad(np.copy(centre_point), *func_args)
    upd_point, func_val, total_func_evals = est_dir.combine_tracking(
        centre_point, f_old, direction, step, shrink, back_tol, grow,
        forward_tol, noisy_quad, func_args)
    assert upd_point.shape == (dim, )
    assert type(total_func_evals) is int
    assert func_val == f_old
def test_17():
    """
    Test for combine_tracking - check that correct step size is returned,
    when back_tol is not met.

    Direction comes from compute_direction_XY; backtracking should succeed
    and return a step that reduces the observed function value.
    """
    np.random.seed(32964)
    n = 6
    m = 2
    f = est_dir.quad_f_noise
    const_back = 0.5
    const_forward = (1 / const_back)
    minimizer = np.ones((m,))
    centre_point = np.random.uniform(0, 10, (m,))
    matrix = est_dir.quad_func_params(1, 10, m)
    func_args = (minimizer, matrix, 0, 5)
    step = 10
    forward_tol = 1000000
    back_tol = 0.000000001
    no_vars = m
    region = 1
    beta, func_evals = est_dir.compute_direction_XY(n, m, centre_point, f,
                                                    func_args, no_vars,
                                                    region)
    assert(func_evals == n)
    f_old = f(np.copy(centre_point), *func_args)
    upd_point, func_val, total_func_evals = (est_dir.combine_tracking
                                             (centre_point, f_old,
                                              beta, step, const_back,
                                              back_tol, const_forward,
                                              forward_tol, f,
                                              func_args))
    assert(upd_point.shape == (m, ))
    assert(type(total_func_evals) is int)
    assert(func_val < f_old)
def test_18():
    """arrange_track_y_t (forward): picks f(0), the best entry, and the last."""
    history = np.array([[0, 100], [1, 80], [2, 160],
                        [4, 40], [8, 20], [16, 90]])
    values, steps = est_dir.arrange_track_y_t(history, 'Forward')
    assert np.array_equal(values, np.array([100, 20, 90]))
    assert np.array_equal(steps, np.array([0, 8, 16]))
def test_19():
    """arrange_track_y_t (forward): best entry is the second-to-last step."""
    history = np.array([[0, 100], [1, 80], [2, 70], [4, 90]])
    values, steps = est_dir.arrange_track_y_t(history, 'Forward')
    assert np.array_equal(values, np.array([100, 70, 90]))
    assert np.array_equal(steps, np.array([0, 2, 4]))
def test_20():
    """arrange_track_y_t (backward): steps shrink, best is the smallest one."""
    history = np.array([[0, 100], [1, 120], [0.5, 110], [0.25, 90]])
    values, steps = est_dir.arrange_track_y_t(history, 'Backward')
    assert np.array_equal(values, np.array([100, 90, 110]))
    assert np.array_equal(steps, np.array([0, 0.25, 0.5]))
def test_21():
    """arrange_track_y_t (backward): best entry sits at the final halved step."""
    history = np.array([[0, 100], [1, 120], [0.5, 80]])
    values, steps = est_dir.arrange_track_y_t(history, 'Backward')
    assert np.array_equal(values, np.array([100, 80, 120]))
    assert np.array_equal(steps, np.array([0, 0.5, 1]))
def test_22():
    """Test for check_func_val_coeffs when func_val > track_y[1].

    The track here is hand-crafted (not produced by tracking) so that the
    best tabulated value, 40, beats the fitted-coefficient candidate and
    must be the value returned.
    """
    np.random.seed(90)
    m = 10
    f = est_dir.quad_f_noise
    minimizer = np.ones((m,))
    centre_point = np.random.uniform(0, 20, (m, ))
    matrix = est_dir.quad_func_params(1, 10, m)
    func_args = (minimizer, matrix, 0, 60)
    step = 1.8251102718712913
    no_vars = 10
    region = 1
    beta, func_evals = est_dir.compute_direction_LS(m, centre_point, f,
                                                    func_args, no_vars,
                                                    region)
    assert(func_evals == 16)
    f_old = f(np.copy(centre_point), *func_args)
    f_new = f(np.copy(centre_point) - step * beta, *func_args)
    assert(f_old > f_new)
    # Fabricated track: minimum function value 40 at step size 2.
    track = np.array([[0, 100],
                      [1, 160],
                      [2, 40],
                      [4, 90]])
    track_method = 'Forward'
    upd_point, func_val = (est_dir.check_func_val_coeffs
                           (track, track_method, centre_point, beta, f,
                            func_args))
    assert(upd_point.shape == (m, ))
    assert(func_val == 40)
def test_23():
    """Test for check_func_val_coeffs when func_val <= track_y[1].

    The track comes from a genuine forward-tracking run; the returned
    function value must be no worse than every tabulated track value.
    """
    np.random.seed(91)
    m = 10
    f = est_dir.quad_f_noise
    const_back = 0.5
    const_forward = (1 / const_back)
    minimizer = np.ones((m,))
    centre_point = np.random.uniform(0, 20, (m, ))
    matrix = est_dir.quad_func_params(1, 10, m)
    func_args = (minimizer, matrix, 0, 5)
    step = 0.01
    forward_tol = 1000000
    no_vars = 10
    region = 1
    beta, func_evals = est_dir.compute_direction_LS(m, centre_point, f,
                                                    func_args, no_vars,
                                                    region)
    assert(func_evals == 16)
    f_old = f(np.copy(centre_point), *func_args)
    f_new = f(np.copy(centre_point) - step * beta, *func_args)
    assert(f_old > f_new)
    track, total_func_evals, flag = (est_dir.forward_tracking
                                     (centre_point, step, f_old, f_new, beta,
                                      const_forward, forward_tol, f,
                                      func_args))
    assert(flag == True)
    assert(total_func_evals > 0)
    track_method = 'Forward'
    upd_point, func_val = (est_dir.check_func_val_coeffs
                           (track, track_method, centre_point, beta, f,
                            func_args))
    assert(upd_point.shape == (m, ))
    assert(np.all(func_val <= track[:, 1]))
| 36.82327
| 80
| 0.517938
| 3,407
| 25,003
| 3.561198
| 0.045201
| 0.071623
| 0.016484
| 0.034287
| 0.916344
| 0.890464
| 0.870601
| 0.854859
| 0.837303
| 0.825352
| 0
| 0.062935
| 0.356877
| 25,003
| 678
| 81
| 36.877581
| 0.691604
| 0.060793
| 0
| 0.799645
| 0
| 0
| 0.001956
| 0
| 0
| 0
| 0
| 0
| 0.198582
| 1
| 0.04078
| false
| 0
| 0.003546
| 0
| 0.044326
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e93221ad9cdf9c6c7bd6f2a9d7c45743ea25df4f
| 160
|
py
|
Python
|
Framework/utilities/logger/__init__.py
|
jonreding2010/MAQS.Python
|
43f73784fa9f0ab46545b0275fa8de38d86c76ed
|
[
"MIT"
] | null | null | null |
Framework/utilities/logger/__init__.py
|
jonreding2010/MAQS.Python
|
43f73784fa9f0ab46545b0275fa8de38d86c76ed
|
[
"MIT"
] | null | null | null |
Framework/utilities/logger/__init__.py
|
jonreding2010/MAQS.Python
|
43f73784fa9f0ab46545b0275fa8de38d86c76ed
|
[
"MIT"
] | null | null | null |
from Framework.utilities.logger import LoggingEnabled
from Framework.utilities.logger import MessageType
from Framework.utilities.logger import TestResultType
| 53.333333
| 54
| 0.88125
| 18
| 160
| 7.833333
| 0.444444
| 0.276596
| 0.468085
| 0.595745
| 0.723404
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.08125
| 160
| 3
| 55
| 53.333333
| 0.959184
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
3aab81f4326a92879076ba5c1e4820591a3cd3ae
| 9,676
|
py
|
Python
|
python/veles/tests/data/test_repack.py
|
pombredanne/veles
|
e65de5a7c268129acffcdb03034efd8d256d025c
|
[
"Apache-2.0"
] | 918
|
2017-01-16T17:31:25.000Z
|
2022-03-27T07:10:31.000Z
|
python/veles/tests/data/test_repack.py
|
pombredanne/veles
|
e65de5a7c268129acffcdb03034efd8d256d025c
|
[
"Apache-2.0"
] | 193
|
2017-01-17T13:56:10.000Z
|
2020-09-01T08:29:48.000Z
|
python/veles/tests/data/test_repack.py
|
pombredanne/veles
|
e65de5a7c268129acffcdb03034efd8d256d025c
|
[
"Apache-2.0"
] | 112
|
2017-02-01T01:05:57.000Z
|
2022-03-29T07:21:12.000Z
|
# Copyright 2017 CodiLime
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from veles.data.bindata import BinData
from veles.data.repack import Endian, Repacker
class TestRepacker(unittest.TestCase):
    """Tests for veles Repacker: converting BinData between element widths.

    Conventions exercised below (as demonstrated by the assertions):
    - repack_unit: bit width of the internal unit one repack step works on.
    - repack_size(n): source elements needed to produce n destination
      elements.
    - repackable_size(n): destination elements obtainable from n source
      elements.
    - Endian.LITTLE / Endian.BIG select the order in which source elements
      are combined into wider (or split into narrower) destination elements.
    """
    def test_endian(self):
        """The two endianness markers must be distinct values."""
        self.assertNotEqual(Endian.LITTLE, Endian.BIG)
    def test_simple_copy(self):
        """Equal widths: repack is a plain (sub)copy."""
        r = Repacker(endian=Endian.LITTLE, from_width=8, to_width=8)
        self.assertEqual(r.repack_unit, 8)
        self.assertEqual(r.repack_size(num_elements=2), 2)
        self.assertEqual(r.repackable_size(from_size=2), 2)
        a = BinData(8, [1, 2, 3, 4])
        b = r.repack(a, start=1, num_elements=2)
        self.assertEqual(b, BinData(8, [2, 3]))
        self.assertEqual(r.repack(a), a)
    def test_gather_8to16_little(self):
        """Pairs of bytes combine low-byte-first into 16-bit elements."""
        r = Repacker(endian=Endian.LITTLE, from_width=8, to_width=16)
        self.assertEqual(r.repack_unit, 16)
        self.assertEqual(r.repack_size(2), 4)
        self.assertEqual(r.repackable_size(2), 1)
        self.assertEqual(r.repackable_size(3), 1)
        self.assertEqual(r.repackable_size(4), 2)
        a = BinData(8, [1, 2, 3, 4, 5, 6])
        b = r.repack(a, start=1, num_elements=2)
        self.assertEqual(b, BinData.from_spaced_hex(16, '0302 0504'))
        c = r.repack(a, start=1)
        self.assertEqual(b, c)
        d = r.repack(a)
        self.assertEqual(d, BinData.from_spaced_hex(16, '0201 0403 0605'))
    def test_gather_8to16_big(self):
        """Pairs of bytes combine high-byte-first into 16-bit elements."""
        r = Repacker(endian=Endian.BIG, from_width=8, to_width=16)
        self.assertEqual(r.repack_unit, 16)
        self.assertEqual(r.repack_size(2), 4)
        self.assertEqual(r.repackable_size(2), 1)
        self.assertEqual(r.repackable_size(3), 1)
        self.assertEqual(r.repackable_size(4), 2)
        a = BinData(8, [1, 2, 3, 4, 5, 6])
        b = r.repack(a, start=1, num_elements=2)
        self.assertEqual(b, BinData.from_spaced_hex(16, '0203 0405'))
        c = r.repack(a, start=1)
        self.assertEqual(b, c)
        d = r.repack(a)
        self.assertEqual(d, BinData.from_spaced_hex(16, '0102 0304 0506'))
    def test_mash_8to12_little(self):
        """8-bit to 12-bit, widths not multiples: repack unit is lcm 24."""
        r = Repacker(Endian.LITTLE, 8, 12)
        self.assertEqual(r.repack_unit, 24)
        self.assertEqual(r.repack_size(1), 2)
        self.assertEqual(r.repack_size(2), 3)
        self.assertEqual(r.repackable_size(1), 0)
        self.assertEqual(r.repackable_size(2), 1)
        self.assertEqual(r.repackable_size(3), 2)
        self.assertEqual(r.repackable_size(4), 2)
        a = BinData.from_spaced_hex(8, '12 34 56 78 9a')
        b = r.repack(a, 1, 2)
        self.assertEqual(b, BinData.from_spaced_hex(12, '634 785'))
        c = r.repack(a, 1)
        self.assertEqual(b, c)
        d = r.repack(a)
        self.assertEqual(d, BinData.from_spaced_hex(12, '412 563 a78'))
    def test_mash_8to12_big(self):
        """Big-endian variant of the 8-to-12 mash."""
        r = Repacker(Endian.BIG, 8, 12)
        self.assertEqual(r.repack_unit, 24)
        self.assertEqual(r.repack_size(1), 2)
        self.assertEqual(r.repack_size(2), 3)
        self.assertEqual(r.repackable_size(1), 0)
        self.assertEqual(r.repackable_size(2), 1)
        self.assertEqual(r.repackable_size(3), 2)
        self.assertEqual(r.repackable_size(4), 2)
        a = BinData.from_spaced_hex(8, '12 34 56 78 9a')
        b = r.repack(a, 1, 2)
        self.assertEqual(b, BinData.from_spaced_hex(12, '345 678'))
        c = r.repack(a, 1)
        self.assertEqual(b, c)
        d = r.repack(a)
        self.assertEqual(d, BinData.from_spaced_hex(12, '123 456 789'))
    def test_split_8to1_little(self):
        """Bytes split into single bits, least-significant bit first."""
        r = Repacker(Endian.LITTLE, 8, 1)
        self.assertEqual(r.repack_unit, 8)
        self.assertEqual(r.repack_size(12), 2)
        self.assertEqual(r.repack_size(8), 1)
        self.assertEqual(r.repack_size(9), 2)
        self.assertEqual(r.repack_size(17), 3)
        self.assertEqual(r.repackable_size(1), 8)
        a = BinData.from_spaced_hex(8, '12 34 56')
        b = r.repack(a, 1, 12)
        # Reversed binary string: little-endian emits LSB first.
        c = BinData.from_spaced_hex(1, ' '.join(format(0x634, '012b')[::-1]))
        self.assertEqual(b, c)
    def test_split_8to1_big(self):
        """Bytes split into single bits, most-significant bit first."""
        r = Repacker(Endian.BIG, 8, 1)
        self.assertEqual(r.repack_unit, 8)
        self.assertEqual(r.repack_size(12), 2)
        self.assertEqual(r.repack_size(8), 1)
        self.assertEqual(r.repack_size(9), 2)
        self.assertEqual(r.repack_size(17), 3)
        self.assertEqual(r.repackable_size(1), 8)
        a = BinData.from_spaced_hex(8, '12 34 56')
        b = r.repack(a, 1, 12)
        c = BinData.from_spaced_hex(1, ' '.join(format(0x345, '012b')))
        self.assertEqual(b, c)
    def test_split_60to20_little(self):
        """One 60-bit element splits into three 20-bit, low part first."""
        r = Repacker(Endian.LITTLE, 60, 20)
        self.assertEqual(r.repack_unit, 60)
        self.assertEqual(r.repack_size(1), 1)
        self.assertEqual(r.repack_size(2), 1)
        self.assertEqual(r.repack_size(3), 1)
        self.assertEqual(r.repack_size(4), 2)
        self.assertEqual(r.repackable_size(1), 3)
        a = BinData(60, [0xfedcba987654321])
        b = r.repack(a)
        self.assertEqual(b, BinData.from_spaced_hex(20, '54321 a9876 fedcb'))
    def test_split_60to20_big(self):
        """One 60-bit element splits into three 20-bit, high part first."""
        r = Repacker(Endian.BIG, 60, 20)
        self.assertEqual(r.repack_unit, 60)
        self.assertEqual(r.repack_size(1), 1)
        self.assertEqual(r.repack_size(2), 1)
        self.assertEqual(r.repack_size(3), 1)
        self.assertEqual(r.repack_size(4), 2)
        self.assertEqual(r.repackable_size(1), 3)
        a = BinData(60, [0xfedcba987654321])
        b = r.repack(a)
        self.assertEqual(b, BinData.from_spaced_hex(20, 'fedcb a9876 54321'))
    def test_split_16to8_little(self):
        """16-bit elements split into byte pairs, low byte first."""
        r = Repacker(Endian.LITTLE, 16, 8)
        self.assertEqual(r.repack_unit, 16)
        self.assertEqual(r.repack_size(3), 2)
        self.assertEqual(r.repackable_size(3), 6)
        a = BinData(16, [0x1234, 0x5678, 0x9abc])
        b = r.repack(a, 1, 3)
        self.assertEqual(b, BinData.from_spaced_hex(8, '78 56 bc'))
    def test_split_16to8_big(self):
        """16-bit elements split into byte pairs, high byte first."""
        r = Repacker(Endian.BIG, 16, 8)
        self.assertEqual(r.repack_unit, 16)
        self.assertEqual(r.repack_size(3), 2)
        self.assertEqual(r.repackable_size(3), 6)
        a = BinData(16, [0x1234, 0x5678, 0x9abc])
        b = r.repack(a, 1, 3)
        self.assertEqual(b, BinData.from_spaced_hex(8, '56 78 9a'))
    def test_padded_8to23_left_little(self):
        """23-bit destination with 9 high padding bits (unit becomes 32)."""
        r = Repacker(Endian.LITTLE, 8, 23, high_pad=9)
        self.assertEqual(r.repack_unit, 32)
        self.assertEqual(r.repack_size(2), 8)
        self.assertEqual(r.repackable_size(7), 1)
        self.assertEqual(r.repackable_size(8), 2)
        a = BinData.from_spaced_hex(8, '11 22 33 44 55 66 77 88 99 aa')
        b = r.repack(a, 1, 2)
        self.assertEqual(b, BinData.from_spaced_hex(23, '443322 087766'))
    def test_padded_8to23_right_little(self):
        """23-bit destination with 9 low padding bits."""
        r = Repacker(Endian.LITTLE, 8, 23, low_pad=9)
        self.assertEqual(r.repack_unit, 32)
        self.assertEqual(r.repack_size(2), 8)
        self.assertEqual(r.repackable_size(7), 1)
        self.assertEqual(r.repackable_size(8), 2)
        a = BinData.from_spaced_hex(8, '11 22 33 44 55 66 77 88 99 aa')
        b = r.repack(a, 1, 2)
        self.assertEqual(b, BinData.from_spaced_hex(23, '2aa219 4cc43b'))
    def test_padded_8to23_mixed_little(self):
        """23-bit destination with padding on both sides (8 low, 1 high)."""
        r = Repacker(Endian.LITTLE, 8, 23, low_pad=8, high_pad=1)
        self.assertEqual(r.repack_unit, 32)
        self.assertEqual(r.repack_size(2), 8)
        self.assertEqual(r.repackable_size(7), 1)
        self.assertEqual(r.repackable_size(8), 2)
        a = BinData.from_spaced_hex(8, '11 22 33 44 55 66 77 88 99 aa')
        b = r.repack(a, 1, 2)
        self.assertEqual(b, BinData.from_spaced_hex(23, '554433 198877'))
    def test_padded_8to23_left_big(self):
        """Big-endian, 9 high padding bits."""
        r = Repacker(Endian.BIG, 8, 23, high_pad=9)
        self.assertEqual(r.repack_unit, 32)
        self.assertEqual(r.repack_size(2), 8)
        self.assertEqual(r.repackable_size(7), 1)
        self.assertEqual(r.repackable_size(8), 2)
        a = BinData.from_spaced_hex(8, '11 22 33 44 55 66 77 88 99 aa')
        b = r.repack(a, 1, 2)
        self.assertEqual(b, BinData.from_spaced_hex(23, '334455 778899'))
    def test_padded_8to23_right_big(self):
        """Big-endian, 9 low padding bits."""
        r = Repacker(Endian.BIG, 8, 23, low_pad=9)
        self.assertEqual(r.repack_unit, 32)
        self.assertEqual(r.repack_size(2), 8)
        self.assertEqual(r.repackable_size(7), 1)
        self.assertEqual(r.repackable_size(8), 2)
        a = BinData.from_spaced_hex(8, '11 22 33 44 55 66 77 88 99 aa')
        b = r.repack(a, 1, 2)
        self.assertEqual(b, BinData.from_spaced_hex(23, '1119a2 333bc4'))
    def test_padded_8to23_mixed_big(self):
        """Big-endian, padding on both sides (8 low, 1 high)."""
        r = Repacker(Endian.BIG, 8, 23, low_pad=8, high_pad=1)
        self.assertEqual(r.repack_unit, 32)
        self.assertEqual(r.repack_size(2), 8)
        self.assertEqual(r.repackable_size(7), 1)
        self.assertEqual(r.repackable_size(8), 2)
        a = BinData.from_spaced_hex(8, '11 22 33 44 55 66 77 88 99 aa')
        b = r.repack(a, 1, 2)
        self.assertEqual(b, BinData.from_spaced_hex(23, '223344 667788'))
| 42.814159
| 77
| 0.637247
| 1,492
| 9,676
| 3.987265
| 0.124665
| 0.269793
| 0.220541
| 0.181207
| 0.830224
| 0.798454
| 0.774248
| 0.745672
| 0.733905
| 0.733905
| 0
| 0.099987
| 0.228917
| 9,676
| 225
| 78
| 43.004444
| 0.69736
| 0.055912
| 0
| 0.659686
| 0
| 0
| 0.048021
| 0
| 0
| 0
| 0.008771
| 0
| 0.565445
| 1
| 0.094241
| false
| 0
| 0.015707
| 0
| 0.115183
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
3ae5cf37eea91bb44926c87f063230fb3d523036
| 901
|
py
|
Python
|
Questionnaire2/Questionnaire/Api/decorators.py
|
riverstation/project-all
|
c56f1879e1303d561e95a3ff3a70f94fb5fa2191
|
[
"Apache-2.0"
] | 2
|
2020-11-09T06:20:45.000Z
|
2021-05-10T07:03:35.000Z
|
Questionnaire2/Questionnaire/Api/decorators.py
|
riverstation/project-all
|
c56f1879e1303d561e95a3ff3a70f94fb5fa2191
|
[
"Apache-2.0"
] | null | null | null |
Questionnaire2/Questionnaire/Api/decorators.py
|
riverstation/project-all
|
c56f1879e1303d561e95a3ff3a70f94fb5fa2191
|
[
"Apache-2.0"
] | 1
|
2020-02-21T09:37:01.000Z
|
2020-02-21T09:37:01.000Z
|
import json
from functools import wraps

from Api.utils import *
def admin_required(func):
    """Restrict a view method to authenticated users with an ``admin`` relation.

    ``func`` is a view method called as (self, request, *args, **kwargs).
    Callers whose ``request.user`` is not authenticated, or lacks an
    ``admin`` attribute, receive the not-authenticated response instead.
    """
    @wraps(func)  # preserve func's __name__/__doc__ for introspection
    def _wrapper(self, request, *args, **kwargs):
        if request.user.is_authenticated and hasattr(request.user, 'admin'):
            return func(self, request, *args, **kwargs)
        return not_authenticated()
    return _wrapper
def customer_required(func):
    """Restrict a view method to authenticated users with a ``customer`` relation.

    ``func`` is a view method called as (self, request, *args, **kwargs).
    Callers whose ``request.user`` is not authenticated, or lacks a
    ``customer`` attribute, receive the not-authenticated response instead.
    """
    @wraps(func)  # preserve func's __name__/__doc__ for introspection
    def _wrapper(self, request, *args, **kwargs):
        if request.user.is_authenticated and hasattr(request.user, 'customer'):
            return func(self, request, *args, **kwargs)
        return not_authenticated()
    return _wrapper
def userinfo_required(func):
    """Restrict a view method to authenticated users with a ``userinfo`` relation.

    ``func`` is a view method called as (self, request, *args, **kwargs).
    Callers whose ``request.user`` is not authenticated, or lacks a
    ``userinfo`` attribute, receive the not-authenticated response instead.
    """
    @wraps(func)  # preserve func's __name__/__doc__ for introspection
    def _wrapper(self, request, *args, **kwargs):
        if request.user.is_authenticated and hasattr(request.user, 'userinfo'):
            return func(self, request, *args, **kwargs)
        return not_authenticated()
    return _wrapper
| 29.064516
| 79
| 0.643729
| 102
| 901
| 5.539216
| 0.245098
| 0.116814
| 0.159292
| 0.223009
| 0.870796
| 0.870796
| 0.870796
| 0.870796
| 0.870796
| 0.870796
| 0
| 0
| 0.247503
| 901
| 30
| 80
| 30.033333
| 0.833333
| 0
| 0
| 0.652174
| 0
| 0
| 0.023307
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.26087
| false
| 0
| 0.086957
| 0
| 0.73913
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
3aec685a85da369122c266e967f2b26782bfff04
| 14,551
|
py
|
Python
|
openprocurement/auctions/appraisal/tests/blanks/item_blanks.py
|
bdmbdsm/openprocurement.auctions.appraisal
|
563e5e6180b03118ed9eb36a9141c7a6408669de
|
[
"Apache-2.0"
] | null | null | null |
openprocurement/auctions/appraisal/tests/blanks/item_blanks.py
|
bdmbdsm/openprocurement.auctions.appraisal
|
563e5e6180b03118ed9eb36a9141c7a6408669de
|
[
"Apache-2.0"
] | 3
|
2018-12-07T13:13:42.000Z
|
2020-01-06T19:08:23.000Z
|
openprocurement/auctions/appraisal/tests/blanks/item_blanks.py
|
bdmbdsm/openprocurement.auctions.appraisal
|
563e5e6180b03118ed9eb36a9141c7a6408669de
|
[
"Apache-2.0"
] | 6
|
2018-12-05T16:17:46.000Z
|
2019-06-28T13:10:48.000Z
|
# -*- coding: utf-8 -*-
from uuid import uuid4
from copy import deepcopy
from datetime import timedelta
from openprocurement.auctions.core.utils import calculate_business_date
from openprocurement.auctions.appraisal.models import AppraisalAuction
def check_items_listing(self):
    """Listing grows by one after an item is posted to an active auction.

    Creates an auction, moves it to 'active.tendering', then checks that
    GET /auctions/{id}/items first matches the initial item count and
    grows by one after an item is created.
    """
    self.app.authorization = ('Basic', ('broker', ''))
    data = self.initial_data.copy()
    # Auction creation
    response = self.app.post_json('/auctions', {'data': data})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    auction_id = response.json['data']['id']
    owner_token = response.json['access']['token']
    access_header = {'X-Access-Token': str(owner_token)}
    self.app.patch_json(
        '/auctions/{}'.format(auction_id),
        {'data': {'status': 'active.tendering'}},
        headers=access_header
    )
    response = self.app.get(
        '/auctions/{}/items'.format(auction_id),
    )
    self.assertEqual(len(response.json['data']), len(data['items']))
    # Create one item and check listing
    response = self.app.post_json(
        '/auctions/{}/items'.format(auction_id),
        {'data': self.initial_item_data},
        headers=access_header
    )
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    response = self.app.get(
        '/auctions/{}/items'.format(auction_id),
    )
    self.assertEqual(len(response.json['data']), len(data['items']) + 1)
def check_item_creation(self):
    """POSTing an item stores it and GET returns the same fields.

    Creates an auction in 'active.tendering', posts initial_item_data,
    and verifies id, description, quantity and address round-trip both in
    the creation response and on a subsequent GET.
    """
    self.app.authorization = ('Basic', ('broker', ''))
    data = self.initial_data.copy()
    # Auction creation
    response = self.app.post_json('/auctions', {'data': data})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    auction_id = response.json['data']['id']
    owner_token = response.json['access']['token']
    access_header = {'X-Access-Token': str(owner_token)}
    self.app.patch_json(
        '/auctions/{}'.format(auction_id),
        {'data': {'status': 'active.tendering'}},
        headers=access_header
    )
    # Item creation
    response = self.app.post_json(
        '/auctions/{}/items'.format(auction_id),
        {'data': self.initial_item_data},
        headers=access_header
    )
    item_id = response.json['data']['id']
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(self.initial_item_data['id'], response.json['data']['id'])
    self.assertIn(item_id, response.headers['Location'])
    self.assertEqual(self.initial_item_data['description'], response.json["data"]["description"])
    self.assertEqual(self.initial_item_data['quantity'], response.json["data"]["quantity"])
    self.assertEqual(self.initial_item_data['address'], response.json["data"]["address"])
    # Get item
    response = self.app.get('/auctions/{}/items/{}'.format(auction_id, item_id))
    self.assertEqual(item_id, response.json['data']['id'])
    self.assertEqual(self.initial_item_data['description'], response.json["data"]["description"])
    self.assertEqual(self.initial_item_data['quantity'], response.json["data"]["quantity"])
    self.assertEqual(self.initial_item_data['address'], response.json["data"]["address"])
def check_item_patch(self):
    """PATCHing an item updates its description but never its id.

    After creating an auction and an item, patches the item with a new
    description and a bogus id; the description must change while the
    server-assigned id stays intact (assertNotEqual below).
    """
    self.app.authorization = ('Basic', ('broker', ''))
    data = self.initial_data.copy()
    # Auction creation
    response = self.app.post_json('/auctions', {'data': data})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    auction_id = response.json['data']['id']
    owner_token = response.json['access']['token']
    access_header = {'X-Access-Token': str(owner_token)}
    self.app.patch_json(
        '/auctions/{}'.format(auction_id),
        {'data': {'status': 'active.tendering'}},
        headers=access_header
    )
    # Item creation
    response = self.app.post_json(
        '/auctions/{}/items'.format(auction_id),
        {'data': self.initial_item_data},
        headers=access_header
    )
    item_id = response.json['data']['id']
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(self.initial_item_data['id'], response.json['data']['id'])
    self.assertIn(item_id, response.headers['Location'])
    self.assertEqual(self.initial_item_data['description'], response.json["data"]["description"])
    self.assertEqual(self.initial_item_data['quantity'], response.json["data"]["quantity"])
    self.assertEqual(self.initial_item_data['address'], response.json["data"]["address"])
    # Get item
    response = self.app.get('/auctions/{}/items/{}'.format(auction_id, item_id))
    self.assertEqual(item_id, response.json['data']['id'])
    self.assertEqual(self.initial_item_data['description'], response.json["data"]["description"])
    self.assertEqual(self.initial_item_data['quantity'], response.json["data"]["quantity"])
    self.assertEqual(self.initial_item_data['address'], response.json["data"]["address"])
    # Patch item
    patch_data = {'description': 'DESCRIPTION_' + uuid4().hex, 'id': '0*32'}
    response = self.app.patch_json(
        '/auctions/{}/items/{}'.format(auction_id, item_id),
        {'data': patch_data},
        headers=access_header
    )
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertNotEqual(patch_data['id'], response.json['data']['id'])
    self.assertEqual(patch_data['description'], response.json["data"]["description"])
def check_patch_auction_in_not_editable_statuses(self):
    """Item creation/update is forbidden once the auction leaves tendering.

    Creates an item during 'active.tendering', switches the auction to
    'active.auction', then expects 403 with the "can't change items in
    this status" error for both POST and PATCH on items.
    """
    self.app.authorization = ('Basic', ('broker', ''))
    # Auction creation
    data = self.initial_data.copy()
    response = self.app.post_json('/auctions', {'data': data})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    auction_id = response.json['data']['id']
    owner_token = response.json['access']['token']
    access_header = {'X-Access-Token': str(owner_token)}
    self.auction_id = auction_id
    self.set_status('active.tendering')
    # Item creation
    response = self.app.post_json(
        '/auctions/{}/items'.format(auction_id),
        {'data': self.initial_item_data},
        headers=access_header
    )
    item_id = response.json['data']['id']
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    # Change status in which you can edit auction
    desired_status = 'active.auction'
    self.set_status(desired_status)
    self.app.authorization = ('Basic', ('broker', ''))
    # Trying to create new item
    response = self.app.post_json(
        '/auctions/{}/items'.format(auction_id),
        {'data': self.initial_item_data},
        headers=access_header,
        status=403
    )
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(
        response.json['errors'][0]['description'],
        "You can't change items in this status ({})".format(desired_status)
    )
    # Trying to update new item
    response = self.app.patch_json(
        '/auctions/{}/items/{}'.format(auction_id, item_id),
        {'data': {'description': uuid4().hex}},
        headers=access_header,
        status=403
    )
    self.assertEqual(response.status, '403 Forbidden')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(
        response.json['errors'][0]['description'],
        "You can't change items in this status ({})".format(desired_status)
    )
def validate_change_items_after_rectification_period(self):
    """Items become read-only once the rectification period is over.

    Shifts the stored auction's tenderPeriod back in time directly in the
    database so the rectification window has passed, then expects 403
    with the "can't change items after rectification period" error for
    both item POST and PATCH.
    """
    self.app.authorization = ('Basic', ('broker', ''))
    # Auction creation
    data = self.initial_data.copy()
    response = self.app.post_json('/auctions', {'data': data})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    auction_id = response.json['data']['id']
    owner_token = response.json['access']['token']
    access_header = {'X-Access-Token': str(owner_token)}
    self.auction_id = auction_id
    self.set_status('active.tendering')
    # Item creation
    response = self.app.post_json(
        '/auctions/{}/items'.format(auction_id),
        {'data': self.initial_item_data},
        headers=access_header
    )
    item_id = response.json['data']['id']
    self.assertEqual(response.status, '201 Created')
    # Change rectification period
    # Rewind tenderPeriod 15 working days and give it a 7-working-day
    # span, so "now" is past the rectification window.
    fromdb = self.db.get(auction_id)
    fromdb = AppraisalAuction(fromdb)
    fromdb.tenderPeriod.startDate = calculate_business_date(
        fromdb.tenderPeriod.startDate,
        -timedelta(days=15),
        fromdb,
        working_days=True
    )
    fromdb.tenderPeriod.endDate = calculate_business_date(
        fromdb.tenderPeriod.startDate,
        timedelta(days=7),
        fromdb,
        working_days=True
    )
    fromdb = fromdb.store(self.db)
    self.assertEqual(fromdb.id, auction_id)
    # Check if items can`t be edited
    response = self.app.post_json(
        '/auctions/{}/items'.format(auction_id),
        {'data': self.initial_item_data},
        headers=access_header,
        status=403
    )
    self.assertEqual(response.json['errors'][0]['description'], 'You can\'t change items after rectification period')
    response = self.app.patch_json(
        '/auctions/{}/items/{}'.format(auction_id, item_id),
        {'data': {'description': uuid4().hex}},
        headers=access_header,
        status=403
    )
    self.assertEqual(response.json['errors'][0]['description'], 'You can\'t change items after rectification period')
def batch_create_items(self):
    """Creating an auction with items supplied in one batch keeps them all."""
    self.app.authorization = ('Basic', ('broker', ''))
    payload = self.initial_data.copy()
    payload['items'] = [self.initial_item_data]
    created = self.app.post_json('/auctions', {'data': payload})
    self.assertEqual(created.status, '201 Created')
    self.assertEqual(created.content_type, 'application/json')
    self.assertEqual(len(created.json['data']['items']), len(payload['items']))
def batch_update_items(self):
    """Patch an auction with a batch 'items' list and verify the item count.

    Creates an auction with one item, switches it to active.tendering, then
    PATCHes a two-item list and checks both items are stored.
    """
    self.app.authorization = ('Basic', ('broker', ''))
    auction_data = self.initial_data.copy()
    auction_data['items'] = [self.initial_item_data]
    # Auction creation
    response = self.app.post_json('/auctions', {'data': auction_data})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(len(response.json['data']['items']), len(auction_data['items']))
    auction_id = response.json['data']['id']
    auth_headers = {'X-Access-Token': str(response.json['access']['token'])}
    self.app.patch_json(
        '/auctions/{}'.format(auction_id),
        {'data': {'status': 'active.tendering'}},
        headers=auth_headers
    )
    # Update items with batch mode: second item is a copy without an id,
    # so the server must treat it as a new item.
    extra_item = deepcopy(self.initial_item_data)
    del extra_item['id']
    items_patch = {'items': [self.initial_item_data, extra_item]}
    response = self.app.patch_json(
        '/auctions/{}'.format(auction_id),
        {'data': items_patch},
        headers=auth_headers
    )
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    self.assertEqual(len(response.json['data']['items']), len(items_patch['items']))
def check_bids_invalidation(self):
    """End-to-end check that any item change invalidates active bids.

    Flow: create auction -> activate a bid -> add an item (bid must flip to
    'invalid' and rectificationPeriod.invalidationDate appear) -> re-activate
    the bid -> patch the item (bid must flip to 'invalid' again and the
    invalidationDate must move forward).
    """
    self.app.authorization = ('Basic', ('broker', ''))
    # Auction creation
    data = self.initial_data.copy()
    response = self.app.post_json('/auctions', {'data': data})
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    auction_id = response.json['data']['id']
    owner_token = response.json['access']['token']
    access_header = {'X-Access-Token': str(owner_token)}
    self.auction_id = auction_id
    self.set_status('active.tendering')
    # Create and activate bid
    response = self.app.post_json(
        '/auctions/{}/bids'.format(auction_id),
        {'data': {'tenderers': [self.initial_organization], "status": "draft", 'qualified': True, 'eligible': True}}
    )
    self.assertEqual(response.status, '201 Created')
    self.assertEqual(response.content_type, 'application/json')
    bidder_id = response.json['data']['id']
    bid_token = response.json['access']['token']
    self.app.patch_json(
        '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token),
        {'data': {'status': 'active'}}
    )
    # Create item
    response = self.app.post_json(
        '/auctions/{}/items'.format(auction_id),
        {'data': self.initial_item_data},
        headers=access_header
    )
    item_id = response.json['data']['id']
    # Check if bid invalidated
    response = self.app.get(
        '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token)
    )
    self.assertEqual(response.json['data']['status'], 'invalid')
    response = self.app.get('/auctions/{}'.format(auction_id))
    self.assertIn('invalidationDate', response.json['data']['rectificationPeriod'])
    # Remember the first invalidation timestamp; a later item edit must
    # produce a *different* one (asserted at the end).
    invalidation_date = response.json['data']['rectificationPeriod']['invalidationDate']
    # Activate bid again and check if status changes
    self.app.patch_json(
        '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token),
        {'data': {'status': 'active'}}
    )
    response = self.app.get(
        '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token)
    )
    self.assertEqual(response.json['data']['status'], 'active')
    # Patch item; note even an empty-body patch triggers invalidation
    response = self.app.patch_json(
        '/auctions/{}/items/{}'.format(auction_id, item_id),
        {'data': {}},
        headers=access_header
    )
    self.assertEqual(response.status, '200 OK')
    self.assertEqual(response.content_type, 'application/json')
    response = self.app.get(
        '/auctions/{}/bids/{}?acc_token={}'.format(auction_id, bidder_id, bid_token)
    )
    self.assertEqual(response.json['data']['status'], 'invalid')
    response = self.app.get('/auctions/{}'.format(auction_id))
    self.assertIn('invalidationDate', response.json['data']['rectificationPeriod'])
    self.assertNotEqual(invalidation_date, response.json['data']['rectificationPeriod']['invalidationDate'])
| 36.837975
| 117
| 0.665315
| 1,696
| 14,551
| 5.54717
| 0.079009
| 0.106824
| 0.107568
| 0.052509
| 0.881696
| 0.861182
| 0.852041
| 0.833652
| 0.820685
| 0.816114
| 0
| 0.007276
| 0.168854
| 14,551
| 395
| 118
| 36.837975
| 0.77063
| 0.039791
| 0
| 0.759322
| 0
| 0
| 0.190189
| 0.020869
| 0
| 0
| 0
| 0
| 0.247458
| 1
| 0.027119
| false
| 0
| 0.016949
| 0
| 0.044068
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c93ae6eb931fecf508d13b3d1411d50047ff3a7a
| 129
|
py
|
Python
|
anvil/sub_rig_templates/bird_wing.py
|
AndresMWeber/Anvil
|
9cd202183ac998983c2bf6e55cc46bbc0ca1a78e
|
[
"Apache-2.0"
] | 3
|
2019-11-22T04:38:06.000Z
|
2022-01-19T08:27:18.000Z
|
anvil/sub_rig_templates/bird_wing.py
|
AndresMWeber/Anvil
|
9cd202183ac998983c2bf6e55cc46bbc0ca1a78e
|
[
"Apache-2.0"
] | 28
|
2018-02-01T20:39:42.000Z
|
2018-04-26T17:25:23.000Z
|
anvil/sub_rig_templates/bird_wing.py
|
AndresMWeber/Anvil
|
9cd202183ac998983c2bf6e55cc46bbc0ca1a78e
|
[
"Apache-2.0"
] | 1
|
2018-03-11T06:47:26.000Z
|
2018-03-11T06:47:26.000Z
|
from limb import Limb
class BirdWing(Limb):
    """Bird-wing sub-rig template: a Limb whose built-in metadata name is 'wing'.

    Only the default metadata differs from Limb; presumably the rig-building
    behavior is inherited unchanged -- confirm against Limb.
    """
    BUILT_IN_META_DATA = Limb.BUILT_IN_META_DATA.merge({'name': 'wing'}, new=True)
| 21.5
| 82
| 0.736434
| 21
| 129
| 4.238095
| 0.666667
| 0.202247
| 0.247191
| 0.337079
| 0.426966
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.131783
| 129
| 5
| 83
| 25.8
| 0.794643
| 0
| 0
| 0
| 0
| 0
| 0.062016
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c95ce0ca56d84728f833b8dd59f326a2ad608973
| 3,652
|
py
|
Python
|
test/src/data/test_mask.py
|
gusriobr/vineyard-sketcher
|
b1018dd7e3e3cf3de72817831c3e5feec00157b9
|
[
"Apache-2.0"
] | null | null | null |
test/src/data/test_mask.py
|
gusriobr/vineyard-sketcher
|
b1018dd7e3e3cf3de72817831c3e5feec00157b9
|
[
"Apache-2.0"
] | null | null | null |
test/src/data/test_mask.py
|
gusriobr/vineyard-sketcher
|
b1018dd7e3e3cf3de72817831c3e5feec00157b9
|
[
"Apache-2.0"
] | null | null | null |
import os
import unittest
import numpy as np
import cfg_test as tcfg
from skimage import io
from image.mask import MaskMerger, clean_instaces, PrimeIdMasMerger
class TestMaskMerger(unittest.TestCase):
    """Integration test for MaskMerger.

    Feeds per-iteration mask files into the merger on a shared canvas,
    dumps intermediate snapshots to /tmp, then checks the number of
    distinct instance ids left after clean_instaces().
    """
    @classmethod
    def setUpClass(cls):
        cls.iterations = cls._load_images()
    @classmethod
    def _load_images(cls):  # FIX: parameter was named `self` under @classmethod
        # One file group per merge iteration. The numbers embedded in each
        # name (image_<a>_<b>_<n>.png) presumably encode the tile offset;
        # test_apply re-parses them into `pos` -- confirm against MaskMerger.
        return [
            ['image_0_0_0.png'],
            ['image_0_100_0.png'],
            ['image_0_200_0.png'],
            ['image_0_300_0.png'],
            ['image_0_300_1.png', 'image_0_300_2.png'],
            ['image_0_400_0.png', 'image_0_400_1.png'],
            ['image_0_500_0.png', 'image_0_500_1.png'],
            ['image_0_600_0.png', 'image_0_600_1.png'],
            ['image_0_700_0.png'],
            ['image_0_800_0.png']
        ]
    def test_apply(self):
        base_folder = tcfg.resource("masks")
        out_img = np.zeros((512, 512 * 9), dtype=np.uint8)
        merger = MaskMerger()
        # `step` replaces the hand-maintained counter `r`; `file_group`
        # avoids shadowing the builtin `iter`.
        for step, file_group in enumerate(self.iterations):
            # stack this iteration's mask files into an (H, W, N) array
            masks = np.zeros((512, 512, len(file_group)), dtype=np.uint8)
            for i, fname in enumerate(file_group):
                masks[:, :, i] = io.imread(os.path.join(base_folder, fname)).astype(np.uint8)
            pos = list(map(int, file_group[0].split("_")[1:3]))
            pos.reverse()
            merger.apply(out_img, masks, pos)
            # contrast-stretch the ids so the debug snapshot is visible
            ids = np.unique(out_img)
            factor = 255 // len(ids)
            io.imsave("/tmp/salida_{}.png".format(step), out_img * factor)
        # check the number of instances
        clean_instaces(out_img)
        ids = np.unique(out_img)
        factor = 255 // len(ids)
        # FIX: original called .format(r) on a placeholder-free file name
        io.imsave("/tmp/output_image.png", out_img * factor)
        self.assertEqual(3, len(ids))  # 0,1,2
class TestPrimeMaskMerger(unittest.TestCase):
    """Same scenario as TestMaskMerger but exercising PrimeIdMasMerger.

    Merges per-iteration mask files on a shared canvas and checks the
    number of distinct instance ids left after clean_instaces().
    """
    @classmethod
    def setUpClass(cls):
        cls.iterations = cls._load_images()
    @classmethod
    def _load_images(cls):  # FIX: parameter was named `self` under @classmethod
        # One file group per merge iteration; image_<a>_<b>_<n>.png numbers
        # presumably encode the tile offset -- re-parsed into `pos` below.
        return [
            ['image_0_0_0.png'],
            ['image_0_100_0.png'],
            ['image_0_200_0.png'],
            ['image_0_300_0.png'],
            ['image_0_300_1.png', 'image_0_300_2.png'],
            ['image_0_400_0.png', 'image_0_400_1.png'],
            ['image_0_500_0.png', 'image_0_500_1.png'],
            ['image_0_600_0.png', 'image_0_600_1.png'],
            ['image_0_700_0.png'],
            ['image_0_800_0.png']
        ]
    def test_apply(self):
        base_folder = tcfg.resource("masks")
        out_img = np.zeros((512, 512 * 9), dtype=np.uint8)
        merger = PrimeIdMasMerger()
        # `step` replaces the hand-maintained counter `r`; `file_group`
        # avoids shadowing the builtin `iter`.
        for step, file_group in enumerate(self.iterations):
            masks = np.zeros((512, 512, len(file_group)), dtype=np.uint8)
            for i, fname in enumerate(file_group):
                masks[:, :, i] = io.imread(os.path.join(base_folder, fname)).astype(np.uint8)
            pos = list(map(int, file_group[0].split("_")[1:3]))
            pos.reverse()
            merger.apply(out_img, masks, pos)
            # contrast-stretch the ids so the debug snapshot is visible
            ids = np.unique(out_img)
            factor = 255 // len(ids)
            io.imsave("/tmp/salida_{}.png".format(step), out_img * factor)
        # check the number of instances
        clean_instaces(out_img)
        ids = np.unique(out_img)
        factor = 255 // len(ids)
        # FIX: original called .format(r) on a placeholder-free file name
        io.imsave("/tmp/output_image.png", out_img * factor)
        self.assertEqual(3, len(ids))  # 0,1,2
# Allow running this module directly: unittest discovers and runs the
# TestCase classes defined above.
if __name__ == "__main__":
    unittest.main()
| 31.756522
| 95
| 0.560241
| 498
| 3,652
| 3.839357
| 0.190763
| 0.087866
| 0.122385
| 0.083682
| 0.882845
| 0.882845
| 0.882845
| 0.882845
| 0.882845
| 0.882845
| 0
| 0.078094
| 0.29874
| 3,652
| 114
| 96
| 32.035088
| 0.668489
| 0.034775
| 0
| 0.837209
| 0
| 0
| 0.162116
| 0.011945
| 0
| 0
| 0
| 0
| 0.023256
| 1
| 0.069767
| false
| 0
| 0.069767
| 0.023256
| 0.186047
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c9745f2f22ed34fe5293cf432c9cf0d96724ee1d
| 19,661
|
py
|
Python
|
lacquer/tree/visitor.py
|
provingground-moe/lacquer
|
4b005400de995288c8ca9050342097e0f4665d49
|
[
"Apache-2.0"
] | 30
|
2017-01-13T00:27:29.000Z
|
2022-03-24T09:20:06.000Z
|
lacquer/tree/visitor.py
|
provingground-moe/lacquer
|
4b005400de995288c8ca9050342097e0f4665d49
|
[
"Apache-2.0"
] | 15
|
2017-01-24T23:54:07.000Z
|
2017-03-31T20:19:40.000Z
|
lacquer/tree/visitor.py
|
provingground-moe/lacquer
|
4b005400de995288c8ca9050342097e0f4665d49
|
[
"Apache-2.0"
] | 16
|
2017-08-22T17:35:54.000Z
|
2019-06-25T16:04:23.000Z
|
# -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .join_criteria import JoinOn, JoinUsing
from .grouping import SimpleGroupBy, GroupingSets
class AstVisitor(object):
    """Base visitor for lacquer AST nodes.

    ``process`` double-dispatches through ``node.accept``, which calls back
    into the ``visit_<node_type>`` method matching the node's concrete type.
    Every specific ``visit_*`` method here simply delegates to the visit
    method of its parent node category (literal -> expression -> node,
    query -> statement -> node, union -> set_operation -> query_body, ...),
    so a subclass may override behavior at any level of the hierarchy.
    """
    def process(self, node, context=None):
        # Entry point: dispatch on the node's concrete type via accept().
        return node.accept(self, context)
    def visit_node(self, node, context):
        # Root of the visit hierarchy; default is a no-op (returns None).
        pass
    # --- expressions and literals ---
    def visit_expression(self, node, context):
        return self.visit_node(node, context)
    def visit_reset_session(self, node, context):
        return self.visit_statement(node, context)
    def visit_current_time(self, node, context):
        return self.visit_expression(node, context)
    def visit_extract(self, node, context):
        return self.visit_expression(node, context)
    def visit_arithmetic_binary(self, node, context):
        return self.visit_expression(node, context)
    def visit_between_predicate(self, node, context):
        return self.visit_expression(node, context)
    def visit_coalesce_expression(self, node, context):
        return self.visit_expression(node, context)
    def visit_comparison_expression(self, node, context):
        return self.visit_expression(node, context)
    def visit_literal(self, node, context):
        return self.visit_expression(node, context)
    def visit_double_literal(self, node, context):
        return self.visit_literal(node, context)
    # --- statements ---
    def visit_statement(self, node, context):
        return self.visit_node(node, context)
    def visit_query(self, node, context):
        return self.visit_statement(node, context)
    def visit_explain(self, node, context):
        return self.visit_statement(node, context)
    def visit_show_tables(self, node, context):
        return self.visit_statement(node, context)
    def visit_show_schemas(self, node, context):
        return self.visit_statement(node, context)
    def visit_show_catalogs(self, node, context):
        return self.visit_statement(node, context)
    def visit_show_columns(self, node, context):
        return self.visit_statement(node, context)
    def visit_show_partitions(self, node, context):
        return self.visit_statement(node, context)
    def visit_show_functions(self, node, context):
        return self.visit_statement(node, context)
    def visit_use(self, node, context):
        return self.visit_statement(node, context)
    def visit_show_session(self, node, context):
        return self.visit_statement(node, context)
    def visit_set_session(self, node, context):
        return self.visit_statement(node, context)
    def visit_generic_literal(self, node, context):
        return self.visit_literal(node, context)
    def visit_time_literal(self, node, context):
        return self.visit_literal(node, context)
    def visit_explain_option(self, node, context):
        return self.visit_node(node, context)
    # --- query structure ---
    def visit_with(self, node, context):
        return self.visit_node(node, context)
    def visit_approximate(self, node, context):
        return self.visit_node(node, context)
    def visit_with_query(self, node, context):
        return self.visit_node(node, context)
    def visit_select(self, node, context):
        return self.visit_node(node, context)
    def visit_relation(self, node, context):
        return self.visit_node(node, context)
    def visit_query_body(self, node, context):
        return self.visit_relation(node, context)
    def visit_query_specification(self, node, context):
        return self.visit_query_body(node, context)
    def visit_set_operation(self, node, context):
        return self.visit_query_body(node, context)
    def visit_union(self, node, context):
        return self.visit_set_operation(node, context)
    def visit_intersect(self, node, context):
        return self.visit_set_operation(node, context)
    def visit_except(self, node, context):
        return self.visit_set_operation(node, context)
    def visit_timestamp_literal(self, node, context):
        return self.visit_literal(node, context)
    def visit_when_clause(self, node, context):
        return self.visit_expression(node, context)
    def visit_interval_literal(self, node, context):
        return self.visit_literal(node, context)
    def visit_in_predicate(self, node, context):
        return self.visit_expression(node, context)
    def visit_function_call(self, node, context):
        return self.visit_expression(node, context)
    def visit_lambda_expression(self, node, context):
        return self.visit_expression(node, context)
    def visit_simple_case_expression(self, node, context):
        return self.visit_expression(node, context)
    def visit_string_literal(self, node, context):
        return self.visit_literal(node, context)
    def visit_binary_literal(self, node, context):
        return self.visit_literal(node, context)
    def visit_boolean_literal(self, node, context):
        return self.visit_literal(node, context)
    def visit_in_list_expression(self, node, context):
        return self.visit_expression(node, context)
    def visit_qualified_name_reference(self, node, context):
        return self.visit_expression(node, context)
    def visit_dereference_expression(self, node, context):
        return self.visit_expression(node, context)
    def visit_null_if_expression(self, node, context):
        return self.visit_expression(node, context)
    def visit_if_expression(self, node, context):
        return self.visit_expression(node, context)
    def visit_null_literal(self, node, context):
        return self.visit_literal(node, context)
    def visit_arithmetic_unary(self, node, context):
        return self.visit_expression(node, context)
    def visit_not_expression(self, node, context):
        return self.visit_expression(node, context)
    def visit_select_item(self, node, context):
        return self.visit_node(node, context)
    def visit_single_column(self, node, context):
        return self.visit_select_item(node, context)
    def visit_all_columns(self, node, context):
        return self.visit_select_item(node, context)
    def visit_searched_case_expression(self, node, context):
        return self.visit_expression(node, context)
    def visit_like_predicate(self, node, context):
        return self.visit_expression(node, context)
    def visit_is_not_null_predicate(self, node, context):
        return self.visit_expression(node, context)
    def visit_is_null_predicate(self, node, context):
        return self.visit_expression(node, context)
    def visit_array_constructor(self, node, context):
        return self.visit_expression(node, context)
    def visit_subscript_expression(self, node, context):
        return self.visit_expression(node, context)
    def visit_long_literal(self, node, context):
        return self.visit_literal(node, context)
    def visit_logical_binary_expression(self, node, context):
        return self.visit_expression(node, context)
    def visit_subquery_expression(self, node, context):
        return self.visit_expression(node, context)
    def visit_sort_item(self, node, context):
        return self.visit_node(node, context)
    # --- relations ---
    def visit_table(self, node, context):
        return self.visit_query_body(node, context)
    def visit_unnest(self, node, context):
        return self.visit_relation(node, context)
    def visit_values(self, node, context):
        return self.visit_query_body(node, context)
    def visit_row(self, node, context):
        return self.visit_node(node, context)
    def visit_table_subquery(self, node, context):
        return self.visit_query_body(node, context)
    def visit_aliased_relation(self, node, context):
        return self.visit_relation(node, context)
    def visit_sampled_relation(self, node, context):
        return self.visit_relation(node, context)
    def visit_join(self, node, context):
        return self.visit_relation(node, context)
    def visit_exists(self, node, context):
        return self.visit_expression(node, context)
    def visit_try_expression(self, node, context):
        return self.visit_expression(node, context)
    def visit_cast(self, node, context):
        return self.visit_expression(node, context)
    def visit_input_reference(self, node, context):
        return self.visit_expression(node, context)
    # --- window clauses ---
    def visit_window(self, node, context):
        return self.visit_node(node, context)
    def visit_window_frame(self, node, context):
        return self.visit_node(node, context)
    def visit_frame_bound(self, node, context):
        return self.visit_node(node, context)
    def visit_call_argument(self, node, context):
        return self.visit_node(node, context)
    def visit_table_element(self, node, context):
        return self.visit_node(node, context)
    # --- DDL / DML statements ---
    def visit_create_table(self, node, context):
        return self.visit_statement(node, context)
    def visit_create_table_as_select(self, node, context):
        return self.visit_statement(node, context)
    def visit_drop_table(self, node, context):
        return self.visit_statement(node, context)
    def visit_rename_table(self, node, context):
        return self.visit_statement(node, context)
    def visit_rename_column(self, node, context):
        return self.visit_statement(node, context)
    def visit_add_column(self, node, context):
        return self.visit_statement(node, context)
    def visit_create_view(self, node, context):
        return self.visit_statement(node, context)
    def visit_drop_view(self, node, context):
        return self.visit_statement(node, context)
    def visit_insert(self, node, context):
        return self.visit_node(node, context)
    def visit_call(self, node, context):
        return self.visit_node(node, context)
    def visit_delete(self, node, context):
        return self.visit_statement(node, context)
    # --- transactions ---
    def visit_start_transaction(self, node, context):
        return self.visit_statement(node, context)
    def visit_grant(self, node, context):
        return self.visit_statement(node, context)
    def visit_transaction_mode(self, node, context):
        return self.visit_node(node, context)
    def visit_isolation_level(self, node, context):
        return self.visit_transaction_mode(node, context)
    def visit_transaction_access_mode(self, node, context):
        return self.visit_transaction_mode(node, context)
    def visit_commit(self, node, context):
        return self.visit_statement(node, context)
    def visit_rollback(self, node, context):
        return self.visit_statement(node, context)
    def visit_at_time_zone(self, node, context):
        return self.visit_expression(node, context)
class DefaultTraversalVisitor(AstVisitor):
    """Visitor that recursively processes every child of each node.

    Composite visits return None after processing children; leaf-forwarding
    visits return the result of processing the single child.
    """
    def visit_extract(self, node, context):
        return self.process(node.expression, context)
    def visit_cast(self, node, context):
        return self.process(node.expression, context)
    def visit_arithmetic_binary(self, node, context):
        self.process(node.left, context)
        self.process(node.right, context)
        return None
    def visit_between_predicate(self, node, context):
        self.process(node.value, context)
        self.process(node.min, context)
        self.process(node.max, context)
        return None
    def visit_coalesce_expression(self, node, context):
        for operand in node.operands:
            self.process(operand, context)
        return None
    def visit_at_time_zone(self, node, context):
        self.process(node.value, context)
        self.process(node.time_zone, context)
        return None
    def visit_array_constructor(self, node, context):
        for expression in node.values:
            self.process(expression, context)
        return None
    def visit_subscript_expression(self, node, context):
        self.process(node.base, context)
        self.process(node.index, context)
        return None
    def visit_comparison_expression(self, node, context):
        self.process(node.left, context)
        self.process(node.right, context)
        return None
    def visit_query(self, node, context):
        self.process(node.query_body, context)
        for sort_item in node.order_by:
            self.process(sort_item, context)
        return None
    def visit_with(self, node, context):
        for query in node.queries:
            self.process(query, context)
        return None
    def visit_with_query(self, node, context):
        return self.process(node.query, context)
    def visit_select(self, node, context):
        for item in node.select_items:
            self.process(item, context)
        return None
    def visit_single_column(self, node, context):
        self.process(node.expression, context)
        return None
    def visit_when_clause(self, node, context):
        self.process(node.operand, context)
        self.process(node.result, context)
        return None
    def visit_in_predicate(self, node, context):
        self.process(node.value, context)
        self.process(node.value_list, context)
        return None
    def visit_function_call(self, node, context):
        for argument in node.arguments:
            self.process(argument, context)
        if node.window:
            self.process(node.window, context)
        return None
    def visit_dereference_expression(self, node, context):
        self.process(node.base, context)
        return None
    # NOTE(review): window traversal was left disabled by the original
    # author; kept verbatim for reference.
    """
    def visit_window(self, node, context)
        for expression in node.partition:
            self.process(expression, context)
        for sort_item in node.order_by:
            self.process(sort_item.sort_key, context)
        if node.frame:
            self.process(node.frame, context)
        return None
    def visit_window_frame(self, node, context)
        self.process(node.start, context)
        if node.end:
            self.process(node.end, context)
        return None
    def visit_frame_bound(self, node, context)
        if node.value:
            self.process(node.value, context)
        return None
    """
    def visit_simple_case_expression(self, node, context):
        self.process(node.operand, context)
        for clause in node.when_clauses:
            self.process(clause, context)
        if node.default_value:
            # FIX: was node.default_valuee (AttributeError at runtime)
            self.process(node.default_value, context)
        return None
    def visit_in_list_expression(self, node, context):
        for value in node.values:
            self.process(value, context)
        return None
    # FIX: was visit_None_if_expression -- a null->None find/replace artifact
    # that the dispatcher never calls (AstVisitor dispatches to
    # visit_null_if_expression). The old name is kept as an alias for any
    # direct callers.
    def visit_null_if_expression(self, node, context):
        self.process(node.first, context)
        self.process(node.second, context)
        return None
    visit_None_if_expression = visit_null_if_expression
    def visit_if_expression(self, node, context):
        self.process(node.condition, context)
        self.process(node.true_value, context)
        if node.false_value:
            self.process(node.false_value, context)
        return None
    def visit_try_expression(self, node, context):
        self.process(node.inner_expression, context)
        return None
    def visit_arithmetic_unary(self, node, context):
        return self.process(node.value, context)
    def visit_not_expression(self, node, context):
        return self.process(node.value, context)
    def visit_searched_case_expression(self, node, context):
        for clause in node.when_clauses:
            self.process(clause, context)
        if node.default_value:
            self.process(node.default_value, context)
        return None
    def visit_like_predicate(self, node, context):
        self.process(node.value, context)
        self.process(node.pattern, context)
        if node.escape is not None:
            self.process(node.escape, context)
        return None
    # FIX: same null->None rename damage as above; the *null* names are the
    # ones the dispatcher actually calls. Old names aliased for compat.
    def visit_is_not_null_predicate(self, node, context):
        return self.process(node.value, context)
    visit_is_not_None_predicate = visit_is_not_null_predicate
    def visit_is_null_predicate(self, node, context):
        return self.process(node.value, context)
    visit_is_None_predicate = visit_is_null_predicate
    def visit_logical_binary_expression(self, node, context):
        self.process(node.left, context)
        self.process(node.right, context)
        return None
    def visit_subquery_expression(self, node, context):
        return self.process(node.query, context)
    def visit_sort_item(self, node, context):
        return self.process(node.sort_key, context)
    def visit_query_specification(self, node, context):
        self.process(node.select, context)
        if node.from_:
            self.process(node.from_, context)
        if node.where:
            self.process(node.where, context)
        if node.group_by:
            grouping_elements = []
            if isinstance(node.group_by, SimpleGroupBy):
                grouping_elements = node.group_by.columns
            elif isinstance(node.group_by, GroupingSets):
                grouping_elements = node.group_by.sets
            for grouping_element in grouping_elements:
                self.process(grouping_element, context)
        if node.having:
            self.process(node.having, context)
        for sort_item in node.order_by:
            self.process(sort_item, context)
        return None
    def visit_union(self, node, context):
        for relation in node.relations:
            self.process(relation, context)
        return None
    def visit_intersect(self, node, context):
        for relation in node.relations:
            self.process(relation, context)
        return None
    def visit_except(self, node, context):
        self.process(node.left, context)
        self.process(node.right, context)
        return None
    def visit_values(self, node, context):
        for row in node.rows:
            self.process(row, context)
        return None
    def visit_row(self, node, context):
        for expression in node.items:
            self.process(expression, context)
        return None
    def visit_table_subquery(self, node, context):
        return self.process(node.query, context)
    def visit_aliased_relation(self, node, context):
        return self.process(node.relation, context)
    def visit_sampled_relation(self, node, context):
        self.process(node.relation, context)
        self.process(node.get_sample_percentage(), context)
        if node.get_columns_to_stratify_on().is_present():
            for expression in node.get_columns_to_stratify_on().get():
                self.process(expression, context)
        return None
    def visit_join(self, node, context):
        self.process(node.left, context)
        self.process(node.right, context)
        if isinstance(node.criteria, JoinOn):
            self.process(node.criteria.expression, context)
        elif isinstance(node.criteria, JoinUsing):
            # FIX: context was dropped here (process defaulted it to None)
            self.process(node.criteria.columns, context)
        return None
class DefaultExpressionTraversalVisitor(DefaultTraversalVisitor):
    """Expression traversal that does not descend into subqueries."""
    def __init__(self, line=None, pos=None):
        # FIX: no class in the visible hierarchy defines __init__, so
        # forwarding (line, pos) reached object.__init__ and raised
        # TypeError on instantiation. Keep the parameters for interface
        # compatibility and store them on the instance instead.
        super(DefaultExpressionTraversalVisitor, self).__init__()
        self.line = line
        self.pos = pos
    def visit_subquery_expression(self, node, context):
        # Deliberately skip subquery bodies.
        return None
| 33.323729
| 74
| 0.685367
| 2,459
| 19,661
| 5.295242
| 0.093941
| 0.214576
| 0.17395
| 0.18547
| 0.826665
| 0.799171
| 0.758083
| 0.705783
| 0.658244
| 0.632133
| 0
| 0.000329
| 0.226998
| 19,661
| 589
| 75
| 33.380306
| 0.856428
| 0.027415
| 0
| 0.649874
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.375315
| false
| 0.002519
| 0.005038
| 0.292191
| 0.758186
| 0
| 0
| 0
| 0
| null | 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 10
|
a32fad7e0bbc85a3e9f71f5a0d3f117e655f31fd
| 194
|
py
|
Python
|
replace_text/__init__.py
|
jakeogh/replace-text
|
f6ea64fae8b73c5b2125009bf93f1b4294b2e2a4
|
[
"Unlicense"
] | 1
|
2020-12-03T02:50:39.000Z
|
2020-12-03T02:50:39.000Z
|
replace_text/__init__.py
|
jakeogh/replace-text
|
f6ea64fae8b73c5b2125009bf93f1b4294b2e2a4
|
[
"Unlicense"
] | null | null | null |
replace_text/__init__.py
|
jakeogh/replace-text
|
f6ea64fae8b73c5b2125009bf93f1b4294b2e2a4
|
[
"Unlicense"
] | null | null | null |
#from .replace_text import replace_text
from .replace_text import append_unique_bytes_to_file
from .replace_text import remove_comments_from_bytes
from .replace_text import replace_text_in_file
| 38.8
| 53
| 0.891753
| 31
| 194
| 5.096774
| 0.387097
| 0.417722
| 0.379747
| 0.531646
| 0.405063
| 0.405063
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082474
| 194
| 4
| 54
| 48.5
| 0.88764
| 0.195876
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a3864e2520247e4f543f7ee40d86eaed233d17ea
| 19,728
|
py
|
Python
|
tests/ut/python/pipeline/parse/test_sequence_assign.py
|
httpsgithu/mindspore
|
c29d6bb764e233b427319cb89ba79e420f1e2c64
|
[
"Apache-2.0"
] | 1
|
2022-02-23T09:13:43.000Z
|
2022-02-23T09:13:43.000Z
|
tests/ut/python/pipeline/parse/test_sequence_assign.py
|
949144093/mindspore
|
c29d6bb764e233b427319cb89ba79e420f1e2c64
|
[
"Apache-2.0"
] | null | null | null |
tests/ut/python/pipeline/parse/test_sequence_assign.py
|
949144093/mindspore
|
c29d6bb764e233b427319cb89ba79e420f1e2c64
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020-2022 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
""" test enumerate"""
import numpy as np
import pytest
import mindspore.nn as nn
from mindspore.nn import Cell
from mindspore.ops import composite as C
from mindspore.ops import operations as P
from mindspore import Tensor, ms_function
from mindspore import context
def test_list_index_1d():
"""
Feature: List index assign
Description: Test list assign in pynative mode
Expectation: No exception.
"""
context.set_context(mode=context.PYNATIVE_MODE)
class Net(nn.Cell):
def construct(self):
list_ = [[1], [2, 2], [3, 3, 3]]
list_[0] = [100]
return list_
net = Net()
out = net()
assert list(out[0]) == [100]
assert list(out[1]) == [2, 2]
assert list(out[2]) == [3, 3, 3]
context.set_context(mode=context.GRAPH_MODE)
net = Net()
out = net()
assert list(out[0]) == [100]
assert list(out[1]) == [2, 2]
assert list(out[2]) == [3, 3, 3]
def test_list_neg_index_1d():
    """
    Feature: List index assign
    Description: Replace a top-level list element via a negative index and
        verify the result in pynative mode, then in graph mode.
    Expectation: No exception.
    """
    context.set_context(mode=context.PYNATIVE_MODE)
    class Net(nn.Cell):
        def construct(self):
            rows = [[1], [2, 2], [3, 3, 3]]
            rows[-3] = [100]
            return rows
    expected = [[100], [2, 2], [3, 3, 3]]
    net = Net()
    for got, want in zip(net(), expected):
        assert list(got) == want
    context.set_context(mode=context.GRAPH_MODE)
    for got, want in zip(net(), expected):
        assert list(got) == want
def test_list_index_2d():
    """
    Feature: List index assign
    Description: Assign into nested (2-D) list elements and verify the
        result in pynative mode, then in graph mode.
    Expectation: No exception.
    """
    context.set_context(mode=context.PYNATIVE_MODE)
    class Net(nn.Cell):
        def construct(self):
            rows = [[1], [2, 2], [3, 3, 3]]
            rows[1][0] = 200
            rows[1][1] = 201
            return rows
    expected = [[1], [200, 201], [3, 3, 3]]
    net = Net()
    for got, want in zip(net(), expected):
        assert list(got) == want
    context.set_context(mode=context.GRAPH_MODE)
    for got, want in zip(net(), expected):
        assert list(got) == want
def test_list_neg_index_2d():
    """
    Feature: List index assign
    Description: Assign into nested (2-D) list elements via negative indices
        and verify the result in pynative mode, then in graph mode.
    Expectation: No exception.
    """
    context.set_context(mode=context.PYNATIVE_MODE)
    class Net(nn.Cell):
        def construct(self):
            rows = [[1], [2, 2], [3, 3, 3]]
            rows[1][-2] = 20
            rows[1][-1] = 21
            return rows
    expected = [[1], [20, 21], [3, 3, 3]]
    net = Net()
    for got, want in zip(net(), expected):
        assert list(got) == want
    context.set_context(mode=context.GRAPH_MODE)
    for got, want in zip(net(), expected):
        assert list(got) == want
def test_list_index_3d():
    """
    Feature: List index assign
    Description: Assign into a 3-level nested list by positive indices, in both modes.
    Expectation: No exception.
    """

    class AssignNet(nn.Cell):
        def construct(self):
            data = [[1], [2, 2], [[3, 3, 3]]]
            data[2][0][0] = 300
            data[2][0][1] = 301
            data[2][0][2] = 302
            return data

    context.set_context(mode=context.PYNATIVE_MODE)
    model = AssignNet()
    result = model()
    assert list(result[0]) == [1]
    assert list(result[1]) == [2, 2]
    assert list(result[2][0]) == [300, 301, 302]
    context.set_context(mode=context.GRAPH_MODE)
    result = model()
    assert list(result[0]) == [1]
    assert list(result[1]) == [2, 2]
    assert list(result[2][0]) == [300, 301, 302]
def test_list_neg_index_3d():
    """
    Feature: List index assign
    Description: Assign into a 3-level nested list by negative indices, in both modes.
    Expectation: No exception.
    """
    context.set_context(mode=context.PYNATIVE_MODE)

    class AssignNet(nn.Cell):
        def construct(self):
            data = [[1], [2, 2], [[3, 3, 3]]]
            data[2][0][-3] = 30
            data[2][0][-2] = 31
            data[2][0][-1] = 32
            return data

    model = AssignNet()
    result = model()
    assert list(result[0]) == [1]
    assert list(result[1]) == [2, 2]
    assert list(result[2][0]) == [30, 31, 32]
    context.set_context(mode=context.GRAPH_MODE)
    result = model()
    assert list(result[0]) == [1]
    assert list(result[1]) == [2, 2]
    assert list(result[2][0]) == [30, 31, 32]
def test_list_index_1D_parameter():
    """
    Feature: List index assign
    Description: Overwrite a 1-D list element that holds a network input, graph mode.
    Expectation: No exception.
    """
    context.set_context(mode=context.GRAPH_MODE)

    class AssignNet(nn.Cell):
        def construct(self, x):
            data = [x]
            data[0] = 100
            return data

    AssignNet()(Tensor(0))


def test_list_index_2D_parameter():
    """
    Feature: List index assign
    Description: Overwrite a 2-D list element that holds a network input, graph mode.
    Expectation: No exception.
    """
    context.set_context(mode=context.GRAPH_MODE)

    class AssignNet(nn.Cell):
        def construct(self, x):
            data = [[x, x]]
            data[0][0] = 100
            return data

    AssignNet()(Tensor(0))


def test_list_index_3D_parameter():
    """
    Feature: List index assign
    Description: Overwrite a 3-D list element that holds a network input, graph mode.
    Expectation: No exception.
    """
    context.set_context(mode=context.GRAPH_MODE)

    class AssignNet(nn.Cell):
        def construct(self, x):
            data = [[[x, x]]]
            data[0][0][0] = 100
            return data

    AssignNet()(Tensor(0))
def test_const_list_index_3D_bprop():
    """
    Feature: List index assign
    Description: Backprop through a constant 3-D list whose element is replaced by the input.
    Expectation: No exception.
    """
    context.set_context(mode=context.GRAPH_MODE)

    class InnerNet(nn.Cell):
        def __init__(self):
            super(InnerNet, self).__init__()
            self.value = [[1], [2, 2], [[3, 3], [3, 3]]]
            self.relu = P.ReLU()

        def construct(self, input_x):
            list_x = self.value
            list_x[2][0][1] = input_x
            return self.relu(list_x[2][0][1])

    class GradWrap(nn.Cell):
        def __init__(self, net):
            super(GradWrap, self).__init__()
            self.net = net
            self.grad_all_with_sens = C.GradOperation(get_all=True, sens_param=True)

        def construct(self, x, sens):
            return self.grad_all_with_sens(self.net)(x, sens)

    data = Tensor(np.arange(2 * 3).reshape(2, 3))
    sens = Tensor(np.arange(2 * 3).reshape(2, 3))
    GradWrap(InnerNet())(data, sens)
def test_parameter_list_index_3D_bprop():
    """
    Feature: List index assign
    Description: Backprop through a 3-D list built from inputs whose element is replaced.
    Expectation: No exception.
    """
    context.set_context(mode=context.GRAPH_MODE)

    class InnerNet(nn.Cell):
        def __init__(self):
            super(InnerNet, self).__init__()
            self.value = [[1], [2, 2], [[3, 3], [3, 3]]]
            self.relu = P.ReLU()

        def construct(self, x, value):
            list_value = [[x], [x, x], [[x, x], [x, x]]]
            list_value[2][0][1] = value
            return self.relu(list_value[2][0][1])

    class GradWrap(nn.Cell):
        def __init__(self, net):
            super(GradWrap, self).__init__()
            self.net = net
            self.grad_all_with_sens = C.GradOperation(get_all=True, sens_param=True)

        def construct(self, x, value, sens):
            return self.grad_all_with_sens(self.net)(x, value, sens)

    data = Tensor(np.arange(2 * 3).reshape(2, 3))
    value = Tensor(np.ones((2, 3), np.int64))
    sens = Tensor(np.arange(2 * 3).reshape(2, 3))
    GradWrap(InnerNet())(data, value, sens)
class Net1(Cell):
    # Graph-compilable counterpart of compare_func1: slice-to-slice assignment.
    def construct(self, a, b, start=None, stop=None, step=None):
        # Copy b's [start:stop:step] slice onto the same slice of a.
        a[start:stop:step] = b[start:stop:step]
        # Return as a tuple so results can be compared across execution modes.
        return tuple(a)
def compare_func1(a, b, start=None, stop=None, step=None):
    """Plain-Python reference: copy b's slice onto a's matching slice.

    Mutates ``a`` in place and returns it as a tuple for mode-independent
    comparison against the network output.
    """
    window = slice(start, stop, step)
    a[window] = b[window]
    return tuple(a)
def test_list_slice_length_equal():
    """
    Feature: List assign
    Description: Slice-to-slice assignment where both slices have equal size.
    Expectation: No exception.
    """
    context.set_context(mode=context.PYNATIVE_MODE)
    expected = compare_func1([1, 2, 3, 4], [5, 6, 7, 8], 0, None, 2)
    src = [1, 2, 3, 4]
    rep = [5, 6, 7, 8]
    model = Net1()
    assert model(src, rep, 0, None, 2) == expected
    context.set_context(mode=context.GRAPH_MODE)
    # The same list objects are deliberately passed again for the graph run.
    assert model(src, rep, 0, None, 2) == expected
def test_list_slice_length_error():
    """
    Feature: List assign
    Description: Extended-slice assignment with mismatched lengths must fail.
    Expectation: ValueError.
    """
    context.set_context(mode=context.GRAPH_MODE)
    src = [1, 2, 3, 4, 5]
    rep = [5, 6, 7, 8]
    model = Net1()
    expected_msg = "attempt to assign sequence of size 2 to extended slice of size 3"
    with pytest.raises(ValueError) as err:
        model(src, rep, 0, None, 2)
    assert expected_msg in str(err.value)
    context.set_context(mode=context.PYNATIVE_MODE)
    with pytest.raises(ValueError) as err:
        model(src, rep, 0, None, 2)
    assert expected_msg in str(err.value)
def compare_func2(a, b, start=None, stop=None, step=None):
    """Plain-Python reference: assign ``b`` onto ``a[start:stop:step]``.

    Mutates ``a`` in place and returns it as a tuple for mode-independent
    comparison against the network output.
    """
    a[slice(start, stop, step)] = b
    return tuple(a)
class Net2(Cell):
    # Graph-compilable counterpart of compare_func2: assign a sequence to a slice.
    def construct(self, a, b, start=None, stop=None, step=None):
        # Replace the [start:stop:step] slice of a with b (may grow/shrink a).
        a[start:stop:step] = b
        # Return as a tuple so results can be compared across execution modes.
        return tuple(a)
def test_list_slice_shrink():
    """
    Feature: List assign
    Description: Slice assignment where the replacement is shorter than the slice.
    Expectation: No exception.
    """
    context.set_context(mode=context.PYNATIVE_MODE)
    expected = compare_func2([1, 2, 3, 4, 5, 6, 7, 8, 9], [11, 22, 33], 0, 5)
    model = Net2()
    assert model([1, 2, 3, 4, 5, 6, 7, 8, 9], [11, 22, 33], 0, 5) == expected
    context.set_context(mode=context.GRAPH_MODE)
    assert model([1, 2, 3, 4, 5, 6, 7, 8, 9], [11, 22, 33], 0, 5) == expected
def test_list_slice_insert():
    """
    Feature: List assign
    Description: Slice assignment where the replacement is longer than the slice.
    Expectation: No exception.
    """
    context.set_context(mode=context.PYNATIVE_MODE)
    expected = compare_func2([1, 2, 3, 4, 5, 6, 7, 8, 9], [11, 22, 33, 44, 55], 0, 1)
    model = Net2()
    assert model([1, 2, 3, 4, 5, 6, 7, 8, 9], [11, 22, 33, 44, 55], 0, 1) == expected
    context.set_context(mode=context.GRAPH_MODE)
    assert model([1, 2, 3, 4, 5, 6, 7, 8, 9], [11, 22, 33, 44, 55], 0, 1) == expected
def test_list_slice_assign():
    """
    Feature: List assign
    Description: Slice assignment whose start/stop lie outside the list bounds.
    Expectation: No exception.
    """
    context.set_context(mode=context.PYNATIVE_MODE)
    expected = compare_func2([1, 2, 3, 4, 5, 6, 7, 8, 9], [11, 22, 33, 44, 55], -12, 456)
    src = [1, 2, 3, 4, 5, 6, 7, 8, 9]
    rep = [11, 22, 33, 44, 55]
    model = Net2()
    assert model(src, rep, -12, 456) == expected
    context.set_context(mode=context.GRAPH_MODE)
    # The same list objects are deliberately passed again for the graph run.
    assert model(src, rep, -12, 456) == expected
def test_list_slice_extend():
    """
    Feature: List assign
    Description: Slice assignment beyond the end of the list appends the values.
    Expectation: No exception.
    """
    context.set_context(mode=context.PYNATIVE_MODE)
    model = Net2()
    expected = compare_func2([1, 2, 3, 4, 5, 6, 7, 8, 9], [11, 22, 33, 44, 55], 1234, 0)
    assert model([1, 2, 3, 4, 5, 6, 7, 8, 9], [11, 22, 33, 44, 55], 1234, 0) == expected
    context.set_context(mode=context.GRAPH_MODE)
    assert model([1, 2, 3, 4, 5, 6, 7, 8, 9], [11, 22, 33, 44, 55], 1234, 0) == expected
def test_list_slice_extend_front():
    """
    Feature: List assign
    Description: Empty slice at the front of the list inserts the values there.
    Expectation: No exception.
    """
    expected = compare_func2([1, 2, 3, 4, 5, 6, 7, 8, 9], [11, 22, 33, 44, 55], 0, 0)
    context.set_context(mode=context.PYNATIVE_MODE)
    model = Net2()
    assert model([1, 2, 3, 4, 5, 6, 7, 8, 9], [11, 22, 33, 44, 55], 0, 0) == expected
    context.set_context(mode=context.GRAPH_MODE)
    assert model([1, 2, 3, 4, 5, 6, 7, 8, 9], [11, 22, 33, 44, 55], 0, 0) == expected
def test_list_slice_extend_inner():
    """
    Feature: List assign
    Description: Empty slice in the middle of the list inserts the values there.
    Expectation: No exception.
    """
    expected = compare_func2([1, 2, 3, 4, 5, 6, 7, 8, 9], [11, 22, 33, 44, 55], 5, 5)
    context.set_context(mode=context.PYNATIVE_MODE)
    model = Net2()
    assert model([1, 2, 3, 4, 5, 6, 7, 8, 9], [11, 22, 33, 44, 55], 5, 5) == expected
    context.set_context(mode=context.GRAPH_MODE)
    assert model([1, 2, 3, 4, 5, 6, 7, 8, 9], [11, 22, 33, 44, 55], 5, 5) == expected
def test_list_slice_erase():
    """
    Feature: List assign
    Description: Assigning an empty list to a slice deletes those elements.
    Expectation: No exception.
    """
    expected = compare_func2([1, 2, 3, 4, 5, 6, 7], [], 1, 3)
    context.set_context(mode=context.PYNATIVE_MODE)
    model = Net2()
    assert model([1, 2, 3, 4, 5, 6, 7], [], 1, 3) == expected
    context.set_context(mode=context.GRAPH_MODE)
    assert model([1, 2, 3, 4, 5, 6, 7], [], 1, 3) == expected
def test_list_slice_tuple_without_step():
    """
    Feature: List assign
    Description: Assign a tuple to a contiguous (step-less) list slice.
    Expectation: No exception.
    """
    expected = compare_func2([1, 2, 3, 4, 5, 6, 7, 8, 9], (11, 22, 33), 0, 4, None)
    context.set_context(mode=context.PYNATIVE_MODE)
    model = Net2()
    assert model([1, 2, 3, 4, 5, 6, 7, 8, 9], (11, 22, 33), 0, 4, None) == expected
    context.set_context(mode=context.GRAPH_MODE)
    assert model([1, 2, 3, 4, 5, 6, 7, 8, 9], (11, 22, 33), 0, 4, None) == expected
def test_list_slice_tuple_with_step():
    """
    Feature: List assign
    Description: Assign a tuple to an extended (stepped) list slice.
    Expectation: No exception.
    """
    expected = compare_func2([1, 2, 3, 4, 5, 6, 7, 8, 9], (11, 22, 33), 1, None, 3)
    context.set_context(mode=context.PYNATIVE_MODE)
    src = [1, 2, 3, 4, 5, 6, 7, 8, 9]
    model = Net2()
    assert model(src, (11, 22, 33), 1, None, 3) == expected
    context.set_context(mode=context.GRAPH_MODE)
    # The same list object is deliberately passed again for the graph run.
    assert model(src, (11, 22, 33), 1, None, 3) == expected
def test_list_double_slice():
    """
    Feature: List assign
    Description: Assign through two chained slice expressions; compiled function
        and Cell must agree.
    Expectation: ValueError
    """
    context.set_context(mode=context.PYNATIVE_MODE)

    @ms_function
    def double_slice_assign(a, b, start1, stop1, step1, start2, stop2, step2):
        a[start1:stop1:step1][start2: stop2: step2] = b
        return a

    class DoubleSliceNet(Cell):
        def construct(self, a, b, start1, stop1, step1, start2, stop2, step2):
            a[start1:stop1:step1][start2: stop2: step2] = b
            return tuple(a)

    model = DoubleSliceNet()
    src = [1, 2, 3, 4, 5, 6, 7, 8, 9]
    rep = [11, 22, 33]
    assert double_slice_assign(src, rep, 0, None, 1, 0, None, 3) == model(src, rep, 0, None, 1, 0, None, 3)
def convert_tuple(a):
    """Return *a* as a tuple, shallow-converting any list element to a tuple.

    Non-list elements are kept as-is; nesting deeper than one level is not
    converted.
    """
    return tuple(tuple(item) if isinstance(item, list) else item for item in a)
def test_list_in_list_slice():
    """
    Feature: List assign
    Description: Slice assignment targeting a list nested inside another list.
    Expectation: No exception.
    """

    class SliceNet(Cell):
        def construct(self, a, b, index, start=None, stop=None, step=None):
            a[index][start:stop:step] = b
            return tuple(a)

    def reference(a, b, index, start=None, stop=None, step=None):
        a[index][start:stop:step] = b
        return convert_tuple(a)

    expected = reference([1, 2, [1, 2, 3, 4, 5, 6, 7], 8, 9], [1111, 2222], 2, 1, None, 3)
    context.set_context(mode=context.PYNATIVE_MODE)
    model = SliceNet()
    src = [1, 2, [1, 2, 3, 4, 5, 6, 7], 8, 9]
    rep = [1111, 2222]
    assert convert_tuple(model(src, rep, 2, 1, None, 3)) == expected
    context.set_context(mode=context.GRAPH_MODE)
    # The same list objects are deliberately passed again for the graph run.
    assert convert_tuple(model(src, rep, 2, 1, None, 3)) == expected
def test_list_slice_negative_step():
    """
    Feature: List assign
    Description: Assign onto a slice traversed with a negative step.
    Expectation: No exception.
    """
    expected = compare_func2([1, 2, 3, 4, 5, 6, 7, 8, 9], [33, 44, 55], -1, -9, -3)
    context.set_context(mode=context.PYNATIVE_MODE)
    model = Net2()
    assert model([1, 2, 3, 4, 5, 6, 7, 8, 9], [33, 44, 55], -1, -9, -3) == expected
    context.set_context(mode=context.GRAPH_MODE)
    assert model([1, 2, 3, 4, 5, 6, 7, 8, 9], [33, 44, 55], -1, -9, -3) == expected
def test_graph_list_slice_assign_extended_number():
    """
    Feature: List assign
    Description: Assigning a bare number to an extended slice must fail.
    Expectation: TypeError.
    """
    src = [1, 2, 3, 4, 5, 6]
    model = Net2()
    expected_msg = "must assign iterable to extended slice"
    context.set_context(mode=context.PYNATIVE_MODE)
    with pytest.raises(TypeError) as err:
        model(src, 1, 0, None, 2)
    assert expected_msg in str(err.value)
    context.set_context(mode=context.GRAPH_MODE)
    with pytest.raises(TypeError) as err:
        model(src, 1, 0, None, 2)
    assert expected_msg in str(err.value)
def test_graph_list_slice_assign_number():
    """
    Feature: List assign
    Description: Assigning a bare number to a contiguous slice must fail.
    Expectation: TypeError.
    """
    src = [1, 2, 3, 4, 5, 6]
    model = Net2()
    expected_msg = "can only assign an iterable"
    context.set_context(mode=context.PYNATIVE_MODE)
    with pytest.raises(TypeError) as err:
        model(src, 1, 0, None, 1)
    assert expected_msg in str(err.value)
    context.set_context(mode=context.GRAPH_MODE)
    with pytest.raises(TypeError) as err:
        model(src, 1, 0, None, 1)
    assert expected_msg in str(err.value)
def test_list_slice_negetive_error():
    """
    Feature: List assign
    Description: Negative-step slice assignment with mismatched lengths must fail.
        (Function name typo "negetive" kept: it is the test's public identifier.)
    Expectation: ValueError
    """
    src = [1, 2, 3, 4, 5, 6, 7, 8, 9]
    rep = [33, 44, 55]
    model = Net2()
    expected_msg = "attempt to assign sequence of size 3 to extended slice of size 1"
    context.set_context(mode=context.PYNATIVE_MODE)
    with pytest.raises(ValueError) as err:
        model(src, rep, -1, -3, -3)
    assert expected_msg in str(err.value)
    context.set_context(mode=context.GRAPH_MODE)
    with pytest.raises(ValueError) as err:
        model(src, rep, -1, -3, -3)
    assert expected_msg in str(err.value)
| 27.707865
| 95
| 0.583739
| 3,053
| 19,728
| 3.637733
| 0.071733
| 0.011165
| 0.076535
| 0.094543
| 0.873312
| 0.860436
| 0.833874
| 0.80515
| 0.789843
| 0.780029
| 0
| 0.074569
| 0.267893
| 19,728
| 711
| 96
| 27.746835
| 0.694385
| 0.144667
| 0
| 0.709534
| 0
| 0
| 0.023645
| 0
| 0
| 0
| 0
| 0
| 0.152993
| 1
| 0.119734
| false
| 0
| 0.017738
| 0.004435
| 0.223947
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a38f50dd095a39636ae5bc11181d6a64953eb2ff
| 197
|
py
|
Python
|
tests/run_all_tests.py
|
GRV96/jazal
|
b89f4303909ae53db7504b529a706afa8d07c81a
|
[
"MIT"
] | null | null | null |
tests/run_all_tests.py
|
GRV96/jazal
|
b89f4303909ae53db7504b529a706afa8d07c81a
|
[
"MIT"
] | 1
|
2021-10-06T20:21:24.000Z
|
2021-10-06T20:21:24.000Z
|
tests/run_all_tests.py
|
GRV96/path_checker
|
b89f4303909ae53db7504b529a706afa8d07c81a
|
[
"MIT"
] | null | null | null |
from os import system
system("pytest path_util_tests.py")
system("pytest path_checker_tests.py")
system("pytest reactive_path_checker_tests.py")
system("pytest missing_path_arg_warner_tests.py")
| 24.625
| 49
| 0.827411
| 31
| 197
| 4.903226
| 0.451613
| 0.315789
| 0.256579
| 0.375
| 0.394737
| 0.394737
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071066
| 197
| 7
| 50
| 28.142857
| 0.830601
| 0
| 0
| 0
| 0
| 0
| 0.654822
| 0.42132
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.2
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6ebbecc09be5ba14ae4dd34a28648e5a5edc253d
| 72,793
|
py
|
Python
|
MPKTA_Final_Project.py
|
Iqrar99/MPKT-A_Final_Project
|
642d5fad8db492f2591994f4e8737dee5de5e6ed
|
[
"Apache-2.0"
] | null | null | null |
MPKTA_Final_Project.py
|
Iqrar99/MPKT-A_Final_Project
|
642d5fad8db492f2591994f4e8737dee5de5e6ed
|
[
"Apache-2.0"
] | null | null | null |
MPKTA_Final_Project.py
|
Iqrar99/MPKT-A_Final_Project
|
642d5fad8db492f2591994f4e8737dee5de5e6ed
|
[
"Apache-2.0"
] | null | null | null |
# MPKT-A final project: directory of Indonesian traditional foods (tkinter GUI)
# (original author's notes: "semoga lancar" / "aminnn" — hoping it goes smoothly)
from tkinter import *
lst = []  # description texts read from the data file; indexed by the detail windows


def readf(path='all.txt', count=68):
    """Parse up to *count* '<deskripsi>' ... '<end>' sections from *path* into ``lst``.

    Each appended entry is the raw text of the lines between a line containing
    '<deskripsi>' and the next line containing '<end>' (marker lines excluded).

    Fixes/generalizations vs. the original:
    - the original looped forever at EOF when the file held fewer than 68
      sections; this version stops at end of file instead;
    - the file name and section count are now parameters whose defaults
      ('all.txt', 68) preserve the original call signature ``readf()``.
    """
    with open(path, 'r') as f:
        line = ''
        for _ in range(count):
            # Skip ahead to the next description header.
            while '<deskripsi>' not in line:
                line = f.readline()
                if not line:  # EOF: fewer sections than requested
                    return
            section = ''
            cur = ''
            # Accumulate lines until the terminator (first pass adds '').
            while '<end>' not in cur:
                section += cur
                cur = f.readline()
                if not cur:  # EOF guard: unterminated section
                    break
            lst.append(section)
            line = f.readline()
class DirektoriMakanan():
def __init__(self, master=None):
    """Build the 34-province menu window and enter the Tk main loop.

    Fix: the original signature was ``master=Tk()``, which instantiated a Tk
    root at class-definition (import) time — the mutable-default-argument
    trap. The root is now created lazily when no master is supplied; passing
    an explicit master behaves exactly as before.

    The 34 copy-pasted button blocks are replaced by one data-driven loop
    that preserves the original labels, grid layout (provinces 1-17 in
    column 3, 18-34 in column 7, rows 1-17), handler wiring and the public
    ``self.bprov1`` ... ``self.bprov34`` attributes.
    """
    self.master = Tk() if master is None else master
    self.master.minsize(width=500, height=600)
    self.master.maxsize(width=500, height=600)
    self.master.title("Selamat Datang di McDones (Direktori Macanan Tradisional)")
    self.master.judul = Label(self.master, text="Pilih provinsi yang ingin anda ketahui", font="Arial 16 bold")
    self.master.judul.grid(row=0, column=3, columnspan=8)
    # Labels in handler order (__prov1 .. __prov34); 'Bengukulu' spelling kept
    # as in the original UI string.
    provinces = (
        'Aceh', 'Sumatera Utara', 'Sumatera Barat', 'Riau', 'Kepulauan Riau',
        'Jambi', 'Bengukulu', 'Sumatera Selatan', 'Kepulauan Bangka Belitung',
        'Lampung', 'Banten', 'Jawa Barat', 'DKI Jakarta', 'Jawa Tengah',
        'DI Yogyakarta', 'Jawa Timur', 'Bali', 'NTB', 'NTT',
        'Kalimantan Utara', 'Kalimantan Barat', 'Kalimantan Tengah',
        'Kalimantan Selatan', 'Kalimantan Timur', 'Gorontalo',
        'Sulawesi Utara', 'Sulawesi Barat', 'Sulawesi Tengah',
        'Sulawesi Selatan', 'Sulawesi Tenggara', 'Maluku Utara', 'Maluku',
        'Papua Barat', 'Papua',
    )
    for idx, name in enumerate(provinces, start=1):
        # Handlers are private methods, so their attribute names are mangled.
        handler = getattr(self, '_DirektoriMakanan__prov%d' % idx)
        button = Button(self.master, text=name, command=handler, width=25)
        if idx <= 17:
            button.grid(row=idx, column=3, columnspan=4)
        else:
            button.grid(row=idx - 17, column=7, columnspan=4)
        setattr(self, 'bprov%d' % idx, button)
    self.master.mainloop()
def __prov1(self):
    """Menu window listing Aceh's two dishes."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text='Mie Aceh', command=self.aceh1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text='Kue Timpan', command=self.aceh2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()

def aceh1(self):
    """Detail window: Mie Aceh (description lst[0])."""
    win = Tk()
    win.minsize(width=800, height=600)
    win.maxsize(width=800, height=600)
    win.title("Makanan tradisional Aceh")
    cv = Canvas(win, width=300, height=300)
    cv.pack()
    img = PhotoImage(master=cv, file="Mie Aceh.png")
    cv.create_image(5, 5, anchor=NW, image=img)
    body = Text(win, font="Arial 12")
    body.insert(INSERT, lst[0])
    body.pack()
    win.mainloop()

def aceh2(self):
    """Detail window: Kue Timpan (description lst[1])."""
    win = Tk()
    win.minsize(width=800, height=600)
    win.maxsize(width=800, height=600)
    win.title("Makanan tradisional Aceh")
    cv = Canvas(win, width=300, height=300)
    cv.pack()
    img = PhotoImage(master=cv, file="Kue Timpan.png")
    cv.create_image(5, 5, anchor=NW, image=img)
    body = Text(win, font="Arial 12")
    body.insert(INSERT, lst[1])
    body.pack()
    win.mainloop()
def __prov2(self):
    """Menu window listing North Sumatra's two dishes."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text='Dekke Na Niura', command=self.sumut1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text='Bika Ambon', command=self.sumut2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()

def sumut1(self):
    """Detail window: Dekke Na Niura (description lst[2])."""
    win = Tk()
    win.minsize(width=800, height=600)
    win.maxsize(width=800, height=600)
    win.title("Makanan tradisional Sumatera Utara")
    cv = Canvas(win, width=300, height=300)
    cv.pack()
    img = PhotoImage(master=cv, file="Dekke Na Niura.png")
    cv.create_image(5, 5, anchor=NW, image=img)
    body = Text(win, font="Arial 12")
    body.insert(INSERT, lst[2])
    body.pack()
    win.mainloop()

def sumut2(self):
    """Detail window: Bika Ambon (description lst[3])."""
    win = Tk()
    win.minsize(width=800, height=600)
    win.maxsize(width=800, height=600)
    win.title("Makanan tradisional Sumatera Utara")
    cv = Canvas(win, width=300, height=300)
    cv.pack()
    img = PhotoImage(master=cv, file="Bika Ambon.png")
    cv.create_image(5, 5, anchor=NW, image=img)
    body = Text(win, font="Arial 12")
    body.insert(INSERT, lst[3])
    body.pack()
    win.mainloop()
def __prov3(self):
    """Menu window listing West Sumatra's two dishes."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text='Rendang', command=self.sumbar1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text='Ampiang Dadiah', command=self.sumbar2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()

def sumbar1(self):
    """Detail window: Rendang (description lst[4])."""
    win = Tk()
    win.minsize(width=800, height=600)
    win.maxsize(width=800, height=600)
    win.title("Makanan tradisional Sumatera Barat")
    cv = Canvas(win, width=300, height=300)
    cv.pack()
    img = PhotoImage(master=cv, file="Rendang.png")
    cv.create_image(5, 5, anchor=NW, image=img)
    body = Text(win, font="Arial 12")
    body.insert(INSERT, lst[4])
    body.pack()
    win.mainloop()

def sumbar2(self):
    """Detail window: Ampiang Dadiah (description lst[5])."""
    win = Tk()
    win.minsize(width=800, height=600)
    win.maxsize(width=800, height=600)
    win.title("Makanan tradisional Sumatera Barat")
    cv = Canvas(win, width=300, height=300)
    cv.pack()
    img = PhotoImage(master=cv, file="Ampiang Dadiah.png")
    cv.create_image(5, 5, anchor=NW, image=img)
    body = Text(win, font="Arial 12")
    body.insert(INSERT, lst[5])
    body.pack()
    win.mainloop()
def __prov4(self):
    """Menu window listing Riau's two dishes."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text='Bolu Kemojo', command=self.riau1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text='Asidah', command=self.riau2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()

def riau1(self):
    """Detail window: Bolu Kemojo (description lst[6])."""
    win = Tk()
    win.minsize(width=800, height=600)
    win.maxsize(width=800, height=600)
    win.title("Makanan tradisional Riau")
    cv = Canvas(win, width=300, height=300)
    cv.pack()
    img = PhotoImage(master=cv, file="Bolu Kemojo.png")
    cv.create_image(5, 5, anchor=NW, image=img)
    body = Text(win, font="Arial 12")
    body.insert(INSERT, lst[6])
    body.pack()
    win.mainloop()

def riau2(self):
    """Detail window: Asidah (description lst[7])."""
    win = Tk()
    win.minsize(width=800, height=600)
    win.maxsize(width=800, height=600)
    win.title("Makanan tradisional Riau")
    cv = Canvas(win, width=300, height=300)
    cv.pack()
    img = PhotoImage(master=cv, file="Asidah.png")
    cv.create_image(5, 5, anchor=NW, image=img)
    body = Text(win, font="Arial 12")
    body.insert(INSERT, lst[7])
    body.pack()
    win.mainloop()
def __prov5(self):
    """Menu window listing Riau Islands' two dishes."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text='Luti Gendang', command=self.kriau1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text='Gong gong', command=self.kriau2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()

def kriau1(self):
    """Detail window: Luti Gendang (description lst[8])."""
    win = Tk()
    win.minsize(width=800, height=600)
    win.maxsize(width=800, height=600)
    win.title("Makanan tradisional Kepulauan Riau")
    cv = Canvas(win, width=300, height=300)
    cv.pack()
    img = PhotoImage(master=cv, file="Luti Gendang.png")
    cv.create_image(5, 5, anchor=NW, image=img)
    body = Text(win, font="Arial 12")
    body.insert(INSERT, lst[8])
    body.pack()
    win.mainloop()

def kriau2(self):
    """Detail window: Gong gong (description lst[9])."""
    win = Tk()
    win.minsize(width=800, height=600)
    win.maxsize(width=800, height=600)
    win.title("Makanan tradisional Kepulauan Riau")
    cv = Canvas(win, width=300, height=300)
    cv.pack()
    img = PhotoImage(master=cv, file="Gong Gong.png")
    cv.create_image(5, 5, anchor=NW, image=img)
    body = Text(win, font="Arial 12")
    body.insert(INSERT, lst[9])
    body.pack()
    win.mainloop()
def __prov6(self):
    """Menu window listing Jambi's two dishes."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text='Tempoyak', command=self.jambi1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text='Kue Padamaran', command=self.jambi2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()

def jambi1(self):
    """Detail window: Tempoyak (description lst[10])."""
    win = Tk()
    win.minsize(width=800, height=600)
    win.maxsize(width=800, height=600)
    win.title("Makanan tradisional Jambi")
    cv = Canvas(win, width=300, height=300)
    cv.pack()
    img = PhotoImage(master=cv, file="Tempoyak.png")
    cv.create_image(5, 5, anchor=NW, image=img)
    body = Text(win, font="Arial 12")
    body.insert(INSERT, lst[10])
    body.pack()
    win.mainloop()

def jambi2(self):
    """Detail window: Kue Padamaran (description lst[11])."""
    win = Tk()
    win.minsize(width=800, height=600)
    win.maxsize(width=800, height=600)
    win.title("Makanan tradisional Jambi")
    cv = Canvas(win, width=300, height=300)
    cv.pack()
    img = PhotoImage(master=cv, file="Padamaran.png")
    cv.create_image(5, 5, anchor=NW, image=img)
    body = Text(win, font="Arial 12")
    body.insert(INSERT, lst[11])
    body.pack()
    win.mainloop()
def __prov7(self):
master0 = Tk()
master0.minsize(width = 450, height = 100)
master0.maxsize(width = 450, height = 100)
master0.title("Direktori Makanan Tradisional Nusantara")
master0.judul = Label(master0, text = "Pilih makanan :)", font = "Arial 16 bold")
master0.judul.grid(row = 0, column = 2, columnspan = 4)
master0.bprov1 = Button(master0, text='Lepek Binti', command=self.beng1, width = 25, height=3)
master0.bprov1.grid(row = 1, column = 2, columnspan = 4)
master0.bprov2 = Button(master0, text='Bagar Hiu', command=self.beng2, width = 25, height=3)
master0.bprov2.grid(row = 1, column = 6, columnspan = 4)
master0.mainloop()
def beng1(self):
master2 = Tk()
master2.minsize(width = 800, height = 600)
master2.maxsize(width = 800, height = 600)
master2.title("Makanan tradisional Bengkulu")
canvas = Canvas(master2, width = 300, height = 300)
canvas.pack()
img = PhotoImage(master = canvas,file="Lepek Binti.png")
canvas.create_image(5,5, anchor=NW, image=img)
text1 = lst[12]
text2 = Text(master2, font = "Arial 12")
text2.insert(INSERT, text1)
text2.pack()
master2.mainloop()
def beng2(self):
    """Open a fixed 800x600 detail window for Bagar Hiu (Bengkulu) with photo and text from lst[13]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Bengkulu")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Bagar Hiu.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[13])
    description.pack()
    window.mainloop()
def __prov8(self):
    """Dish-selection menu for province 8 (Sumatera Selatan): two buttons open detail windows."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text="Pempek Palembang", command=self.sumsel1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text="Tekwan Palembang", command=self.sumsel2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()
def sumsel1(self):
    """Open a fixed 800x600 detail window for Pempek Palembang with photo and text from lst[14]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Sumatera Selatan")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Pempek Palembang.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[14])
    description.pack()
    window.mainloop()
def sumsel2(self):
    """Open a fixed 800x600 detail window for Tekwan Palembang with photo and text from lst[15]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Sumatera Selatan")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Tekwan Palembang.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[15])
    description.pack()
    window.mainloop()
def __prov9(self):
    """Dish-selection menu for province 9 (Bangka Belitung): two buttons open detail windows."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text="Martabak Bangka", command=self.kbang1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text="Belacan Belitung", command=self.kbang2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()
def kbang1(self):
    """Open a fixed 800x600 detail window for Martabak Bangka with photo and text from lst[16]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Bangka Belitung")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Martabak Bangka.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[16])
    description.pack()
    window.mainloop()
def kbang2(self):
    """Open a fixed 800x600 detail window for Belacan Belitung with photo and text from lst[17]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Bangka Belitung")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    # NOTE(review): filename "Belaca Belitung.png" differs from the menu label
    # 'Belacan Belitung' — confirm the actual asset name on disk.
    photo = PhotoImage(master=photo_area, file="Belaca Belitung.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[17])
    description.pack()
    window.mainloop()
def __prov10(self):
    """Dish-selection menu for province 10 (Lampung): two buttons open detail windows."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text="Seruit Lampung", command=self.lamp1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text="Gulai Taboh", command=self.lamp2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()
def lamp1(self):
    """Open a fixed 800x600 detail window for Seruit Lampung with photo and text from lst[18]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Lampung")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Seruit Lampung.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[18])
    description.pack()
    window.mainloop()
def lamp2(self):
    """Open a fixed 800x600 detail window for Gulai Taboh (Lampung) with photo and text from lst[19]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Lampung")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Gulai Taboh.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[19])
    description.pack()
    window.mainloop()
def __prov11(self):
    """Dish-selection menu for province 11 (DKI Jakarta): two buttons open detail windows."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text="Soto Betawi", command=self.dki1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text="Sayur Babanci", command=self.dki2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()
def dki1(self):
    """Open a fixed 800x600 detail window for Soto Betawi (DKI Jakarta) with photo and text from lst[20]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional DKI Jakarta")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Soto Betawi.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[20])
    description.pack()
    window.mainloop()
def dki2(self):
    """Open a fixed 800x600 detail window for Sayur Babanci (DKI Jakarta) with photo and text from lst[21]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional DKI Jakarta")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Sayur Babanci.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[21])
    description.pack()
    window.mainloop()
def __prov12(self):
    """Dish-selection menu for province 12 (Banten): two buttons open detail windows."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text="Nasi Sumsum", command=self.ban1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text="Sate Bandeng", command=self.ban2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()
def ban1(self):
    """Open a fixed 800x600 detail window for Nasi Sumsum (Banten) with photo and text from lst[22].

    Fix: window title read "Makanan tradisional BAnten" — corrected the
    capitalisation to "Banten", matching the sibling method ban2.
    """
    master2 = Tk()
    master2.minsize(width=800, height=600)
    master2.maxsize(width=800, height=600)
    master2.title("Makanan tradisional Banten")  # was "BAnten"
    canvas = Canvas(master2, width=300, height=300)
    canvas.pack()
    # Keep a local reference to the image so Tk does not garbage-collect it.
    img = PhotoImage(master=canvas, file="Nasi Sumsum.png")
    canvas.create_image(5, 5, anchor=NW, image=img)
    text2 = Text(master2, font="Arial 12")
    text2.insert(INSERT, lst[22])
    text2.pack()
    master2.mainloop()
def ban2(self):
    """Open a fixed 800x600 detail window for Sate Bandeng (Banten) with photo and text from lst[23]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Banten")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Sate Bandeng.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[23])
    description.pack()
    window.mainloop()
def __prov13(self):
    """Dish-selection menu for province 13 (Jawa Barat): two buttons open detail windows."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text="Nasi Jamblang Daun Jati", command=self.jabar1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text="Dorokdok", command=self.jabar2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()
def jabar1(self):
    """Open a fixed 800x600 detail window for Nasi Jamblang Daun Jati (Jawa Barat) with text from lst[24]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Jawa Barat")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Nasi Jamblang Daun Jati.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[24])
    description.pack()
    window.mainloop()
def jabar2(self):
    """Open a fixed 800x600 detail window for Dorokdok (Jawa Barat) with photo and text from lst[25]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Jawa Barat")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Dorokdok.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[25])
    description.pack()
    window.mainloop()
def __prov14(self):
    """Dish-selection menu for province 14 (DI Yogyakarta): two buttons open detail windows."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text="Tiwul", command=self.diy1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text="Jadah Tempe", command=self.diy2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()
def diy1(self):
    """Open a fixed 800x600 detail window for Tiwul (DI Yogyakarta) with photo and text from lst[26]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional DI Yogyakarta")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Tiwul.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[26])
    description.pack()
    window.mainloop()
def diy2(self):
    """Open a fixed 800x600 detail window for Jadah Tempe (DI Yogyakarta) with photo and text from lst[27]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional DI Yogyakarta")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Jadah Tempe.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[27])
    description.pack()
    window.mainloop()
def __prov15(self):
    """Dish-selection menu for province 15 (Jawa Tengah): two buttons open detail windows.

    Fix: the second button read 'Nasi Gerombyang'; the dish (and the image
    file loaded by jateng2, "Nasi Grombyang.png") is spelled 'Nasi Grombyang'.
    """
    master0 = Tk()
    master0.minsize(width=450, height=100)
    master0.maxsize(width=450, height=100)
    master0.title("Direktori Makanan Tradisional Nusantara")
    master0.judul = Label(master0, text="Pilih makanan :)", font="Arial 16 bold")
    master0.judul.grid(row=0, column=2, columnspan=4)
    master0.bprov1 = Button(master0, text='Soto Kudus', command=self.jateng1, width=25, height=3)
    master0.bprov1.grid(row=1, column=2, columnspan=4)
    master0.bprov2 = Button(master0, text='Nasi Grombyang', command=self.jateng2, width=25, height=3)  # was 'Nasi Gerombyang'
    master0.bprov2.grid(row=1, column=6, columnspan=4)
    master0.mainloop()
def jateng1(self):
    """Open a fixed 800x600 detail window for Soto Kudus (Jawa Tengah) with photo and text from lst[28].

    Fix: window title read "Makanan tradisional Jawa TEngah" — corrected the
    capitalisation to "Jawa Tengah".
    """
    master2 = Tk()
    master2.minsize(width=800, height=600)
    master2.maxsize(width=800, height=600)
    master2.title("Makanan tradisional Jawa Tengah")  # was "Jawa TEngah"
    canvas = Canvas(master2, width=300, height=300)
    canvas.pack()
    # Keep a local reference to the image so Tk does not garbage-collect it.
    img = PhotoImage(master=canvas, file="Soto Kudus.png")
    canvas.create_image(5, 5, anchor=NW, image=img)
    text2 = Text(master2, font="Arial 12")
    text2.insert(INSERT, lst[28])
    text2.pack()
    master2.mainloop()
def jateng2(self):
    """Open a fixed 800x600 detail window for Nasi Grombyang (Jawa Tengah) with photo and text from lst[29].

    Fix: window title read "Makanan tradisional Jawa TEngah" — corrected the
    capitalisation to "Jawa Tengah".
    """
    master2 = Tk()
    master2.minsize(width=800, height=600)
    master2.maxsize(width=800, height=600)
    master2.title("Makanan tradisional Jawa Tengah")  # was "Jawa TEngah"
    canvas = Canvas(master2, width=300, height=300)
    canvas.pack()
    # Keep a local reference to the image so Tk does not garbage-collect it.
    img = PhotoImage(master=canvas, file="Nasi Grombyang.png")
    canvas.create_image(5, 5, anchor=NW, image=img)
    text2 = Text(master2, font="Arial 12")
    text2.insert(INSERT, lst[29])
    text2.pack()
    master2.mainloop()
def __prov16(self):
    """Dish-selection menu for province 16 (Jawa Timur): two buttons open detail windows."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text="Rujak Cingur", command=self.jatim1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text="Rawon", command=self.jatim2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()
def jatim1(self):
    """Open a fixed 800x600 detail window for Rujak Cingur (Jawa Timur) with photo and text from lst[30]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Jawa Timur")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Rujak Cingur.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[30])
    description.pack()
    window.mainloop()
def jatim2(self):
    """Open a fixed 800x600 detail window for Rawon (Jawa Timur) with photo and text from lst[31]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Jawa Timur")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Rawon.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[31])
    description.pack()
    window.mainloop()
def __prov17(self):
    """Dish-selection menu for province 17 (Bali): two buttons open detail windows."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text="Sate Lilit", command=self.bali1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text="Ayam Betutu", command=self.bali2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()
def bali1(self):
    """Open a fixed 800x600 detail window for Sate Lilit (Bali) with photo and text from lst[32]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Bali")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Sate Lilit.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[32])
    description.pack()
    window.mainloop()
def bali2(self):
    """Open a fixed 800x600 detail window for Ayam Betutu (Bali) with photo and text from lst[33]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Bali")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Ayam Betutu.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[33])
    description.pack()
    window.mainloop()
def __prov18(self):
    """Dish-selection menu for province 18 (NTB): two buttons open detail windows."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text="Ayam Taliwang", command=self.ntb1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text="Sate Bulayak", command=self.ntb2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()
def ntb1(self):
    """Open a fixed 800x600 detail window for Ayam Taliwang (NTB) with photo and text from lst[34]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional NTB")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Ayam Taliwang.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[34])
    description.pack()
    window.mainloop()
def ntb2(self):
    """Open a fixed 800x600 detail window for Sate Bulayak (NTB) with photo and text from lst[35]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional NTB")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Sate Bulayak.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[35])
    description.pack()
    window.mainloop()
def __prov19(self):
    """Dish-selection menu for province 19 (NTT): two buttons open detail windows."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text="Se'i", command=self.ntt1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text="Tapa Kolo", command=self.ntt2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()
def ntt1(self):
    """Open a fixed 800x600 detail window for Se'i (NTT) with photo and text from lst[36]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional NTT")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Sei.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[36])
    description.pack()
    window.mainloop()
def ntt2(self):
    """Open a fixed 800x600 detail window for Tapa Kolo (NTT) with photo and text from lst[37]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional NTT")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Tapa Kolo.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[37])
    description.pack()
    window.mainloop()
def __prov20(self):
    """Dish-selection menu for province 20 (Kalimantan Utara): two buttons open detail windows."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text="Lawa", command=self.kalut1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text="Kepiting Soka", command=self.kalut2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()
def kalut1(self):
    """Open a fixed 800x600 detail window for Lawa (Kalimantan Utara) with photo and text from lst[38]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Kalimantan Utara")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Lawa.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[38])
    description.pack()
    window.mainloop()
def kalut2(self):
    """Open a fixed 800x600 detail window for Kepiting Soka (Kalimantan Utara) with text from lst[39]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Kalimantan Utara")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Kepiting Soka.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[39])
    description.pack()
    window.mainloop()
def __prov21(self):
    """Dish-selection menu for province 21 (Kalimantan Barat): two buttons open detail windows.

    Fix: the second button read 'Asam Padas Tempoyak'; the dish (and the image
    file loaded by kalbar2, "Asam Pedas Tempoyak.png") is 'Asam Pedas Tempoyak'.
    """
    master0 = Tk()
    master0.minsize(width=450, height=100)
    master0.maxsize(width=450, height=100)
    master0.title("Direktori Makanan Tradisional Nusantara")
    master0.judul = Label(master0, text="Pilih makanan :)", font="Arial 16 bold")
    master0.judul.grid(row=0, column=2, columnspan=4)
    master0.bprov1 = Button(master0, text='Bubur Paddas Sambas', command=self.kalbar1, width=25, height=3)
    master0.bprov1.grid(row=1, column=2, columnspan=4)
    master0.bprov2 = Button(master0, text='Asam Pedas Tempoyak', command=self.kalbar2, width=25, height=3)  # was 'Asam Padas Tempoyak'
    master0.bprov2.grid(row=1, column=6, columnspan=4)
    master0.mainloop()
def kalbar1(self):
    """Open a fixed 800x600 detail window for Bubur Paddas Sambas (Kalimantan Barat) with text from lst[40]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Kalimantan Barat")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Bubur Paddas Sambas.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[40])
    description.pack()
    window.mainloop()
def kalbar2(self):
    """Open a fixed 800x600 detail window for Asam Pedas Tempoyak (Kalimantan Barat) with text from lst[41]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Kalimantan Barat")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Asam Pedas Tempoyak.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[41])
    description.pack()
    window.mainloop()
def __prov22(self):
    """Dish-selection menu for province 22 (Kalimantan Tengah): two buttons open detail windows."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text="Kalumpe", command=self.kalteng1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text="Wadi Patin", command=self.kalteng2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()
def kalteng1(self):
    """Open a fixed 800x600 detail window for Kalumpe (Kalimantan Tengah) with photo and text from lst[42].

    Fix: window title read "Makanan tradisional Kalimantan Tengha" — corrected
    the transposition to "Tengah", matching the sibling method kalteng2.
    """
    master2 = Tk()
    master2.minsize(width=800, height=600)
    master2.maxsize(width=800, height=600)
    master2.title("Makanan tradisional Kalimantan Tengah")  # was "Tengha"
    canvas = Canvas(master2, width=300, height=300)
    canvas.pack()
    # Keep a local reference to the image so Tk does not garbage-collect it.
    img = PhotoImage(master=canvas, file="Kalumpe.png")
    canvas.create_image(5, 5, anchor=NW, image=img)
    text2 = Text(master2, font="Arial 12")
    text2.insert(INSERT, lst[42])
    text2.pack()
    master2.mainloop()
def kalteng2(self):
    """Open a fixed 800x600 detail window for Wadi Patin (Kalimantan Tengah) with text from lst[43]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Kalimantan Tengah")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Wadi Patin.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[43])
    description.pack()
    window.mainloop()
def __prov23(self):
    """Dish-selection menu for province 23 (Kalimantan Selatan): two buttons open detail windows."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text="Manday", command=self.kalsel1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text="Gangan Asam Banjar", command=self.kalsel2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()
def kalsel1(self):
    """Open a fixed 800x600 detail window for Manday (Kalimantan Selatan) with photo and text from lst[44]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Kalimantan Selatan")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Manday.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[44])
    description.pack()
    window.mainloop()
def kalsel2(self):
    """Open a fixed 800x600 detail window for Gangan Asam Banjar (Kalimantan Selatan) with text from lst[45]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Kalimantan Selatan")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Gangan Asam Banjar.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[45])
    description.pack()
    window.mainloop()
def __prov24(self):
    """Dish-selection menu for province 24 (Kalimantan Timur): two buttons open detail windows.

    Fix: the second button read 'Nasi Bekekpor'; the dish (and the image file
    loaded by kaltim2, "Nasi Bekepor.png") is spelled 'Nasi Bekepor'.
    """
    master0 = Tk()
    master0.minsize(width=450, height=100)
    master0.maxsize(width=450, height=100)
    master0.title("Direktori Makanan Tradisional Nusantara")
    master0.judul = Label(master0, text="Pilih makanan :)", font="Arial 16 bold")
    master0.judul.grid(row=0, column=2, columnspan=4)
    master0.bprov1 = Button(master0, text='Ayam Cincane', command=self.kaltim1, width=25, height=3)
    master0.bprov1.grid(row=1, column=2, columnspan=4)
    master0.bprov2 = Button(master0, text='Nasi Bekepor', command=self.kaltim2, width=25, height=3)  # was 'Nasi Bekekpor'
    master0.bprov2.grid(row=1, column=6, columnspan=4)
    master0.mainloop()
def kaltim1(self):
    """Open a fixed 800x600 detail window for Ayam Cincane (Kalimantan Timur) with photo and text from lst[46].

    Fix: window title read "Makanan tradisional Kalimnantan Timur" — corrected
    the misspelling to "Kalimantan", matching the sibling method kaltim2.
    """
    master2 = Tk()
    master2.minsize(width=800, height=600)
    master2.maxsize(width=800, height=600)
    master2.title("Makanan tradisional Kalimantan Timur")  # was "Kalimnantan"
    canvas = Canvas(master2, width=300, height=300)
    canvas.pack()
    # Keep a local reference to the image so Tk does not garbage-collect it.
    img = PhotoImage(master=canvas, file="Ayam Cincane.png")
    canvas.create_image(5, 5, anchor=NW, image=img)
    text2 = Text(master2, font="Arial 12")
    text2.insert(INSERT, lst[46])
    text2.pack()
    master2.mainloop()
def kaltim2(self):
    """Open a fixed 800x600 detail window for Nasi Bekepor (Kalimantan Timur) with text from lst[47]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Kalimantan Timur")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Nasi Bekepor.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[47])
    description.pack()
    window.mainloop()
def __prov25(self):
    """Dish-selection menu for province 25 (Gorontalo): two buttons open detail windows."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text="Binte Biluhuta", command=self.goron1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text="Bilenthango", command=self.goron2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()
def goron1(self):
    """Open a fixed 800x600 detail window for Binte Biluhuta (Gorontalo) with photo and text from lst[48]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Gorontalo")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Binte Biluhuta.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[48])
    description.pack()
    window.mainloop()
def goron2(self):
    """Open a fixed 800x600 detail window for Bilenthango (Gorontalo) with photo and text from lst[49]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Gorontalo")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Bilenthango.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[49])
    description.pack()
    window.mainloop()
def __prov26(self):
    """Dish-selection menu for province 26 (Sulawesi Utara): two buttons open detail windows.

    Fix: the first button read 'Kelapertaar'; the dish (and the image file
    loaded by sulut1, "Klapertaart.png") is spelled 'Klapertaart'.
    """
    master0 = Tk()
    master0.minsize(width=450, height=100)
    master0.maxsize(width=450, height=100)
    master0.title("Direktori Makanan Tradisional Nusantara")
    master0.judul = Label(master0, text="Pilih makanan :)", font="Arial 16 bold")
    master0.judul.grid(row=0, column=2, columnspan=4)
    master0.bprov1 = Button(master0, text='Klapertaart', command=self.sulut1, width=25, height=3)  # was 'Kelapertaar'
    master0.bprov1.grid(row=1, column=2, columnspan=4)
    master0.bprov2 = Button(master0, text='Tinutuan', command=self.sulut2, width=25, height=3)
    master0.bprov2.grid(row=1, column=6, columnspan=4)
    master0.mainloop()
def sulut1(self):
    """Open a fixed 800x600 detail window for Klapertaart (Sulawesi Utara) with photo and text from lst[50]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Sulawesi Utara")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Klapertaart.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[50])
    description.pack()
    window.mainloop()
def sulut2(self):
    """Open a fixed 800x600 detail window for Tinutuan (Sulawesi Utara) with photo and text from lst[51]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Sulawesi Utara")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Tinutuan.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[51])
    description.pack()
    window.mainloop()
def __prov27(self):
    """Dish-selection menu for province 27 (Sulawesi Barat): two buttons open detail windows."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text="Apang Bugis", command=self.sulbar1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text="Jepa", command=self.sulbar2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()
def sulbar1(self):
    """Open a fixed 800x600 detail window for Apang Bugis (Sulawesi Barat) with photo and text from lst[52].

    Fix: window title had a stray trailing space ("Sulawesi Barat ") —
    removed to match the sibling method sulbar2.
    """
    master2 = Tk()
    master2.minsize(width=800, height=600)
    master2.maxsize(width=800, height=600)
    master2.title("Makanan tradisional Sulawesi Barat")  # trailing space removed
    canvas = Canvas(master2, width=300, height=300)
    canvas.pack()
    # Keep a local reference to the image so Tk does not garbage-collect it.
    img = PhotoImage(master=canvas, file="Apang Bugis.png")
    canvas.create_image(5, 5, anchor=NW, image=img)
    text2 = Text(master2, font="Arial 12")
    text2.insert(INSERT, lst[52])
    text2.pack()
    master2.mainloop()
def sulbar2(self):
    """Open a fixed 800x600 detail window for Jepa (Sulawesi Barat) with photo and text from lst[53]."""
    window = Tk()
    window.minsize(width=800, height=600)
    window.maxsize(width=800, height=600)
    window.title("Makanan tradisional Sulawesi Barat")
    photo_area = Canvas(window, width=300, height=300)
    photo_area.pack()
    photo = PhotoImage(master=photo_area, file="Jepa.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(window, font="Arial 12")
    description.insert(INSERT, lst[53])
    description.pack()
    window.mainloop()
def __prov28(self):
    """Dish-selection menu for province 28 (Sulawesi Tengah): two buttons open detail windows."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text="Uta Kelo", command=self.sulteng1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text="Kaledo", command=self.sulteng2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()
def sulteng1(self):
master2 = Tk()
master2.minsize(width = 800, height = 600)
master2.maxsize(width = 800, height = 600)
master2.title("Makanan tradisional Sulawesi Tengah")
canvas = Canvas(master2, width = 300, height = 300)
canvas.pack()
img = PhotoImage(master = canvas,file="Uta Kelo.png")
canvas.create_image(5,5, anchor=NW, image=img)
text1 = lst[54]
text2 = Text(master2, font = "Arial 12")
text2.insert(INSERT, text1)
text2.pack()
master2.mainloop()
def sulteng2(self):
master2 = Tk()
master2.minsize(width = 800, height = 600)
master2.maxsize(width = 800, height = 600)
master2.title("Makanan tradisional Sulawesi Tengah")
canvas = Canvas(master2, width = 300, height = 300)
canvas.pack()
img = PhotoImage(master = canvas,file="Kaledo.png")
canvas.create_image(5,5, anchor=NW, image=img)
text1 = lst[55]
text2 = Text(master2, font = "Arial 12")
text2.insert(INSERT, text1)
text2.pack()
master2.mainloop()
def __prov29(self):
    """Dish-selection menu for Sulawesi Selatan."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text='Sop Konro', command=self.sulsel1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text='Coto Makassar', command=self.sulsel2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()

def sulsel1(self):
    """Detail window for Sop Konro (Sulawesi Selatan)."""
    win = Tk()
    win.minsize(width=800, height=600)
    win.maxsize(width=800, height=600)
    win.title("Makanan tradisional Sulawesi Selatan")
    photo_area = Canvas(win, width=300, height=300)
    photo_area.pack()
    # Hold a reference so Tk does not drop the image.
    photo = PhotoImage(master=photo_area, file="Sop Konro.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(win, font="Arial 12")
    description.insert(INSERT, lst[56])
    description.pack()
    win.mainloop()

def sulsel2(self):
    """Detail window for Coto Makassar (Sulawesi Selatan)."""
    win = Tk()
    win.minsize(width=800, height=600)
    win.maxsize(width=800, height=600)
    win.title("Makanan tradisional Sulawesi Selatan")
    photo_area = Canvas(win, width=300, height=300)
    photo_area.pack()
    # Hold a reference so Tk does not drop the image.
    photo = PhotoImage(master=photo_area, file="Coto Makassar.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(win, font="Arial 12")
    description.insert(INSERT, lst[57])
    description.pack()
    win.mainloop()
def __prov30(self):
    """Dish-selection menu for Sulawesi Tenggara."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text='Sinonggi', command=self.sulgar1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text='Kasoami', command=self.sulgar2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()

def sulgar1(self):
    """Detail window for Sinonggi (Sulawesi Tenggara)."""
    win = Tk()
    win.minsize(width=800, height=600)
    win.maxsize(width=800, height=600)
    win.title("Makanan tradisional Sulawesi Tenggara")
    photo_area = Canvas(win, width=300, height=300)
    photo_area.pack()
    # Hold a reference so Tk does not drop the image.
    photo = PhotoImage(master=photo_area, file="Sinonggi.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(win, font="Arial 12")
    description.insert(INSERT, lst[58])
    description.pack()
    win.mainloop()

def sulgar2(self):
    """Detail window for Kasoami (Sulawesi Tenggara)."""
    win = Tk()
    win.minsize(width=800, height=600)
    win.maxsize(width=800, height=600)
    win.title("Makanan tradisional Sulawesi Tenggara")
    photo_area = Canvas(win, width=300, height=300)
    photo_area.pack()
    # Hold a reference so Tk does not drop the image.
    photo = PhotoImage(master=photo_area, file="Kasoami.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(win, font="Arial 12")
    description.insert(INSERT, lst[59])
    description.pack()
    win.mainloop()
def __prov31(self):
    """Dish-selection menu for Maluku Utara."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text='Gatang Kenari', command=self.malut1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text='Nasi Lapola', command=self.malut2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()

def malut1(self):
    """Detail window for Gatang Kenari (Maluku Utara)."""
    win = Tk()
    win.minsize(width=800, height=600)
    win.maxsize(width=800, height=600)
    win.title("Makanan tradisional Maluku Utara")
    photo_area = Canvas(win, width=300, height=300)
    photo_area.pack()
    # Hold a reference so Tk does not drop the image.
    photo = PhotoImage(master=photo_area, file="Gatang Kenari.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(win, font="Arial 12")
    description.insert(INSERT, lst[60])
    description.pack()
    win.mainloop()

def malut2(self):
    """Detail window for Nasi Lapola (Maluku Utara)."""
    win = Tk()
    win.minsize(width=800, height=600)
    win.maxsize(width=800, height=600)
    win.title("Makanan tradisional Maluku Utara")
    photo_area = Canvas(win, width=300, height=300)
    photo_area.pack()
    # Hold a reference so Tk does not drop the image.
    photo = PhotoImage(master=photo_area, file="Nasi Lapola.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(win, font="Arial 12")
    description.insert(INSERT, lst[61])
    description.pack()
    win.mainloop()
def __prov32(self):
    """Dish-selection menu for Maluku."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text='Sambal Colo-colo', command=self.malu1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text='Kohu-kohu', command=self.malu2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()

def malu1(self):
    """Detail window for Sambal Colo-colo (Maluku)."""
    win = Tk()
    win.minsize(width=800, height=600)
    win.maxsize(width=800, height=600)
    win.title("Makanan tradisional Maluku")
    photo_area = Canvas(win, width=300, height=300)
    photo_area.pack()
    # Hold a reference so Tk does not drop the image.
    photo = PhotoImage(master=photo_area, file="Sambal Colo Colo.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(win, font="Arial 12")
    description.insert(INSERT, lst[62])
    description.pack()
    win.mainloop()

def malu2(self):
    """Detail window for Kohu-kohu (Maluku)."""
    win = Tk()
    win.minsize(width=800, height=600)
    win.maxsize(width=800, height=600)
    win.title("Makanan tradisional Maluku")
    photo_area = Canvas(win, width=300, height=300)
    photo_area.pack()
    # Hold a reference so Tk does not drop the image.
    photo = PhotoImage(master=photo_area, file="Kohu Kohu.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(win, font="Arial 12")
    description.insert(INSERT, lst[63])
    description.pack()
    win.mainloop()
def __prov33(self):
    """Dish-selection menu for Papua Barat."""
    menu = Tk()
    menu.minsize(width=450, height=100)
    menu.maxsize(width=450, height=100)
    menu.title("Direktori Makanan Tradisional Nusantara")
    menu.judul = Label(menu, text="Pilih makanan :)", font="Arial 16 bold")
    menu.judul.grid(row=0, column=2, columnspan=4)
    menu.bprov1 = Button(menu, text='Ikan Bakar Manokwari', command=self.pabar1, width=25, height=3)
    menu.bprov1.grid(row=1, column=2, columnspan=4)
    menu.bprov2 = Button(menu, text='Sate Ulat Sagu', command=self.pabar2, width=25, height=3)
    menu.bprov2.grid(row=1, column=6, columnspan=4)
    menu.mainloop()

def pabar1(self):
    """Detail window for Ikan Bakar Manokwari (Papua Barat)."""
    win = Tk()
    win.minsize(width=800, height=600)
    win.maxsize(width=800, height=600)
    win.title("Makanan tradisional Papua Barat")
    photo_area = Canvas(win, width=300, height=300)
    photo_area.pack()
    # Hold a reference so Tk does not drop the image.
    photo = PhotoImage(master=photo_area, file="Ikan Bakar Manokwari.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(win, font="Arial 12")
    description.insert(INSERT, lst[64])
    description.pack()
    win.mainloop()

def pabar2(self):
    """Detail window for Sate Ulat Sagu (Papua Barat)."""
    win = Tk()
    win.minsize(width=800, height=600)
    win.maxsize(width=800, height=600)
    win.title("Makanan tradisional Papua Barat")
    photo_area = Canvas(win, width=300, height=300)
    photo_area.pack()
    # Hold a reference so Tk does not drop the image.
    photo = PhotoImage(master=photo_area, file="Sate Ulat Sagu.png")
    photo_area.create_image(5, 5, anchor=NW, image=photo)
    description = Text(win, font="Arial 12")
    description.insert(INSERT, lst[65])
    description.pack()
    win.mainloop()
def __prov34(self):
    """Dish-selection menu for Papua.

    Fix: the button label previously read 'Udang Salingkuh'; the handler
    loads "Udang Selingkuh.png", and the dish is named Udang Selingkuh,
    so the label typo is corrected to match.
    """
    master0 = Tk()
    master0.minsize(width=450, height=100)
    master0.maxsize(width=450, height=100)
    master0.title("Direktori Makanan Tradisional Nusantara")
    master0.judul = Label(master0, text="Pilih makanan :)", font="Arial 16 bold")
    master0.judul.grid(row=0, column=2, columnspan=4)
    master0.bprov1 = Button(master0, text='Udang Selingkuh', command=self.papua1, width=25, height=3)
    master0.bprov1.grid(row=1, column=2, columnspan=4)
    master0.bprov2 = Button(master0, text='Papeda', command=self.papua2, width=25, height=3)
    master0.bprov2.grid(row=1, column=6, columnspan=4)
    master0.mainloop()

def papua1(self):
    """Detail window for Udang Selingkuh (Papua)."""
    master2 = Tk()
    master2.minsize(width=800, height=600)
    master2.maxsize(width=800, height=600)
    master2.title("Makanan tradisional Papua")
    canvas = Canvas(master2, width=300, height=300)
    canvas.pack()
    # Keep a local reference so the image is not garbage-collected.
    img = PhotoImage(master=canvas, file="Udang Selingkuh.png")
    canvas.create_image(5, 5, anchor=NW, image=img)
    text2 = Text(master2, font="Arial 12")
    text2.insert(INSERT, lst[66])  # description text for Udang Selingkuh
    text2.pack()
    master2.mainloop()

def papua2(self):
    """Detail window for Papeda (Papua)."""
    master2 = Tk()
    master2.minsize(width=800, height=600)
    master2.maxsize(width=800, height=600)
    master2.title("Makanan tradisional Papua")
    canvas = Canvas(master2, width=300, height=300)
    canvas.pack()
    # Keep a local reference so the image is not garbage-collected.
    img = PhotoImage(master=canvas, file="Papeda.png")
    canvas.create_image(5, 5, anchor=NW, image=img)
    text2 = Text(master2, font="Arial 12")
    text2.insert(INSERT, lst[67])  # description text for Papeda
    text2.pack()
    master2.mainloop()
# Script entry point: load the dish description texts, then start the
# directory application (both defined earlier in this module).
if __name__ == "__main__":
    readf()  # populates the module-level `lst` of description strings
    DirektoriMakanan()
| 41.477493
| 119
| 0.581842
| 8,395
| 72,793
| 5.019416
| 0.05277
| 0.029475
| 0.045185
| 0.054867
| 0.862689
| 0.829774
| 0.829774
| 0.829774
| 0.829774
| 0.829774
| 0
| 0.089873
| 0.294877
| 72,793
| 1,754
| 120
| 41.50114
| 0.731082
| 0.000522
| 0
| 0.721649
| 0
| 0
| 0.099997
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.071478
| false
| 0
| 0.000687
| 0
| 0.072852
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6e0451b36b332d002803b8ea51d69f44c7236e6c
| 19,656
|
py
|
Python
|
cpdb/cr/tests/views/test_cr_mobile_viewset.py
|
invinst/CPDBv2_backend
|
b4e96d620ff7a437500f525f7e911651e4a18ef9
|
[
"Apache-2.0"
] | 25
|
2018-07-20T22:31:40.000Z
|
2021-07-15T16:58:41.000Z
|
cpdb/cr/tests/views/test_cr_mobile_viewset.py
|
invinst/CPDBv2_backend
|
b4e96d620ff7a437500f525f7e911651e4a18ef9
|
[
"Apache-2.0"
] | 13
|
2018-06-18T23:08:47.000Z
|
2022-02-10T07:38:25.000Z
|
cpdb/cr/tests/views/test_cr_mobile_viewset.py
|
invinst/CPDBv2_backend
|
b4e96d620ff7a437500f525f7e911651e4a18ef9
|
[
"Apache-2.0"
] | 6
|
2018-05-17T21:59:43.000Z
|
2020-11-17T00:30:26.000Z
|
from datetime import datetime, date
from django.urls import reverse
from django.contrib.gis.geos import Point
from mock import patch
from rest_framework.test import APITestCase
from rest_framework import status
from robber import expect
import pytz
from data.factories import (
OfficerFactory, AllegationFactory, OfficerAllegationFactory, ComplainantFactory, AreaFactory,
PoliceWitnessFactory, InvestigatorFactory, InvestigatorAllegationFactory,
AllegationCategoryFactory, AttachmentFileFactory, OfficerBadgeNumberFactory, VictimFactory
)
from data.constants import MEDIA_TYPE_DOCUMENT
from cr.tests.mixins import CRTestCaseMixin
from data.cache_managers import officer_cache_manager, allegation_cache_manager
from email_service.constants import CR_ATTACHMENT_REQUEST
from email_service.factories import EmailTemplateFactory
class CRMobileViewSetTestCase(CRTestCaseMixin, APITestCase):
def test_retrieve(self):
    """GET api-v2:cr-mobile-detail returns the full mobile CR payload.

    Builds one allegation with a coaccused officer, a police witness,
    an investigator, and three attachments, then asserts the serialized
    response matches exactly.
    """
    area = AreaFactory(name='Lincoln Square')
    # Coaccused officer shown in the 'coaccused' section of the payload.
    officer1 = OfficerFactory(
        id=123,
        first_name='Mr',
        last_name='Foo',
        gender='M',
        race='White',
        rank='Officer',
        appointed_date=date(2001, 1, 1),
        birth_year=1993,
        complaint_percentile=4.4,
        civilian_allegation_percentile=1.1,
        internal_allegation_percentile=2.2,
        trr_percentile=3.3,
        allegation_count=1,
        sustained_count=1,
    )
    OfficerBadgeNumberFactory(officer=officer1, star='12345', current=True)
    allegation = AllegationFactory(
        crid='12345', point=Point(12, 21), incident_date=datetime(2002, 2, 28, tzinfo=pytz.utc), add1=3510,
        add2='Michigan Ave', city='Chicago', location='Police Communications System', beat=area,
        is_officer_complaint=False, summary='Summary',
        first_start_date=date(2003, 3, 20),
        first_end_date=date(2006, 5, 26)
    )
    ComplainantFactory(allegation=allegation, gender='M', race='Black', age='18')
    VictimFactory(allegation=allegation, gender='M', race='Black', age=53)
    OfficerAllegationFactory(
        officer=officer1, allegation=allegation, final_finding='SU',
        final_outcome='Separation', start_date=date(2003, 3, 20), end_date=date(2006, 5, 26),
        allegation_category=AllegationCategoryFactory(
            category='Operation/Personnel Violations',
            allegation_name='Secondary/Special Employment'
        )
    )
    # Police witness: appears under 'involvements' as 'police_witness'.
    officer = OfficerFactory(
        id=3,
        first_name='Raymond',
        last_name='Piwinicki',
        appointed_date=date(2001, 5, 1),
        complaint_percentile=4.4,
        trr_percentile=5.5,
        allegation_count=1,
        sustained_count=1,
    )
    OfficerAllegationFactory(
        officer=officer,
        final_finding='SU',
        start_date=date(2003, 2, 28),
        allegation__incident_date=datetime(2002, 2, 28, tzinfo=pytz.utc),
        allegation__is_officer_complaint=False
    )
    PoliceWitnessFactory(officer=officer, allegation=allegation)
    # Investigator (also a sworn officer): appears as 'investigator'.
    investigator = OfficerFactory(
        id=1,
        first_name='Ellis',
        last_name='Skol',
        appointed_date=date(2001, 5, 1),
        complaint_percentile=6.6,
        civilian_allegation_percentile=7.7,
        internal_allegation_percentile=8.8,
        allegation_count=1,
        sustained_count=0,
    )
    OfficerAllegationFactory(
        officer=investigator,
        final_finding='NS',
        start_date=date(2003, 2, 28),
        allegation__incident_date=datetime(2002, 2, 28, tzinfo=pytz.utc),
        allegation__is_officer_complaint=False
    )
    investigator = InvestigatorFactory(officer=investigator)
    InvestigatorAllegationFactory(
        allegation=allegation,
        investigator=investigator,
    )
    # Three attachments; only the first (tag 'TRR', id '123456') is
    # expected in 'attachments' below — the others are filtered out.
    AttachmentFileFactory(
        tag='TRR',
        allegation=allegation,
        title='CR document',
        id='123456',
        url='http://cr-document.com/',
        file_type=MEDIA_TYPE_DOCUMENT
    )
    AttachmentFileFactory(
        tag='TRR',
        allegation=allegation, title='CR arrest report document',
        url='http://cr-document.com/', file_type=MEDIA_TYPE_DOCUMENT
    )
    AttachmentFileFactory(
        tag='AR',
        allegation=allegation,
        title='CR document 2',
        id='654321',
        url='http://AR-document.com/',
        file_type=MEDIA_TYPE_DOCUMENT
    )
    # Rebuild caches so the serializer sees the factory data.
    officer_cache_manager.build_cached_columns()
    allegation_cache_manager.cache_data()
    response = self.client.get(reverse('api-v2:cr-mobile-detail', kwargs={'pk': '12345'}))
    expect(response.status_code).to.eq(status.HTTP_200_OK)
    expect(dict(response.data)).to.eq({
        'crid': '12345',
        'most_common_category': {
            'category': 'Operation/Personnel Violations',
            'allegation_name': 'Secondary/Special Employment'
        },
        'coaccused': [
            {
                'id': 123,
                'full_name': 'Mr Foo',
                'rank': 'Officer',
                'final_outcome': 'Separation',
                'final_finding': 'Sustained',
                'allegation_count': 1,
                'category': 'Operation/Personnel Violations',
                'percentile_allegation': '4.4000',
                'percentile_allegation_civilian': '1.1000',
                'percentile_allegation_internal': '2.2000',
                'percentile_trr': '3.3000',
            }
        ],
        'complainants': [
            {
                'race': 'Black',
                'gender': 'Male',
                'age': 18
            }
        ],
        'victims': [
            {
                'race': 'Black',
                'gender': 'Male',
                'age': 53
            }
        ],
        'point': {
            'lon': 12.0,
            'lat': 21.0
        },
        'summary': 'Summary',
        'incident_date': '2002-02-28',
        'start_date': '2003-03-20',
        'end_date': '2006-05-26',
        'address': '3510 Michigan Ave, Chicago',
        'location': 'Police Communications System',
        'beat': 'Lincoln Square',
        'involvements': [
            {
                'involved_type': 'investigator',
                'officer_id': 1,
                'full_name': 'Ellis Skol',
                'badge': 'CPD',
                'percentile_allegation': '6.6000',
                'percentile_allegation_civilian': '7.7000',
                'percentile_allegation_internal': '8.8000',
            },
            {
                'involved_type': 'police_witness',
                'officer_id': 3,
                'full_name': 'Raymond Piwinicki',
                'allegation_count': 1,
                'sustained_count': 1,
                'percentile_trr': '5.5000',
                'percentile_allegation': '4.4000',
            }
        ],
        'attachments': [
            {
                'title': 'CR document',
                'file_type': 'document',
                'url': 'http://cr-document.com/',
                'id': '123456',
            }
        ]
    })
def test_retrieve_badge(self):
    """Verify the 'badge' value rendered for each investigator.

    Sets up four investigators with different star/officer combinations
    and asserts the expected badge strings ('CPD' vs 'COPA/IPRA') in the
    serialized involvements.  NOTE(review): the badge appears to depend
    on current_star and the 2007 incident date — confirm against the
    serializer logic.
    """
    area = AreaFactory(name='Lincoln Square')
    officer1 = OfficerFactory(
        id=123,
        first_name='Mr',
        last_name='Foo',
        gender='M',
        race='White',
        rank='Officer',
        appointed_date=date(2001, 1, 1),
        birth_year=1993,
        complaint_percentile=4.4,
        civilian_allegation_percentile=1.1,
        internal_allegation_percentile=2.2,
        trr_percentile=3.3,
        allegation_count=1,
        sustained_count=1,
    )
    OfficerBadgeNumberFactory(officer=officer1, star='12345', current=True)
    # Incident date 2007 here (vs 2002 in test_retrieve).
    allegation = AllegationFactory(
        crid='12345', point=Point(12, 21), incident_date=datetime(2007, 2, 28, tzinfo=pytz.utc), add1=3510,
        add2='Michigan Ave', city='Chicago', location='Police Communications System', beat=area,
        is_officer_complaint=False, summary='Summary',
        first_start_date=date(2003, 3, 20),
        first_end_date=date(2006, 5, 26)
    )
    ComplainantFactory(allegation=allegation, gender='M', race='Black', age='18')
    VictimFactory(allegation=allegation, gender='M', race='Black', age=53)
    OfficerAllegationFactory(
        officer=officer1, allegation=allegation, final_finding='SU', disciplined=True,
        final_outcome='Separation', start_date=date(2003, 3, 20), end_date=date(2006, 5, 26),
        allegation_category=AllegationCategoryFactory(
            category='Operation/Personnel Violations',
            allegation_name='Secondary/Special Employment'
        )
    )
    # Police witness.
    officer = OfficerFactory(
        id=3,
        first_name='Raymond',
        last_name='Piwinicki',
        appointed_date=date(2001, 5, 1),
        complaint_percentile=9.9,
        trr_percentile=5.5,
        allegation_count=1,
        sustained_count=1,
    )
    OfficerAllegationFactory(
        officer=officer,
        final_finding='SU',
        start_date=date(2003, 2, 28),
        allegation__incident_date=datetime(2002, 2, 28, tzinfo=pytz.utc),
        allegation__is_officer_complaint=False
    )
    PoliceWitnessFactory(officer=officer, allegation=allegation)
    # Three sworn-officer investigators plus one civilian investigator.
    investigator = OfficerFactory(
        id=1,
        first_name='Ellis',
        last_name='Skol',
        appointed_date=date(2001, 5, 1),
        complaint_percentile=6.6,
        civilian_allegation_percentile=7.7,
        internal_allegation_percentile=8.8,
        allegation_count=1,
        sustained_count=0,
    )
    investigator_2 = OfficerFactory(
        id=2,
        first_name='Jerome',
        last_name='Finnigan',
        appointed_date=date(2001, 5, 1),
        complaint_percentile=6.6,
        civilian_allegation_percentile=7.7,
        internal_allegation_percentile=8.8,
        allegation_count=1,
        sustained_count=0,
    )
    investigator_3 = OfficerFactory(
        id=4,
        first_name='Edward',
        last_name='May',
        appointed_date=date(2001, 5, 1),
        complaint_percentile=9.9,
        civilian_allegation_percentile=7.7,
        internal_allegation_percentile=8.8,
        allegation_count=1,
        sustained_count=0,
    )
    # Only investigator_2 has a current badge number on record.
    OfficerBadgeNumberFactory(officer=investigator_2, star='456789', current=True)
    OfficerAllegationFactory(
        officer=investigator,
        final_finding='NS',
        start_date=date(2003, 2, 28),
        allegation__incident_date=datetime(2002, 2, 28, tzinfo=pytz.utc),
        allegation__is_officer_complaint=False
    )
    investigator = InvestigatorFactory(officer=investigator)
    investigator_2 = InvestigatorFactory(officer=investigator_2)
    investigator_3 = InvestigatorFactory(officer=investigator_3)
    # investigator_4 has no linked officer record at all.
    investigator_4 = InvestigatorFactory(first_name='Kevin', last_name='Osborn')
    InvestigatorAllegationFactory(
        allegation=allegation,
        investigator=investigator,
        current_star='123456'
    )
    InvestigatorAllegationFactory(
        allegation=allegation,
        investigator=investigator_2,
        current_star=None
    )
    InvestigatorAllegationFactory(
        allegation=allegation,
        investigator=investigator_3,
        current_star=None
    )
    InvestigatorAllegationFactory(
        allegation=allegation,
        investigator=investigator_4,
        current_star=None
    )
    # Only the tag 'TRR' attachment is expected in the payload below.
    AttachmentFileFactory(
        tag='TRR',
        allegation=allegation,
        title='CR document',
        id='123456',
        url='http://cr-document.com/',
        file_type=MEDIA_TYPE_DOCUMENT
    )
    AttachmentFileFactory(
        tag='AR',
        allegation=allegation,
        title='CR document 2',
        id='654321',
        url='http://AR-document.com/',
        file_type=MEDIA_TYPE_DOCUMENT
    )
    # Rebuild caches so the serializer sees the factory data.
    officer_cache_manager.build_cached_columns()
    allegation_cache_manager.cache_data()
    response = self.client.get(reverse('api-v2:cr-mobile-detail', kwargs={'pk': '12345'}))
    expect(response.status_code).to.eq(status.HTTP_200_OK)
    expect(dict(response.data)).to.eq({
        'crid': '12345',
        'most_common_category': {
            'category': 'Operation/Personnel Violations',
            'allegation_name': 'Secondary/Special Employment'
        },
        'coaccused': [
            {
                'id': 123,
                'full_name': 'Mr Foo',
                'rank': 'Officer',
                'final_outcome': 'Separation',
                'final_finding': 'Sustained',
                'allegation_count': 1,
                'category': 'Operation/Personnel Violations',
                'percentile_allegation': '4.4000',
                'percentile_allegation_civilian': '1.1000',
                'percentile_allegation_internal': '2.2000',
                'percentile_trr': '3.3000',
            }
        ],
        'complainants': [
            {
                'race': 'Black',
                'gender': 'Male',
                'age': 18
            }
        ],
        'victims': [
            {
                'race': 'Black',
                'gender': 'Male',
                'age': 53
            }
        ],
        'point': {
            'lon': 12.0,
            'lat': 21.0
        },
        'summary': 'Summary',
        'incident_date': '2007-02-28',
        'start_date': '2003-03-20',
        'end_date': '2006-05-26',
        'address': '3510 Michigan Ave, Chicago',
        'location': 'Police Communications System',
        'beat': 'Lincoln Square',
        'involvements': [
            {
                'involved_type': 'investigator',
                'full_name': 'Kevin Osborn',
                'badge': 'COPA/IPRA',
            },
            {
                'involved_type': 'investigator',
                'officer_id': 4,
                'full_name': 'Edward May',
                'badge': 'COPA/IPRA',
                'percentile_allegation': '9.9000',
                'percentile_allegation_civilian': '7.7000',
                'percentile_allegation_internal': '8.8000',
            },
            {
                'involved_type': 'investigator',
                'officer_id': 2,
                'full_name': 'Jerome Finnigan',
                'badge': 'CPD',
                'percentile_allegation': '6.6000',
                'percentile_allegation_civilian': '7.7000',
                'percentile_allegation_internal': '8.8000',
            },
            {
                'involved_type': 'investigator',
                'officer_id': 1,
                'full_name': 'Ellis Skol',
                'badge': 'CPD',
                'percentile_allegation': '6.6000',
                'percentile_allegation_civilian': '7.7000',
                'percentile_allegation_internal': '8.8000',
            },
            {
                'involved_type': 'police_witness',
                'officer_id': 3,
                'full_name': 'Raymond Piwinicki',
                'allegation_count': 1,
                'sustained_count': 1,
                'percentile_allegation': '9.9000',
                'percentile_trr': '5.5000',
            }
        ],
        'attachments': [
            {
                'title': 'CR document',
                'file_type': 'document',
                'url': 'http://cr-document.com/',
                'id': '123456',
            }
        ]
    })
def test_retrieve_not_found(self):
    """A CRID with no matching allegation yields HTTP 404."""
    url = reverse('api-v2:cr-mobile-detail', kwargs={'pk': '45678'})
    response = self.client.get(url)
    expect(response.status_code).to.eq(status.HTTP_404_NOT_FOUND)
@patch('cr.views.send_attachment_request_email')
def test_request_document(self, mock_send_attachment_request_email):
    """Posting a valid email subscribes it and sends the request email."""
    EmailTemplateFactory(type=CR_ATTACHMENT_REQUEST)
    AllegationFactory(crid='112233')
    url = reverse('api-v2:cr-mobile-request-document', kwargs={'pk': '112233'})
    response = self.client.post(url, {'email': 'valid_email@example.com'})
    expect(response.status_code).to.eq(status.HTTP_200_OK)
    expect(response.data).to.eq({
        'crid': '112233',
        'message': 'Thanks for subscribing',
    })
    expect(mock_send_attachment_request_email).to.be.called_once_with(
        'valid_email@example.com',
        attachment_type='cr_request',
        pk='112233',
    )
def test_request_same_document_twice(self):
    """A duplicate subscription for the same email is rejected with 400."""
    EmailTemplateFactory(type=CR_ATTACHMENT_REQUEST)
    allegation = AllegationFactory(crid='112233')
    url = reverse('api-v2:cr-mobile-request-document', kwargs={'pk': allegation.crid})
    payload = {'email': 'valid_email@example.com'}
    self.client.post(url, payload)  # first request succeeds
    response2 = self.client.post(url, payload)  # duplicate
    expect(response2.status_code).to.eq(status.HTTP_400_BAD_REQUEST)
    expect(response2.data).to.eq({
        'crid': '112233',
        'message': 'Email already added',
    })
def test_request_document_without_email(self):
    """Omitting the email field yields a validation error."""
    AllegationFactory(crid='321')
    url = reverse('api-v2:cr-mobile-request-document', kwargs={'pk': 321})
    response = self.client.post(url)
    expect(response.status_code).to.eq(status.HTTP_400_BAD_REQUEST)
    expect(response.data).to.eq({'message': 'Please enter a valid email'})
def test_request_document_with_invalid_email(self):
    """A malformed email address yields a validation error."""
    AllegationFactory(crid='321')
    url = reverse('api-v2:cr-mobile-request-document', kwargs={'pk': 321})
    response = self.client.post(url, {'email': 'invalid@email'})
    expect(response.status_code).to.eq(status.HTTP_400_BAD_REQUEST)
    expect(response.data).to.eq({'message': 'Please enter a valid email'})
def test_request_document_with_invalid_allegation(self):
    """Requesting a document for an unknown allegation yields HTTP 404."""
    url = reverse('api-v2:cr-mobile-request-document', kwargs={'pk': 321})
    response = self.client.post(url)
    expect(response.status_code).to.eq(status.HTTP_404_NOT_FOUND)
| 38.616896
| 111
| 0.536121
| 1,743
| 19,656
| 5.838784
| 0.144005
| 0.015722
| 0.018866
| 0.024565
| 0.832072
| 0.785398
| 0.779306
| 0.776358
| 0.759065
| 0.758672
| 0
| 0.055018
| 0.353632
| 19,656
| 508
| 112
| 38.692913
| 0.746006
| 0
| 0
| 0.709016
| 0
| 0
| 0.188848
| 0.047059
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016393
| false
| 0
| 0.028689
| 0
| 0.047131
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
6e15778b6aa2fdc1db68325173a20c24ddb19ca0
| 82
|
py
|
Python
|
utils/tests.py
|
none-da/zeshare
|
6c13cd3bd9d82d89f53d4a8b287fe2c30f1d3779
|
[
"BSD-3-Clause"
] | null | null | null |
utils/tests.py
|
none-da/zeshare
|
6c13cd3bd9d82d89f53d4a8b287fe2c30f1d3779
|
[
"BSD-3-Clause"
] | null | null | null |
utils/tests.py
|
none-da/zeshare
|
6c13cd3bd9d82d89f53d4a8b287fe2c30f1d3779
|
[
"BSD-3-Clause"
] | 1
|
2021-04-12T11:43:38.000Z
|
2021-04-12T11:43:38.000Z
|
from utils.alltests.models_tests import *
from utils.alltests.views_tests import *
| 41
| 41
| 0.841463
| 12
| 82
| 5.583333
| 0.583333
| 0.268657
| 0.507463
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085366
| 82
| 2
| 42
| 41
| 0.893333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
6e4d7087233b7ae2b11a56e63f18198deb2e3d24
| 235
|
py
|
Python
|
source/nomenclator/dialog/__init__.py
|
buddly27/nomenclator-nuke
|
783a9ff9bba0a974cf4532ca5c14bb3fc0312af9
|
[
"MIT"
] | 11
|
2021-09-06T15:41:28.000Z
|
2022-03-21T23:52:59.000Z
|
source/nomenclator/dialog/__init__.py
|
buddly27/nomenclator-nuke
|
783a9ff9bba0a974cf4532ca5c14bb3fc0312af9
|
[
"MIT"
] | 2
|
2021-09-14T02:56:55.000Z
|
2021-09-14T03:00:03.000Z
|
source/nomenclator/dialog/__init__.py
|
buddly27/nomenclator-nuke
|
783a9ff9bba0a974cf4532ca5c14bb3fc0312af9
|
[
"MIT"
] | 2
|
2021-09-07T06:53:06.000Z
|
2021-09-13T19:20:22.000Z
|
# -*- coding: utf-8 -*-
from .comp_manager_dialog import CompoManagerDialog
from .outputs_manager_dialog import OutputsManagerDialog
from .project_manager_dialog import ProjectManagerDialog
from .settings_dialog import SettingsDialog
| 33.571429
| 56
| 0.851064
| 26
| 235
| 7.423077
| 0.576923
| 0.248705
| 0.295337
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004695
| 0.093617
| 235
| 6
| 57
| 39.166667
| 0.901408
| 0.089362
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
289374ebe125fc7af5df9a84211193c93b13005b
| 22,179
|
py
|
Python
|
tests/unit/test_virtual_service.py
|
Dannyzen/python-kemptech-api
|
7836312df517c128f48decbd9db39727a0358d97
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/test_virtual_service.py
|
Dannyzen/python-kemptech-api
|
7836312df517c128f48decbd9db39727a0358d97
|
[
"Apache-2.0"
] | null | null | null |
tests/unit/test_virtual_service.py
|
Dannyzen/python-kemptech-api
|
7836312df517c128f48decbd9db39727a0358d97
|
[
"Apache-2.0"
] | 1
|
2021-05-20T21:41:36.000Z
|
2021-05-20T21:41:36.000Z
|
from nose.tools import (assert_equal, assert_raises, assert_in,
assert_is_instance, assert_not_equal)
# handle py3 and py2 cases:
try:
import unittest.mock as mock
except ImportError:
import mock
patch = mock.patch
sentinel = mock.sentinel
from python_kemptech_api import objects
import python_kemptech_api.exceptions as exceptions
from python_kemptech_api.objects import VirtualService, RealServer
ValidationError = exceptions.ValidationError
class Test_VirtualService:
def setup(self):
self.lm_info = {
"endpoint": "https://1.1.1.1:443/access",
"ip_address": "1.1.1.1",
"auth": ("bal", "2fourall"),
}
self.vs = VirtualService(self.lm_info, "1.1.1.2")
# Contains no subvs's
self.vs_get_response = "<Response stat=\"200\" code=\"ok\"> <Success> <Data> <Status>Down</Status> <Index>1</Index> <VSAddress>10.154.75.123</VSAddress> <VSPort>80</VSPort> <Enable>Y</Enable> <SSLReverse>N</SSLReverse> <SSLReencrypt>N</SSLReencrypt> <Intercept>N</Intercept> <InterceptOpts> <Opt>opnormal</Opt> <Opt>auditrelevant</Opt> <Opt>reqdatadisable</Opt> <Opt>resdatadisable</Opt> </InterceptOpts> <AlertThreshold>0</AlertThreshold> <Transactionlimit>0</Transactionlimit> <Transparent>Y</Transparent> <SubnetOriginating>N</SubnetOriginating> <ServerInit>0</ServerInit> <StartTLSMode>0</StartTLSMode> <Idletime>0</Idletime> <Cache>N</Cache> <Compress>N</Compress> <Verify>0</Verify> <UseforSnat>N</UseforSnat> <ForceL7>Y</ForceL7> <MultiConnect>N</MultiConnect> <ClientCert>0</ClientCert> <ErrorCode>0</ErrorCode> <CheckUse1.1>N</CheckUse1.1> <MatchLen>0</MatchLen> <CheckUseGet>0</CheckUseGet> <SSLRewrite>0</SSLRewrite> <VStype>http</VStype> <FollowVSID>0</FollowVSID> <Protocol>tcp</Protocol> <Schedule>rr</Schedule> <CheckType>http</CheckType> <PersistTimeout>0</PersistTimeout> <CheckPort>0</CheckPort> <NRules>0</NRules> <NRequestRules>0</NRequestRules> <NResponseRules>0</NResponseRules> <NPreProcessRules>0</NPreProcessRules> <EspEnabled>N</EspEnabled> <InputAuthMode>0</InputAuthMode> <OutputAuthMode>0</OutputAuthMode> <MasterVS>0</MasterVS> <MasterVSID>0</MasterVSID> <AddVia>0</AddVia> <QoS>0</QoS> <TlsType>0</TlsType> <NeedHostName>N</NeedHostName> <OCSPVerify>N</OCSPVerify> <EnhancedHealthChecks>N</EnhancedHealthChecks> <RsMinimum>0</RsMinimum> <NumberOfRSs>0</NumberOfRSs> </Data> </Success> </Response>"
# Contains default cert
self.vs_get_response_defaultcert = "<Response stat=\"200\" code=\"ok\"> <Success> <Data> <Status>Down</Status> <Index>1</Index> <VSAddress>10.154.75.123</VSAddress> <VSPort>80</VSPort> <Enable>Y</Enable> <SSLReverse>N</SSLReverse> <SSLReencrypt>N</SSLReencrypt> <Intercept>N</Intercept> <InterceptOpts> <Opt>opnormal</Opt> <Opt>auditrelevant</Opt> <Opt>reqdatadisable</Opt> <Opt>resdatadisable</Opt> </InterceptOpts> <AlertThreshold>0</AlertThreshold> <Transactionlimit>0</Transactionlimit> <Transparent>Y</Transparent> <SubnetOriginating>N</SubnetOriginating> <ServerInit>0</ServerInit> <StartTLSMode>0</StartTLSMode> <Idletime>0</Idletime> <Cache>N</Cache> <Compress>N</Compress> <Verify>0</Verify> <UseforSnat>N</UseforSnat> <ForceL7>Y</ForceL7> <MultiConnect>N</MultiConnect> <ClientCert>0</ClientCert> <ErrorCode>0</ErrorCode> <CheckUse1.1>N</CheckUse1.1> <MatchLen>0</MatchLen> <CheckUseGet>0</CheckUseGet> <SSLRewrite>0</SSLRewrite> <VStype>http</VStype> <FollowVSID>0</FollowVSID> <Protocol>tcp</Protocol> <Schedule>rr</Schedule> <CheckType>http</CheckType> <PersistTimeout>0</PersistTimeout> <CheckPort>0</CheckPort> <NRules>0</NRules> <NRequestRules>0</NRequestRules> <NResponseRules>0</NResponseRules> <NPreProcessRules>0</NPreProcessRules> <EspEnabled>N</EspEnabled> <InputAuthMode>0</InputAuthMode> <OutputAuthMode>0</OutputAuthMode> <MasterVS>0</MasterVS> <MasterVSID>0</MasterVSID> <AddVia>0</AddVia> <QoS>0</QoS> <TlsType>0</TlsType> <NeedHostName>N</NeedHostName> <OCSPVerify>N</OCSPVerify> <EnhancedHealthChecks>N</EnhancedHealthChecks> <RsMinimum>0</RsMinimum> <NumberOfRSs>0</NumberOfRSs> <SSLAcceleration>Y</SSLAcceleration> <CertFile>f5d7b5869a48de4e30930785dcff3657</CertFile> </Data> </Success> </Response>"
# Contains one cert
self.vs_get_response_singlecert = "<Response stat=\"200\" code=\"ok\"> <Success> <Data> <Status>Down</Status> <Index>1</Index> <VSAddress>10.154.75.123</VSAddress> <VSPort>80</VSPort> <Enable>Y</Enable> <SSLReverse>N</SSLReverse> <SSLReencrypt>N</SSLReencrypt> <Intercept>N</Intercept> <InterceptOpts> <Opt>opnormal</Opt> <Opt>auditrelevant</Opt> <Opt>reqdatadisable</Opt> <Opt>resdatadisable</Opt> </InterceptOpts> <AlertThreshold>0</AlertThreshold> <Transactionlimit>0</Transactionlimit> <Transparent>Y</Transparent> <SubnetOriginating>N</SubnetOriginating> <ServerInit>0</ServerInit> <StartTLSMode>0</StartTLSMode> <Idletime>0</Idletime> <Cache>N</Cache> <Compress>N</Compress> <Verify>0</Verify> <UseforSnat>N</UseforSnat> <ForceL7>Y</ForceL7> <MultiConnect>N</MultiConnect> <ClientCert>0</ClientCert> <ErrorCode>0</ErrorCode> <CheckUse1.1>N</CheckUse1.1> <MatchLen>0</MatchLen> <CheckUseGet>0</CheckUseGet> <SSLRewrite>0</SSLRewrite> <VStype>http</VStype> <FollowVSID>0</FollowVSID> <Protocol>tcp</Protocol> <Schedule>rr</Schedule> <CheckType>http</CheckType> <PersistTimeout>0</PersistTimeout> <CheckPort>0</CheckPort> <NRules>0</NRules> <NRequestRules>0</NRequestRules> <NResponseRules>0</NResponseRules> <NPreProcessRules>0</NPreProcessRules> <EspEnabled>N</EspEnabled> <InputAuthMode>0</InputAuthMode> <OutputAuthMode>0</OutputAuthMode> <MasterVS>0</MasterVS> <MasterVSID>0</MasterVSID> <AddVia>0</AddVia> <QoS>0</QoS> <TlsType>0</TlsType> <NeedHostName>N</NeedHostName> <OCSPVerify>N</OCSPVerify> <EnhancedHealthChecks>N</EnhancedHealthChecks> <RsMinimum>0</RsMinimum> <NumberOfRSs>0</NumberOfRSs> <SSLAcceleration>Y</SSLAcceleration> <CertFile>cert1</CertFile> </Data> </Success> </Response>"
# Contains multiple certs
self.vs_get_response_multicert = "<Response stat=\"200\" code=\"ok\"> <Success> <Data> <Status>Down</Status> <Index>1</Index> <VSAddress>10.154.75.123</VSAddress> <VSPort>80</VSPort> <Enable>Y</Enable> <SSLReverse>N</SSLReverse> <SSLReencrypt>N</SSLReencrypt> <Intercept>N</Intercept> <InterceptOpts> <Opt>opnormal</Opt> <Opt>auditrelevant</Opt> <Opt>reqdatadisable</Opt> <Opt>resdatadisable</Opt> </InterceptOpts> <AlertThreshold>0</AlertThreshold> <Transactionlimit>0</Transactionlimit> <Transparent>Y</Transparent> <SubnetOriginating>N</SubnetOriginating> <ServerInit>0</ServerInit> <StartTLSMode>0</StartTLSMode> <Idletime>0</Idletime> <Cache>N</Cache> <Compress>N</Compress> <Verify>0</Verify> <UseforSnat>N</UseforSnat> <ForceL7>Y</ForceL7> <MultiConnect>N</MultiConnect> <ClientCert>0</ClientCert> <ErrorCode>0</ErrorCode> <CheckUse1.1>N</CheckUse1.1> <MatchLen>0</MatchLen> <CheckUseGet>0</CheckUseGet> <SSLRewrite>0</SSLRewrite> <VStype>http</VStype> <FollowVSID>0</FollowVSID> <Protocol>tcp</Protocol> <Schedule>rr</Schedule> <CheckType>http</CheckType> <PersistTimeout>0</PersistTimeout> <CheckPort>0</CheckPort> <NRules>0</NRules> <NRequestRules>0</NRequestRules> <NResponseRules>0</NResponseRules> <NPreProcessRules>0</NPreProcessRules> <EspEnabled>N</EspEnabled> <InputAuthMode>0</InputAuthMode> <OutputAuthMode>0</OutputAuthMode> <MasterVS>0</MasterVS> <MasterVSID>0</MasterVSID> <AddVia>0</AddVia> <QoS>0</QoS> <TlsType>0</TlsType> <NeedHostName>N</NeedHostName> <OCSPVerify>N</OCSPVerify> <EnhancedHealthChecks>N</EnhancedHealthChecks> <RsMinimum>0</RsMinimum> <NumberOfRSs>0</NumberOfRSs> <SSLAcceleration>Y</SSLAcceleration> <CertFile>cert1 cert2 cert3 cert4</CertFile> </Data> </Success> </Response>"
# real server 'get'
self.rs_get_response = "<Response stat=\"200\" code=\"ok\"> <Success> <Data> <Rs> <Status>Down</Status> <VSIndex>0</VSIndex> <RsIndex>1</RsIndex> <Addr>10.154.123.13</Addr> <Port>80</Port> <Forward>nat</Forward> <Weight>1000</Weight> <Limit>0</Limit> <Enable>Y</Enable> <Critical>N</Critical> </Rs> </Data> </Success> </Response>"
# A 'get' of a subvs
self.subvs_get_response = "<Response stat=\"200\" code=\"ok\"> <Success> <Data> <Status>Down</Status> <Index>2</Index> <VSPort>0</VSPort> <Enable>Y</Enable> <SSLReverse>N</SSLReverse> <SSLReencrypt>N</SSLReencrypt> <Intercept>N</Intercept> <InterceptOpts> <Opt>opnormal</Opt> <Opt>auditnone</Opt> <Opt>reqdatadisable</Opt> <Opt>resdatadisable</Opt> </InterceptOpts> <AlertThreshold>0</AlertThreshold> <Transactionlimit>0</Transactionlimit> <Transparent>Y</Transparent> <SubnetOriginating>N</SubnetOriginating> <ServerInit>0</ServerInit> <StartTLSMode>0</StartTLSMode> <Idletime>0</Idletime> <Cache>N</Cache> <Compress>N</Compress> <Verify>0</Verify> <UseforSnat>N</UseforSnat> <ForceL7>Y</ForceL7> <MultiConnect>N</MultiConnect> <ClientCert>0</ClientCert> <ErrorCode>0</ErrorCode> <CheckUse1.1>N</CheckUse1.1> <MatchLen>0</MatchLen> <CheckUseGet>0</CheckUseGet> <SSLRewrite>0</SSLRewrite> <VStype>http</VStype> <FollowVSID>0</FollowVSID> <Protocol>tcp</Protocol> <Schedule>rr</Schedule> <CheckType>http</CheckType> <PersistTimeout>0</PersistTimeout> <CheckPort>0</CheckPort> <NRules>0</NRules> <NRequestRules>0</NRequestRules> <NResponseRules>0</NResponseRules> <NPreProcessRules>0</NPreProcessRules> <EspEnabled>N</EspEnabled> <InputAuthMode>0</InputAuthMode> <OutputAuthMode>0</OutputAuthMode> <MasterVS>0</MasterVS> <MasterVSID>1</MasterVSID> <AddVia>0</AddVia> <QoS>0</QoS> <TlsType>0</TlsType> <NeedHostName>N</NeedHostName> <OCSPVerify>N</OCSPVerify> <EnhancedHealthChecks>N</EnhancedHealthChecks> <RsMinimum>0</RsMinimum> <NumberOfRSs>0</NumberOfRSs> </Data> </Success> </Response>"
# A parent vs with a subvs newly created
self.create_subvs_get_response = "<Response stat=\"200\" code=\"ok\"> <Success> <Data> <Status>Down</Status> <Index>1</Index> <VSAddress>10.154.75.123</VSAddress> <VSPort>80</VSPort> <Enable>Y</Enable> <SSLReverse>N</SSLReverse> <SSLReencrypt>N</SSLReencrypt> <Intercept>N</Intercept> <InterceptOpts> <Opt>opnormal</Opt> <Opt>auditrelevant</Opt> <Opt>reqdatadisable</Opt> <Opt>resdatadisable</Opt> </InterceptOpts> <AlertThreshold>0</AlertThreshold> <Transactionlimit>0</Transactionlimit> <Transparent>Y</Transparent> <SubnetOriginating>N</SubnetOriginating> <ServerInit>0</ServerInit> <StartTLSMode>0</StartTLSMode> <Idletime>0</Idletime> <Cache>N</Cache> <Compress>N</Compress> <Verify>0</Verify> <UseforSnat>N</UseforSnat> <ForceL7>Y</ForceL7> <MultiConnect>N</MultiConnect> <ClientCert>0</ClientCert> <ErrorCode>0</ErrorCode> <CheckUse1.1>N</CheckUse1.1> <MatchLen>0</MatchLen> <CheckUseGet>0</CheckUseGet> <SSLRewrite>0</SSLRewrite> <VStype>http</VStype> <FollowVSID>0</FollowVSID> <Protocol>tcp</Protocol> <Schedule>rr</Schedule> <CheckType>http</CheckType> <PersistTimeout>0</PersistTimeout> <CheckPort>0</CheckPort> <NRules>0</NRules> <NRequestRules>0</NRequestRules> <NResponseRules>0</NResponseRules> <NPreProcessRules>0</NPreProcessRules> <EspEnabled>N</EspEnabled> <InputAuthMode>0</InputAuthMode> <OutputAuthMode>0</OutputAuthMode> <MasterVS>1</MasterVS> <MasterVSID>0</MasterVSID> <AddVia>0</AddVia> <QoS>0</QoS> <TlsType>0</TlsType> <NeedHostName>N</NeedHostName> <OCSPVerify>N</OCSPVerify> <RsMinimum>0</RsMinimum> <NumberOfRSs>1</NumberOfRSs> <SubVS> <Status>Down</Status> <VSIndex>2</VSIndex> <RsIndex>1</RsIndex> <Name>-</Name> <Forward>nat</Forward> <Weight>1000</Weight> <Limit>0</Limit> <Enable>Y</Enable> <Critical>N</Critical> </SubVS> </Data> </Success> </Response>"
def test_init_with_no_endpoint(self):
    """A lm_info dict lacking the 'endpoint' key must be rejected."""
    incomplete_info = {"ip_address": "1.1.1.1"}
    # The fully-populated info from setup constructs fine.
    VirtualService(self.lm_info, "1.1.1.2")
    # Missing 'endpoint' raises the dedicated exception.
    with assert_raises(exceptions.VirtualServiceMissingLoadmasterInfo):
        VirtualService(incomplete_info, "1.1.1.2")
def test_init_with_no_ipaddress(self):
    """A lm_info dict lacking the 'ip_address' key must be rejected."""
    incomplete_info = {"endpoint": "https://1.1.1.1:443/access"}
    # The fully-populated info from setup constructs fine.
    VirtualService(self.lm_info, "1.1.1.2")
    # Missing 'ip_address' raises the dedicated exception.
    with assert_raises(exceptions.VirtualServiceMissingLoadmasterInfo):
        VirtualService(incomplete_info, "1.1.1.2")
def test_str(self):
    """str() describes protocol, address:port and the LoadMaster IP."""
    expected = "Virtual Service TCP 1.1.1.2:80 on LoadMaster 1.1.1.1"
    assert_equal(str(self.vs), expected)
def test_get_base_parameters(self):
    """Base params use address/port/protocol before the VS has an index,
    and only the index afterwards."""
    assert_equal(
        self.vs._get_base_parameters(),
        {"vs": "1.1.1.2", "port": 80, "prot": "tcp"},
    )
    # Once an index is known, it alone identifies the VS.
    self.vs.index = 1
    assert_equal(self.vs._get_base_parameters(), {"vs": 1})
def test_to_api_dict(self):
    """to_api_dict() exposes only the API-addressable identifiers."""
    expected = {"vs": "1.1.1.2", "port": 80, "prot": "tcp"}
    assert_equal(self.vs.to_api_dict(), expected)
def test_to_dict(self):
    """to_dict() merges the LoadMaster connection info with the VS
    attributes and the (empty) sub-VS / real-server collections.

    Fix: removed a leftover debug ``print(actual)`` statement.
    """
    self.vs._ignore = None
    expected = {
        "endpoint": "https://1.1.1.1:443/access",
        "ip_address": "1.1.1.1",
        "vs": "1.1.1.2",
        "port": 80,
        "prot": "tcp",
        "auth": ("bal", "2fourall"),
        "subvs_entries": [],
        "real_servers": [],
    }
    assert_equal(self.vs.to_dict(), expected)
def test_create_sub_virtual_service(self):
    """A freshly created sub VS is flagged as a sub VS."""
    sub_vs = self.vs.create_sub_virtual_service()
    assert_equal(sub_vs._is_sub_vs, True)
def test_subvs_cant_create_sub_virtual_service(self):
    """A sub VS must not be able to spawn a sub VS of its own."""
    child = self.vs.create_sub_virtual_service()
    with assert_raises(exceptions.SubVsCannotCreateSubVs):
        child.create_sub_virtual_service()
def test_vs_save_certfile_list(self):
    """Saving with a list of cert files keeps the list intact."""
    with patch.object(VirtualService, "_get") as _get:
        _get.return_value = self.vs_get_response_multicert
        self.vs.sslacceleration = "Y"
        certs = ["cert1", "cert2", "cert3", "cert4"]
        self.vs.certfile = certs
        # update=True skips code paths irrelevant to this case.
        self.vs.save(update=True)
        assert_equal(self.vs.certfile, ["cert1", "cert2", "cert3", "cert4"])
def test_vs_save_certfile_str(self):
    """A single cert file given as a plain string is normalised to a
    one-element list by save().

    Fix: removed two leftover debug ``print(self.vs.__repr__())`` calls.
    """
    with patch.object(VirtualService, "_get") as _get:
        _get.return_value = self.vs_get_response_singlecert
        self.vs.sslacceleration = "Y"
        self.vs.certfile = "cert1"
        # update=True skips code paths irrelevant to this case.
        self.vs.save(update=True)
        assert_equal(self.vs.certfile, ["cert1"])
def test_vs_save_certfile_None(self):
    """With SSL acceleration off and an empty certfile, save() leaves
    the certfile list empty."""
    with patch.object(VirtualService, "_get") as _get:
        _get.return_value = self.vs_get_response_defaultcert
        self.vs.sslacceleration = "N"
        self.vs.certfile = ""
        # update=True skips code paths irrelevant to this case.
        self.vs.save(update=True)
        assert_equal(self.vs.certfile, [])
def test_vs_save_persist(self):
    """Clearing persist leaves persisttimeout unset after save()."""
    with patch.object(VirtualService, "_get") as _get:
        _get.return_value = self.vs_get_response
        self.vs.persist = None
        # update=True skips code paths irrelevant to this case.
        self.vs.save(update=True)
        assert_equal(self.vs.persisttimeout, None)
def test_save_add_normal_vs(self):
    """Saving a brand-new VS populates its index from the LM response."""
    with patch.object(VirtualService, "_get") as _get:
        _get.return_value = self.vs_get_response
        # Before saving there is no index yet.
        assert_equal(self.vs.index, None)
        self.vs.save()
        # Index comes from the <Index> element in the mocked response.
        assert_equal(self.vs.index, "1")
def test_save_add_sub_vs(self):
    """Saving a newly created sub VS picks up its index and sub-VS data
    from the sequence of LM responses."""
    with patch.object(VirtualService, "_get") as _get:
        # Pretend the parent VS has already been saved.
        self.vs.index = 1
        subvs = self.vs.create_sub_virtual_service()
        _get.side_effect = [
            self.vs_get_response,
            self.create_subvs_get_response,
            self.subvs_get_response,
        ]
        # Exercises the large sub-VS branch of VirtualService.save().
        subvs.save()
        assert_equal(subvs.index, "2")
        assert_not_equal(subvs.subvs_data, [])
def test_save_update_sub_vs(self):
    """Updating an existing sub VS refreshes its index and sub-VS data."""
    with patch.object(VirtualService, "_get") as _get, \
            patch.object(VirtualService, "_subvs_to_dict") as _subvs_to_dict:
        _subvs_to_dict.return_value = {
            "vs": "1",
            "rs": "!1",
            "name": "asdf",
            "forward": "asdf",
            "weight": "sdf",
            "limit": "sdf",
            "critical": "sdf",
            "enable": "sdf",
        }
        # Pretend the parent VS has already been saved.
        self.vs.index = 1
        subvs = self.vs.create_sub_virtual_service()
        _get.return_value = self.subvs_get_response
        subvs.save(update=True)
        assert_equal(subvs.index, "2")
        assert_not_equal(subvs.subvs_data, [])
class Test_get_real_servers:
    """Tests for VirtualService.get_real_servers().

    Fix: removed the stray space in ``get_data. return_value``.
    """

    def setup(self):
        self.lm_info = {
            "endpoint": "https://1.1.1.1:443/access",
            "ip_address": "1.1.1.1",
            "auth": ("bal", "2fourall"),
        }
        self.vs = VirtualService(self.lm_info, "1.1.1.2")

    def test_data_exists(self):
        """Each entry under the 'Rs' key is run through build_real_server."""
        with patch.object(VirtualService, "build_real_server") as build_real_server, \
                patch.object(objects, "get_data") as get_data, \
                patch.object(VirtualService, "_get"):
            # sorted('ba') -> ['a', 'b'] stands in for a built real server.
            build_real_server.side_effect = sorted
            get_data.return_value = {"Rs": ["ba", "ed"]}
            res = self.vs.get_real_servers()
            assert_equal(res, [["a", "b"], ["d", "e"]])

    def test_no_data_exists(self):
        """No 'Rs' key in the response means an empty server list."""
        with patch.object(VirtualService, "build_real_server") as build_real_server, \
                patch.object(objects, "get_data") as get_data, \
                patch.object(VirtualService, "_get"):
            build_real_server.side_effect = sorted
            get_data.return_value = {}
            assert_equal(self.vs.get_real_servers(), [])
class Test_get_real_server:
    """Tests for VirtualService.get_real_server()."""

    def setup(self):
        self.lm_info = {
            "endpoint": "https://1.1.1.1:443/access",
            "ip_address": "1.1.1.1",
            "auth": ("bal", "2fourall"),
        }
        self.vs = VirtualService(self.lm_info, "1.1.1.2")

    def test_with_index_ok(self):
        """With an index set, a valid lookup returns the built server."""
        with patch.object(VirtualService, "build_real_server") as build_real_server, \
                patch.object(objects, "get_data"), \
                patch.object(VirtualService, "_get"):
            # Any non-None value marks the VS as indexed.
            self.vs.index = self
            build_real_server.return_value = sentinel.rs
            assert_equal(self.vs.get_real_server("1.1.1.1", 80), sentinel.rs)

    def test_with_index_invalid_port(self):
        """With an index set, a non-numeric port is rejected."""
        with patch.object(VirtualService, "build_real_server") as build_real_server, \
                patch.object(objects, "get_data"), \
                patch.object(VirtualService, "_get"):
            self.vs.index = self
            build_real_server.return_value = sentinel.rs
            with assert_raises(ValidationError):
                self.vs.get_real_server("1.1.1.1", "junk")

    def test_without_index_ok(self):
        """Without an index, a valid lookup still returns the built server."""
        with patch.object(VirtualService, "build_real_server") as build_real_server, \
                patch.object(objects, "get_data"), \
                patch.object(VirtualService, "_get"):
            self.vs.index = None
            build_real_server.return_value = sentinel.rs
            assert_equal(self.vs.get_real_server("1.1.1.1", 80), sentinel.rs)

    def test_without_index_invalid_port(self):
        """Without an index, a non-numeric port is rejected."""
        with patch.object(VirtualService, "build_real_server") as build_real_server, \
                patch.object(objects, "get_data"), \
                patch.object(VirtualService, "_get"):
            self.vs.index = None
            build_real_server.return_value = sentinel.rs
            with assert_raises(ValidationError):
                self.vs.get_real_server("1.1.1.1.", "junk")
class Test_build_real_server:
    """Tests for VirtualService.build_real_server()."""

    def setup(self):
        self.lm_info = {
            "endpoint": "https://1.1.1.1:443/access",
            "ip_address": "1.1.1.1",
            "auth": ("bal", "2fourall"),
        }
        self.vs = VirtualService(self.lm_info, "1.1.1.2")

    def test_no_Addr(self):
        """A server dict without 'Addr' fails validation, naming the key."""
        with assert_raises(ValidationError) as err:
            self.vs.build_real_server({"Port": 80})
        assert_in("Addr", str(err.exception))

    def test_no_Port(self):
        """A server dict without 'Port' fails validation, naming the key."""
        with assert_raises(ValidationError) as err:
            self.vs.build_real_server({"Addr": "1.1.1.1"})
        assert_in("Port", str(err.exception))

    def test_ok(self):
        """A complete server dict builds a RealServer instance."""
        built = self.vs.build_real_server({"Addr": "1.1.1.1", "Port": 80})
        assert_is_instance(built, RealServer)
| 70.633758
| 1,799
| 0.663736
| 2,620
| 22,179
| 5.475191
| 0.093511
| 0.011293
| 0.010457
| 0.040432
| 0.872778
| 0.838411
| 0.823074
| 0.813942
| 0.796863
| 0.792402
| 0
| 0.028939
| 0.186708
| 22,179
| 313
| 1,800
| 70.859425
| 0.766327
| 0.029262
| 0
| 0.5625
| 0
| 0.027344
| 0.520126
| 0.358232
| 0
| 0
| 0
| 0
| 0.125
| 1
| 0.109375
| false
| 0
| 0.027344
| 0
| 0.152344
| 0.011719
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
95396547c3d5c16f9ed0b9997057772e365950ae
| 1,799
|
py
|
Python
|
blog/migrations/0005_auto_20201008_1805.py
|
JiajiaHuang/smonus
|
95ec209ae3562ea73ee9ce4c22a0d3a3f0975210
|
[
"Unlicense"
] | null | null | null |
blog/migrations/0005_auto_20201008_1805.py
|
JiajiaHuang/smonus
|
95ec209ae3562ea73ee9ce4c22a0d3a3f0975210
|
[
"Unlicense"
] | null | null | null |
blog/migrations/0005_auto_20201008_1805.py
|
JiajiaHuang/smonus
|
95ec209ae3562ea73ee9ce4c22a0d3a3f0975210
|
[
"Unlicense"
] | null | null | null |
# Generated by Django 2.2.1 on 2020-10-08 10:05
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
    """Auto-generated schema migration: adds ``channel.updated`` and
    normalises created/updated timestamp fields across the blog models."""

    dependencies = [
        ('blog', '0004_auto_20201003_2247'),
    ]

    operations = [
        # New auto-updating timestamp on Channel.
        migrations.AddField(
            model_name='channel',
            name='updated',
            field=models.DateTimeField(auto_now=True, verbose_name='修改时间'),
        ),
        # Creation timestamps default to "now"; update timestamps auto-refresh.
        migrations.AlterField(
            model_name='article',
            name='created',
            field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='创建时间'),
        ),
        migrations.AlterField(
            model_name='article',
            name='updated',
            field=models.DateTimeField(auto_now=True, verbose_name='修改时间'),
        ),
        migrations.AlterField(
            model_name='attachment',
            name='Created',
            field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='创建时间'),
        ),
        migrations.AlterField(
            model_name='attachment',
            name='Updated',
            field=models.DateTimeField(auto_now=True, verbose_name='更新时间'),
        ),
        migrations.AlterField(
            model_name='channel',
            name='created',
            field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='创建时间'),
        ),
        migrations.AlterField(
            model_name='tagsmodels',
            name='tags_created',
            field=models.DateTimeField(default=django.utils.timezone.now, verbose_name='创建时间'),
        ),
        migrations.AlterField(
            model_name='tagsmodels',
            name='tags_updated',
            field=models.DateTimeField(auto_now=True, verbose_name='更新时间'),
        ),
    ]
| 32.709091
| 95
| 0.590884
| 171
| 1,799
| 6.070175
| 0.269006
| 0.069364
| 0.184971
| 0.195568
| 0.766859
| 0.766859
| 0.734104
| 0.734104
| 0.734104
| 0.734104
| 0
| 0.024238
| 0.289049
| 1,799
| 54
| 96
| 33.314815
| 0.787334
| 0.025014
| 0
| 0.729167
| 1
| 0
| 0.11016
| 0.013128
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.041667
| 0
| 0.104167
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
95e9c5585303c7bd303fc979afb0a10b96ec264e
| 3,965
|
py
|
Python
|
test/pyaz/sql/dw/__init__.py
|
bigdatamoore/py-az-cli
|
54383a4ee7cc77556f6183e74e992eec95b28e01
|
[
"MIT"
] | null | null | null |
test/pyaz/sql/dw/__init__.py
|
bigdatamoore/py-az-cli
|
54383a4ee7cc77556f6183e74e992eec95b28e01
|
[
"MIT"
] | 9
|
2021-09-24T16:37:24.000Z
|
2021-12-24T00:39:19.000Z
|
test/pyaz/sql/dw/__init__.py
|
bigdatamoore/py-az-cli
|
54383a4ee7cc77556f6183e74e992eec95b28e01
|
[
"MIT"
] | null | null | null |
import json, subprocess
from ... pyaz_utils import get_cli_name, get_params
def create(name, server, resource_group, __CATALOG_COLLATION=None, collation=None, __ELASTIC_POOL_ID=None, __LICENSE_TYPE=None, max_size=None, service_objective=None, __RESTORE_POINT_IN_TIME=None, __SAMPLE_NAME=None, __SKU=None, __SOURCE_DATABASE_DELETION_DATE=None, tags=None, zone_redundant=None, __AUTO_PAUSE_DELAY=None, __MIN_CAPACITY=None, __COMPUTE_MODEL=None, __READ_SCALE=None, __HIGH_AVAILABILITY_REPLICA_COUNT=None, backup_storage_redundancy=None, __MAINTENANCE_CONFIGURATION_ID=None, __IS_LEDGER_ON=None, __CAPACITY=None, __FAMILY=None, __TIER=None, no_wait=None):
    """Run ``az sql dw create`` and return the parsed JSON output.

    Fixes over the generated original:
    * the required parameters (``name``, ``server``, ``resource_group``)
      followed defaulted ones, which is a SyntaxError in Python — they
      are now first (keyword callers are unaffected);
    * removed unreachable ``print`` statements after ``return``/``raise``.

    Raises:
        Exception: carrying the captured stderr when the az command
            produced no stdout.
    """
    params = get_params(locals())
    command = "az sql dw create " + params
    print(command)
    # NOTE(review): shell=True with string-interpolated params is
    # injection-prone; prefer an argv list with shell=False if any
    # argument can come from untrusted input.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    raise Exception(stderr)
def show(resource_group, server, name):
    """Run ``az sql dw show`` and return the parsed JSON output.

    Fix: removed unreachable ``print(stdout)``/``print(stderr)``
    statements that followed ``return``/``raise``.

    Raises:
        Exception: carrying the captured stderr when the az command
            produced no stdout.
    """
    params = get_params(locals())
    command = "az sql dw show " + params
    print(command)
    # NOTE(review): shell=True with interpolated params is injection-prone.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    raise Exception(stderr)
def list(server, resource_group):
    """Run ``az sql dw list`` and return the parsed JSON output.

    Fix: removed unreachable ``print(stdout)``/``print(stderr)``
    statements that followed ``return``/``raise``.

    NOTE: the name shadows the builtin ``list``; kept because it is the
    module's public, generated API.

    Raises:
        Exception: carrying the captured stderr when the az command
            produced no stdout.
    """
    params = get_params(locals())
    command = "az sql dw list " + params
    print(command)
    # NOTE(review): shell=True with interpolated params is injection-prone.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    raise Exception(stderr)
def delete(resource_group, server, name, yes=None, no_wait=None):
    """Run ``az sql dw delete`` and return the parsed JSON output.

    Fix: removed unreachable ``print(stdout)``/``print(stderr)``
    statements that followed ``return``/``raise``.

    Raises:
        Exception: carrying the captured stderr when the az command
            produced no stdout.
    """
    params = get_params(locals())
    command = "az sql dw delete " + params
    print(command)
    # NOTE(review): shell=True with interpolated params is injection-prone.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    raise Exception(stderr)
def pause(name, server, resource_group):
    """Run ``az sql dw pause`` and return the parsed JSON output.

    Fix: removed unreachable ``print(stdout)``/``print(stderr)``
    statements that followed ``return``/``raise``.

    Raises:
        Exception: carrying the captured stderr when the az command
            produced no stdout.
    """
    params = get_params(locals())
    command = "az sql dw pause " + params
    print(command)
    # NOTE(review): shell=True with interpolated params is injection-prone.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    raise Exception(stderr)
def resume(name, server, resource_group):
    """Run ``az sql dw resume`` and return the parsed JSON output.

    Fix: removed unreachable ``print(stdout)``/``print(stderr)``
    statements that followed ``return``/``raise``.

    Raises:
        Exception: carrying the captured stderr when the az command
            produced no stdout.
    """
    params = get_params(locals())
    command = "az sql dw resume " + params
    print(command)
    # NOTE(review): shell=True with interpolated params is injection-prone.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    raise Exception(stderr)
def update(resource_group, server, name, max_size=None, service_objective=None, set=None, add=None, remove=None, force_string=None, no_wait=None):
    """Run ``az sql dw update`` and return the parsed JSON output.

    Fix: removed unreachable ``print(stdout)``/``print(stderr)``
    statements that followed ``return``/``raise``.

    NOTE: ``set`` shadows the builtin; kept because it mirrors the az
    CLI flag name in this generated API.

    Raises:
        Exception: carrying the captured stderr when the az command
            produced no stdout.
    """
    params = get_params(locals())
    command = "az sql dw update " + params
    print(command)
    # NOTE(review): shell=True with interpolated params is injection-prone.
    output = subprocess.run(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    stdout = output.stdout.decode("utf-8")
    stderr = output.stderr.decode("utf-8")
    if stdout:
        return json.loads(stdout)
    raise Exception(stderr)
| 38.872549
| 575
| 0.682219
| 503
| 3,965
| 5.206759
| 0.194831
| 0.074838
| 0.053456
| 0.056128
| 0.762123
| 0.762123
| 0.73845
| 0.73845
| 0.725086
| 0.725086
| 0
| 0.00443
| 0.203026
| 3,965
| 101
| 576
| 39.257426
| 0.824367
| 0
| 0
| 0.827957
| 0
| 0
| 0.046406
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.021505
| null | null | 0.225806
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
c299dc043a3ee18eefb82d5c320399ae1ce7bb70
| 32
|
py
|
Python
|
numbamisc/utils/__init__.py
|
MSeifert04/numbamisc
|
4141ee524f077299f57d0285110debbdcdba0013
|
[
"Apache-2.0"
] | 4
|
2016-12-01T02:27:45.000Z
|
2021-09-06T11:37:37.000Z
|
numbamisc/utils/__init__.py
|
MSeifert04/numbamisc
|
4141ee524f077299f57d0285110debbdcdba0013
|
[
"Apache-2.0"
] | 1
|
2020-10-29T19:51:47.000Z
|
2020-11-04T21:14:01.000Z
|
numbamisc/utils/__init__.py
|
MSeifert04/numbamisc
|
4141ee524f077299f57d0285110debbdcdba0013
|
[
"Apache-2.0"
] | null | null | null |
from ._generatefilters import *
| 16
| 31
| 0.8125
| 3
| 32
| 8.333333
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 32
| 1
| 32
| 32
| 0.892857
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c2db62382fb08b36c1b3bbf64acecafb9ebb26a7
| 388
|
py
|
Python
|
ics_demo/dao/__init__.py
|
lielongxingkong/ics-demo
|
21a08945f3983eb409916a7380549f74e3ba5171
|
[
"MIT"
] | null | null | null |
ics_demo/dao/__init__.py
|
lielongxingkong/ics-demo
|
21a08945f3983eb409916a7380549f74e3ba5171
|
[
"MIT"
] | null | null | null |
ics_demo/dao/__init__.py
|
lielongxingkong/ics-demo
|
21a08945f3983eb409916a7380549f74e3ba5171
|
[
"MIT"
] | null | null | null |
from db import init_db
from interfaces.demo import rabbit as rabbit_dao
from interfaces.demo import carrot as carrot_dao
from interfaces.demo import carrot as corps_dao
from interfaces import host as host_dao
from interfaces import blockdevice as block_dao
from interfaces.vsan import mon as mon_dao
from interfaces.vsan import osd as osd_dao
from interfaces.vsan import vsan as vsan_dao
| 38.8
| 48
| 0.850515
| 67
| 388
| 4.791045
| 0.253731
| 0.34891
| 0.370717
| 0.224299
| 0.470405
| 0.218069
| 0.218069
| 0
| 0
| 0
| 0
| 0
| 0.134021
| 388
| 9
| 49
| 43.111111
| 0.955357
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c2e4088afea88aafa4dedd93b2474372054dc971
| 143
|
py
|
Python
|
AutomatedParking/AutomatedParking/models.py
|
COMP-SCI-72/Automated-Parking-System
|
8abc462e7d95daf485d2f99b13000ae6119331f3
|
[
"Apache-2.0"
] | null | null | null |
AutomatedParking/AutomatedParking/models.py
|
COMP-SCI-72/Automated-Parking-System
|
8abc462e7d95daf485d2f99b13000ae6119331f3
|
[
"Apache-2.0"
] | null | null | null |
AutomatedParking/AutomatedParking/models.py
|
COMP-SCI-72/Automated-Parking-System
|
8abc462e7d95daf485d2f99b13000ae6119331f3
|
[
"Apache-2.0"
] | null | null | null |
from django.db import models
class User(models.Model):
    """Parking-system user. No fields declared yet (placeholder)."""
class Car(models.Model):
    """A car tracked by the parking system. No fields declared yet (placeholder)."""
class Parking(models.Model):
    """A parking record/spot. No fields declared yet (placeholder)."""
| 9.533333
| 28
| 0.685315
| 20
| 143
| 4.9
| 0.55
| 0.336735
| 0.459184
| 0.408163
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.223776
| 143
| 14
| 29
| 10.214286
| 0.882883
| 0
| 0
| 0.428571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.428571
| 0.142857
| 0
| 0.571429
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
6c16befe9f689bf20b5b88dc6a63119f677b759b
| 89
|
py
|
Python
|
tests/test_person.py
|
FOR-THE-HORDE-OPUS/Orgrimmar
|
5e45016e029574fbf959e122055b93ae1c300f1b
|
[
"MIT"
] | null | null | null |
tests/test_person.py
|
FOR-THE-HORDE-OPUS/Orgrimmar
|
5e45016e029574fbf959e122055b93ae1c300f1b
|
[
"MIT"
] | 5
|
2020-11-13T18:36:55.000Z
|
2022-02-10T01:12:56.000Z
|
tests/test_person.py
|
FOR-THE-HORDE-OPUS/Orgrimmar
|
5e45016e029574fbf959e122055b93ae1c300f1b
|
[
"MIT"
] | null | null | null |
def test_update_person_name():
    """Stub: behaviour for updating a person's name is not implemented yet."""
    return
def test_update_person_address():
    """Stub: behaviour for updating a person's address is not implemented yet."""
    return
| 12.714286
| 33
| 0.752809
| 12
| 89
| 5.083333
| 0.583333
| 0.229508
| 0.42623
| 0.622951
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.179775
| 89
| 6
| 34
| 14.833333
| 0.835616
| 0
| 0
| 0.5
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| true
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 1
| 0
|
0
| 9
|
666ccbc45a5c3aa3e1a6a55ff49484940ca12818
| 512
|
py
|
Python
|
accounts/models.py
|
akhilmaharana/history-and-bookmark-recommendation-app
|
55a55c5dc2f752bb83a3914b8205fd33a22acfd9
|
[
"MIT"
] | null | null | null |
accounts/models.py
|
akhilmaharana/history-and-bookmark-recommendation-app
|
55a55c5dc2f752bb83a3914b8205fd33a22acfd9
|
[
"MIT"
] | null | null | null |
accounts/models.py
|
akhilmaharana/history-and-bookmark-recommendation-app
|
55a55c5dc2f752bb83a3914b8205fd33a22acfd9
|
[
"MIT"
] | null | null | null |
from django.db import models
# Create your models here.
class Contact(models.Model):
    """A contact-form submission (name, country and subject)."""
    firstName = models.CharField(max_length=100)
    lastName = models.CharField(max_length=100)
    countryName = models.CharField(max_length=100)
    subject = models.CharField(max_length=100)
class ContactDetails(models.Model):
    """Contact details record.

    NOTE(review): field-for-field identical to ``Contact`` — possibly an
    accidental duplicate; confirm whether both tables are intended.
    """
    firstName = models.CharField(max_length=100)
    lastName = models.CharField(max_length=100)
    countryName = models.CharField(max_length=100)
    subject = models.CharField(max_length=100)
| 36.571429
| 50
| 0.763672
| 65
| 512
| 5.892308
| 0.307692
| 0.313316
| 0.375979
| 0.501305
| 0.804178
| 0.804178
| 0.804178
| 0.804178
| 0.804178
| 0.804178
| 0
| 0.054299
| 0.136719
| 512
| 14
| 51
| 36.571429
| 0.812217
| 0.046875
| 0
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.090909
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
66b36148d799b4b6799c7cd7e0cb3d3f3368bf30
| 451
|
py
|
Python
|
tests/internal/ebs_optimized_support/test_ebs_optimized_support_unsupported_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
tests/internal/ebs_optimized_support/test_ebs_optimized_support_unsupported_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | null | null | null |
tests/internal/ebs_optimized_support/test_ebs_optimized_support_unsupported_auto.py
|
frolovv/aws.ec2.compare
|
582805823492f833d65c0441c4a14dce697c12aa
|
[
"Apache-2.0"
] | 1
|
2021-12-15T11:58:22.000Z
|
2021-12-15T11:58:22.000Z
|
# Testing module ebs_optimized_support.unsupported
import pytest
import ec2_compare.internal.ebs_optimized_support.unsupported
def test_get_internal_data_ebs_optimized_support_unsupported_get_instances_list():
    """The generated unsupported-instances list must be non-empty."""
    instances = ec2_compare.internal.ebs_optimized_support.unsupported.get_instances_list()
    assert len(instances) > 0
def test_get_internal_data_ebs_optimized_support_unsupported_get():
    """The generated ``get`` container must be non-empty."""
    data = ec2_compare.internal.ebs_optimized_support.unsupported.get
    assert len(data) > 0
| 45.1
| 93
| 0.882483
| 62
| 451
| 5.919355
| 0.322581
| 0.196185
| 0.310627
| 0.490463
| 0.828338
| 0.828338
| 0.828338
| 0.626703
| 0.626703
| 0.626703
| 0
| 0.011765
| 0.05765
| 451
| 9
| 94
| 50.111111
| 0.851765
| 0.10643
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
|
0
| 10
|
66eb7502095913e7cbc3b2898e48e7b052c5258c
| 230
|
py
|
Python
|
pyforchange/egg/__init__.py
|
PythonForChange/pyforchange
|
2cc5afef227ac68147e291e447c57924586a0b12
|
[
"MIT"
] | 1
|
2021-06-07T02:10:41.000Z
|
2021-06-07T02:10:41.000Z
|
pyforchange/egg/__init__.py
|
PythonForChange/pyforchange
|
2cc5afef227ac68147e291e447c57924586a0b12
|
[
"MIT"
] | null | null | null |
pyforchange/egg/__init__.py
|
PythonForChange/pyforchange
|
2cc5afef227ac68147e291e447c57924586a0b12
|
[
"MIT"
] | null | null | null |
from pyforchange.egg.resources.modules import *
from pyforchange.egg.resources.console import *
from pyforchange.egg.resources.constants import *
from pyforchange.egg.resources.extensions import *
from pyforchange.egg.app import *
| 46
| 50
| 0.834783
| 29
| 230
| 6.62069
| 0.344828
| 0.390625
| 0.46875
| 0.5625
| 0.515625
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082609
| 230
| 5
| 51
| 46
| 0.909953
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
06fd8faf81245fda5a16cd3c3df799f903aca29c
| 8,002
|
py
|
Python
|
test/const.py
|
mikeshultz/py4byte
|
ef4afef47232aa010a409ffcfa41b662ace4a449
|
[
"MIT"
] | null | null | null |
test/const.py
|
mikeshultz/py4byte
|
ef4afef47232aa010a409ffcfa41b662ace4a449
|
[
"MIT"
] | null | null | null |
test/const.py
|
mikeshultz/py4byte
|
ef4afef47232aa010a409ffcfa41b662ace4a449
|
[
"MIT"
] | null | null | null |
# flake8: noqa
SWAP_TRANSACTION = {
'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3',
'blockNumber': 11325697,
'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b',
'gas': 203049,
'gasPrice': 69000000000,
'hash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364',
'input': '0x38ed173900000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000101e38e99b52f2ce48c00000000000000000000000000000000000000000000000000000000000000a00000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b000000000000000000000000000000000000000000000000000000005fbdf16400000000000000000000000000000000000000000000000000000000000000030000000000000000000000008207c1ffc5b6804f6024322ccf34f29c3541ae26000000000000000000000000c02aaa39b223fe8d0a0e5c4f27ead9083c756cc20000000000000000000000006b175474e89094c44da98b954eedeac495271d0f',
'nonce': 85,
'r': '0x8a4e290dd3ab0440186b84e9f8b253e9e6b04389a3f3214d9da59d7c0533ca85',
's': '0x2ab840455ab71a77cd426fab7f2cd9307e3bd0a7699a73d49e807d68ca3d0e18',
'to': '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D',
'transactionIndex': 8,
'v': 37,
'value': 0
}
SWAP_RECEIPT = {
'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3',
'blockNumber': 11325697,
'contractAddress': None,
'cumulativeGasUsed': 872286,
'from': '0x9283099A29556fCF8fFF5b2Cea2D4F67CB7A7A8b',
'gasUsed': 174314,
'logs': [
{
'address': '0x8207c1FfC5B6804F6024322CcF34F29c3541Ae26',
'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3',
'blockNumber': 11325697,
'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000',
'logIndex': 18,
'removed': False,
'topics': [
'0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef',
'0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b',
'0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185'
],
'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364',
'transactionIndex': 8
},
{
'address': '0xC02aaA39b223FE8D0A0e5C4F27eAD9083C756Cc2',
'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3',
'blockNumber': 11325697,
'data': '0x0000000000000000000000000000000000000000000000007291d30a40c19a3b',
'logIndex': 19,
'removed': False,
'topics': [
'0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef',
'0x000000000000000000000000ce2cc0513634cef3a7c9c257e294ef5e3092f185',
'0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11'
],
'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364',
'transactionIndex': 8
},
{
'address': '0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185',
'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3',
'blockNumber': 11325697,
'data': '0x00000000000000000000000000000000000000000001c8b8cbb210149443c7c300000000000000000000000000000000000000000000001fd2b17e29f3c7fe13',
'logIndex': 20,
'removed': False,
'topics': [
'0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1'
],
'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364',
'transactionIndex': 8
},
{
'address': '0xce2Cc0513634CEf3a7C9C257E294EF5E3092f185',
'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3',
'blockNumber': 11325697,
'data': '0x00000000000000000000000000000000000000000000065a4da25d3016c00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b',
'logIndex': 21,
'removed': False,
'topics': [
'0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822',
'0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d',
'0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11'
],
'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364',
'transactionIndex': 8
},
{
'address': '0x6B175474E89094C44Da98b954EedeAC495271d0F',
'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3',
'blockNumber': 11325697,
'data': '0x0000000000000000000000000000000000000000000001032c7405f5694857e5',
'logIndex': 22,
'removed': False,
'topics': [
'0xddf252ad1be2c89b69c2b068fc378daa952ba7f163c4a11628f55a4df523b3ef',
'0x000000000000000000000000a478c2975ab1ea89e8196811f51a7b7ade33eb11',
'0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b'
],
'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364',
'transactionIndex': 8
},
{
'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11',
'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3',
'blockNumber': 11325697,
'data': '0x00000000000000000000000000000000000000000034faa46be5604324421cca000000000000000000000000000000000000000000001759ed6d2f07716bdd40',
'logIndex': 23,
'removed': False,
'topics': ['0x1c411e9a96e071241c2f21f7726b17ae89e3cab4c78be50e062b03a9fffbbad1'],
'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364',
'transactionIndex': 8
},
{
'address': '0xA478c2975Ab1Ea89e8196811F51A7B7Ade33eB11',
'blockHash': '0x3f43cba31e73252412c88d42f80b64e3c81ed99a1ef3743d14c891ed0ef54ab3',
'blockNumber': 11325697,
'data': '0x00000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000007291d30a40c19a3b0000000000000000000000000000000000000000000001032c7405f5694857e50000000000000000000000000000000000000000000000000000000000000000',
'logIndex': 24,
'removed': False,
'topics': [
'0xd78ad95fa46c994b6551d0da85fc275fe613ce37657fb8d5e3d130840159d822',
'0x0000000000000000000000007a250d5630b4cf539739df2c5dacb4c659f2488d',
'0x0000000000000000000000009283099a29556fcf8fff5b2cea2d4f67cb7a7a8b'
],
'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364',
'transactionIndex': 8
}
],
'logsBloom': '0x00200000400000000000000080010000000000000000000000010000000000001000000000000000000000000000000012000000080000001000000000000000000000000200000000080008000000200000000000001000000000000000000011000000000080000000000000000000000000000000000000000010000000000080000000000000004000000000000000000000000000080000004000000000000000000000000040200000100000000000000000000100000200000000000000000002000000000000080000400000000002000000001000000000000020000000200000000000000000000000000000000000000000000000000000000000',
'status': 1,
'to': '0x7a250d5630B4cF539739dF2C5dAcb4c659F2488D',
'transactionHash': '0x32e97818ae1732c0cdc387452e24bd3700a610070b2b59de04d69f82c3f73364',
'transactionIndex': 8
}
| 60.165414
| 602
| 0.744314
| 193
| 8,002
| 30.849741
| 0.398964
| 0.113369
| 0.129997
| 0.142089
| 0.388646
| 0.232617
| 0.192812
| 0.192812
| 0.192812
| 0.164595
| 0
| 0.612425
| 0.191327
| 8,002
| 132
| 603
| 60.621212
| 0.30768
| 0.0015
| 0
| 0.561538
| 0
| 0
| 0.719705
| 0.623185
| 0
| 1
| 0.623185
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b096f70b1f0bbcc61fe058b62a9b67a7ae64cdfe
| 260
|
py
|
Python
|
password_security/models/__init__.py
|
juazisco/gestion_rifa
|
bce6b75f17cb5ab2df7e2f7dd5141fc85a1a5bfb
|
[
"MIT"
] | null | null | null |
password_security/models/__init__.py
|
juazisco/gestion_rifa
|
bce6b75f17cb5ab2df7e2f7dd5141fc85a1a5bfb
|
[
"MIT"
] | null | null | null |
password_security/models/__init__.py
|
juazisco/gestion_rifa
|
bce6b75f17cb5ab2df7e2f7dd5141fc85a1a5bfb
|
[
"MIT"
] | null | null | null |
# Copyright 2015 LasLabs Inc.
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html).
from . import res_users # noqa
from . import res_company # noqa
from . import res_users_pass_history # noqa
from . import res_config_settings # noqa
| 32.5
| 68
| 0.726923
| 40
| 260
| 4.55
| 0.65
| 0.21978
| 0.285714
| 0.28022
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.028169
| 0.180769
| 260
| 7
| 69
| 37.142857
| 0.826291
| 0.438462
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 8
|
b0c0cdf094c977079cc2ff8a28be12ab38f6d6ea
| 67,749
|
py
|
Python
|
python/target_selection/cartons/mwm_yso.py
|
sdss/target_selection
|
7196bf1491c4e9c18140301c7001e503f391a8e1
|
[
"BSD-3-Clause"
] | 3
|
2020-07-07T01:38:59.000Z
|
2020-11-24T21:46:58.000Z
|
python/target_selection/cartons/mwm_yso.py
|
sdss/target_selection
|
7196bf1491c4e9c18140301c7001e503f391a8e1
|
[
"BSD-3-Clause"
] | 26
|
2020-05-28T07:18:54.000Z
|
2021-11-30T18:36:10.000Z
|
python/target_selection/cartons/mwm_yso.py
|
sdss/target_selection
|
7196bf1491c4e9c18140301c7001e503f391a8e1
|
[
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# @Author: Pramod Gupta (psgupta@uw.edu)
# @Date: 2020-06-10
# @Filename: mwm_yso.py
# @License: BSD 3-clause (http://www.opensource.org/licenses/BSD-3-Clause)
import peewee
from sdssdb.peewee.sdss5db.catalogdb import (MIPSGAL, AllWise, Catalog,
CatalogToTIC_v8, Gaia_DR2,
Sagitta, TIC_v8, TwoMassPSC,
YSO_Clustering, Zari18pms)
from target_selection.cartons import BaseCarton
from target_selection.exceptions import TargetSelectionError
# See catalog.py for the name of peewee model names corresponding
# to postgres table names:
# https://github.com/sdss/sdssdb/blob/master/python/sdssdb/peewee/sdss5db/catalogdb.py
class MWM_YSO_Disk_APOGEE_Carton(BaseCarton):
"""YSOs - Disk APOGEE (IR excess).
Shorthand name: mwm_yso_disk_apogee
old class name: MWM_YSO_S1_Carton
old shorthand name: mwm_yso_s1
Simplified Description of selection criteria:
selection of YSOs based on IR excess,
with WISE colors W1-W2>0.25, W2-W3>0.5, W3-W4>1.5,
closer than parallax>0.3, and brighter than H<13
(should have ~21.5K sources)
Wiki page:
https://wiki.sdss.org/display/MWM/YSO+selection+function
Additional source catalogs needed: Gaia, 2mass, allwise
Additional cross-matching needed:
Note: Using the Gaia xmatch somehow misses half the sources.
Selection was done on the allwise catalog that
had 2mass photometry,
and then the resulting selection was crossmatched against against
Gaia with 1" search radius.
Return columns: Gaia id, 2mass id, allwise id, G, BP, RP,
J, H, K, W1, W2, W3, W4,parallax
cadence options for these targets
(list all options,
even though no single target will receive more than one):
Pseudo SQL (optional):
Implementation: h_m<13 and w1mpro-w2mpro>0.25 and
w2mpro-w3mpro>0.5 and w3mpro-w4mpro>1.5 and parallax>0.3
"""
name = 'mwm_yso_disk_apogee'
category = 'science'
instrument = 'APOGEE'
cadence = 'bright_3x1'
program = 'mwm_yso'
mapper = 'MWM'
priority = 2700
def build_query(self, version_id, query_region=None):
query = (CatalogToTIC_v8
.select(CatalogToTIC_v8.catalogid, Gaia_DR2.source_id,
Gaia_DR2.ra.alias('gaia_dr2_ra'),
Gaia_DR2.dec.alias('gaia_dr2_dec'),
TwoMassPSC.pts_key,
TwoMassPSC.designation.alias('twomass_psc_designation'),
AllWise.designation.alias('allwise_designation'),
Gaia_DR2.phot_g_mean_mag, Gaia_DR2.phot_bp_mean_mag,
Gaia_DR2.phot_rp_mean_mag.alias('gaia_dr2_rp'),
TwoMassPSC.j_m, TwoMassPSC.h_m,
TwoMassPSC.k_m,
Gaia_DR2.parallax)
.join(TIC_v8, on=(CatalogToTIC_v8.target_id == TIC_v8.id))
.join(Gaia_DR2, on=(TIC_v8.gaia_int == Gaia_DR2.source_id))
.switch(TIC_v8)
.join(TwoMassPSC, on=(TIC_v8.twomass_psc == TwoMassPSC.designation))
.switch(TIC_v8)
.join(AllWise, on=(TIC_v8.allwise == AllWise.designation))
.where(CatalogToTIC_v8.version_id == version_id,
CatalogToTIC_v8.best >> True,
TwoMassPSC.h_m < 13,
(AllWise.w1mpro - AllWise.w2mpro) > 0.25,
(AllWise.w2mpro - AllWise.w3mpro) > 0.50,
(AllWise.w3mpro - AllWise.w4mpro) > 1.50,
Gaia_DR2.parallax > 0.3))
# Gaia_DR2 pweewee model class corresponds to
# table catalogdb.gaia_dr2_source.
#
# All values of TIC_v8.plx (for non-null entries) are not the same as
# values of Gaia_DR2.parallax.
# Hence, in the above query, we cannot use TIC_v8.plx instead
# of Gaia_DR2.parallax.
if query_region:
query = (query
.join_from(CatalogToTIC_v8, Catalog)
.where(peewee.fn.q3c_radial_query(Catalog.ra,
Catalog.dec,
query_region[0],
query_region[1],
query_region[2])))
return query
class MWM_YSO_Disk_BOSS_Carton(BaseCarton):
"""YSOs - Disk BOSS (IR excess).
Shorthand name: mwm_yso_disk_boss
old class name: MWM_YSO_S1_Carton
old shorthand name: mwm_yso_s1
Simplified Description of selection criteria:
selection of YSOs based on IR excess,
with WISE colors W1-W2>0.25, W2-W3>0.5, W3-W4>1.5,
closer than parallax>0.3, and brighter than H<13
(should have ~21.5K sources)
Wiki page:
https://wiki.sdss.org/display/MWM/YSO+selection+function
Additional source catalogs needed: Gaia, 2mass, allwise
Additional cross-matching needed:
Note: Using the Gaia xmatch somehow misses half the sources.
Selection was done on the allwise catalog that
had 2mass photometry,
and then the resulting selection was crossmatched against against
Gaia with 1" search radius.
Return columns: Gaia id, 2mass id, allwise id, G, BP, RP,
J, H, K, W1, W2, W3, W4,parallax
cadence options for these targets
(list all options,
even though no single target will receive more than one):
boss_bright_3x1 if RP<14.76 |
boss_bright_4x1 if RP<15.075 |
boss_bright_5x1 if RP<15.29 |
boss_bright_6x1 if RP<15.5
Pseudo SQL (optional):
Implementation: phot_rp_mean_mag<15.5 and w1mpro-w2mpro>0.25 and
w2mpro-w3mpro>0.5 and w3mpro-w4mpro>1.5 and parallax>0.3
Comments: Split from mwm_yso_s1 to request BOSS observations,
same color selection but assigning cadence and faint limit for carton based
on RP instead of H
"""
name = 'mwm_yso_disk_boss'
category = 'science'
instrument = None # instrument is set in post_process()
cadence = None # cadence is set in post_process()
program = 'mwm_yso'
mapper = 'MWM'
priority = 2700
def build_query(self, version_id, query_region=None):
query = (CatalogToTIC_v8
.select(CatalogToTIC_v8.catalogid, Gaia_DR2.source_id,
Gaia_DR2.ra.alias('gaia_dr2_ra'),
Gaia_DR2.dec.alias('gaia_dr2_dec'),
TwoMassPSC.pts_key,
TwoMassPSC.designation.alias('twomass_psc_designation'),
AllWise.designation.alias('allwise_designation'),
Gaia_DR2.phot_g_mean_mag, Gaia_DR2.phot_bp_mean_mag,
Gaia_DR2.phot_rp_mean_mag.alias('gaia_dr2_rp'),
TwoMassPSC.j_m, TwoMassPSC.h_m,
TwoMassPSC.k_m,
Gaia_DR2.parallax)
.join(TIC_v8, on=(CatalogToTIC_v8.target_id == TIC_v8.id))
.join(Gaia_DR2, on=(TIC_v8.gaia_int == Gaia_DR2.source_id))
.switch(TIC_v8)
.join(TwoMassPSC, on=(TIC_v8.twomass_psc == TwoMassPSC.designation))
.switch(TIC_v8)
.join(AllWise, on=(TIC_v8.allwise == AllWise.designation))
.where(CatalogToTIC_v8.version_id == version_id,
CatalogToTIC_v8.best >> True,
Gaia_DR2.phot_rp_mean_mag < 15.5,
(AllWise.w1mpro - AllWise.w2mpro) > 0.25,
(AllWise.w2mpro - AllWise.w3mpro) > 0.50,
(AllWise.w3mpro - AllWise.w4mpro) > 1.50,
Gaia_DR2.parallax > 0.3))
# Gaia_DR2 pweewee model class corresponds to
# table catalogdb.gaia_dr2_source.
#
# All values of TIC_v8.plx (for non-null entries) are not the same as
# values of Gaia_DR2.parallax.
# Hence, in the above query, we cannot use TIC_v8.plx instead
# of Gaia_DR2.parallax.
if query_region:
query = (query
.join_from(CatalogToTIC_v8, Catalog)
.where(peewee.fn.q3c_radial_query(Catalog.ra,
Catalog.dec,
query_region[0],
query_region[1],
query_region[2])))
return query
def post_process(self, model):
"""
cadence options for these targets:
boss_bright_3x1 if RP<14.76 |
boss_bright_4x1 if RP<15.075 |
boss_bright_5x1 if RP<15.29 |
boss_bright_6x1 if RP<15.5
"""
cursor = self.database.execute_sql(
"select catalogid, gaia_dr2_rp from " +
" sandbox.temp_mwm_yso_disk_boss ;")
output = cursor.fetchall()
for i in range(len(output)):
current_catalogid = output[i][0]
current_rp = output[i][1]
if(current_rp < 14.76):
current_instrument = 'BOSS'
current_cadence = 'bright_3x1'
elif(current_rp < 15.075):
current_instrument = 'BOSS'
current_cadence = 'bright_4x1'
elif(current_rp < 15.29):
current_instrument = 'BOSS'
current_cadence = 'bright_5x1'
elif(current_rp < 15.5):
current_instrument = 'BOSS'
current_cadence = 'bright_6x1'
else:
# All cases should be covered above so we should not get here.
current_instrument = None
current_cadence = None
raise TargetSelectionError('error in mwm_yso_disk_boss ' +
'post_process(): ' +
'instrument = None, cadence= None')
if current_instrument is not None:
self.database.execute_sql(
" update sandbox.temp_mwm_yso_disk_boss " +
" set instrument = '" + current_instrument + "'"
" where catalogid = " + str(current_catalogid) + ";")
if current_cadence is not None:
self.database.execute_sql(
" update sandbox.temp_mwm_yso_disk_boss " +
" set cadence = '" + current_cadence + "'"
" where catalogid = " + str(current_catalogid) + ";")
class MWM_YSO_Embedded_APOGEE_Carton(BaseCarton):
"""YSOs - Embedded APOGEE (optically invisible).
Shorthand name: mwm_yso_embedded_apogee
old class name: MWM_YSO_S2_Carton
old shorthand name: mwm_yso_s2
Simplified Description of selection criteria:
selection of YSOs, brighter than H<13, fainter than G>15 or
without gaia detection,
colors J-H>0,5, W1-W2>0.5, W2-W3>1, W3-W4>1.5, and
relates (W3-W4)>(W1-W2)*0.5+1.1
(should have ~11.6K sources)
Wiki page:
https://wiki.sdss.org/display/MWM/YSO+selection+function
Additional source catalogs needed: 2mass+allwise, gaia
(allow sources that lack gaia xmatch)
Additional cross-matching needed:
Note: Using the Gaia xmatch somehow misses half the sources.
Selection was done on the allwise catalog
that had 2mass photometry,
and then the resulting selection was crossmatched
against against Gaia with 1" search radius.
Return columns: Gaia id, 2mass id, allwise id, G, BP, RP,
J, H, K, W1, W2, W3, W4
cadence options for these targets
(list all options,
even though no single target will receive more than one):
Pseudo SQL (optional):
Implementation: h_m<13 and
(phot_g_mean_mag>18.5 or phot_g_mean_mag is null)
and j_m-h_m>1
and h_m-ks_m>0.5
and w1mpro-w2mpro>0.5
and w2mpro-w3mpro>1
and w3mpro-w4mpro>1.5
and w3mpro-w4mpro>(w1mpro-w2mpro)*0.8+1.1
"""
name = 'mwm_yso_embedded_apogee'
category = 'science'
instrument = 'APOGEE'
cadence = 'bright_3x1'
program = 'mwm_yso'
mapper = 'MWM'
priority = 2700
def build_query(self, version_id, query_region=None):
query = (AllWise
.select(CatalogToTIC_v8.catalogid, Gaia_DR2.source_id,
Gaia_DR2.ra.alias('gaia_dr2_ra'),
Gaia_DR2.dec.alias('gaia_dr2_dec'),
TwoMassPSC.pts_key,
TwoMassPSC.designation.alias('twomass_psc_designation'),
AllWise.designation.alias('allwise_designation'),
Gaia_DR2.phot_g_mean_mag, Gaia_DR2.phot_bp_mean_mag,
Gaia_DR2.phot_rp_mean_mag.alias('gaia_dr2_rp'),
TwoMassPSC.j_m, TwoMassPSC.h_m,
TwoMassPSC.k_m,
Gaia_DR2.parallax)
.join(TIC_v8, on=(TIC_v8.allwise == AllWise.designation))
.join(TwoMassPSC,
on=(TIC_v8.twomass_psc == TwoMassPSC.designation))
.switch(TIC_v8)
.join(Gaia_DR2, peewee.JOIN.LEFT_OUTER,
on=(TIC_v8.gaia_int == Gaia_DR2.source_id))
.switch(TIC_v8)
.join(CatalogToTIC_v8,
on=(CatalogToTIC_v8.target_id == TIC_v8.id))
.where(CatalogToTIC_v8.version_id == version_id,
CatalogToTIC_v8.best >> True,
TwoMassPSC.h_m < 13,
(Gaia_DR2.phot_g_mean_mag > 18.5) |
(Gaia_DR2.phot_g_mean_mag >> None),
(AllWise.j_m_2mass - AllWise.h_m_2mass) > 1.0,
(AllWise.h_m_2mass - AllWise.k_m_2mass) > 0.5,
(AllWise.w1mpro - AllWise.w2mpro) > 0.50,
(AllWise.w2mpro - AllWise.w3mpro) > 1.00,
(AllWise.w3mpro - AllWise.w4mpro) > 1.50,
(AllWise.w3mpro - AllWise.w4mpro) >
(AllWise.w1mpro - AllWise.w2mpro) * 0.8 + 1.1))
if query_region:
query = (query
.join_from(CatalogToTIC_v8, Catalog)
.where(peewee.fn.q3c_radial_query(Catalog.ra,
Catalog.dec,
query_region[0],
query_region[1],
query_region[2])))
return query
class MWM_YSO_Nebula_APOGEE_Carton(BaseCarton):
"""YSOs - Nebula APOGEE(optically invisible, WISE saturated).
Shorthand name: mwm_yso_nebula_apogee
old class name: MWM_YSO_S2_5_Carton
old shorthand name: mwm_yso_s2_5
Simplified Description of selection criteria:
selection of YSOs, brighter than H<15,
saturated (blank) W4 with W2-W3>4,
or saturated W3 and W2, with J-H>1.1.
Some contaminants from scanning are
filtered on the plane of the sky:
all the targets should be within 5 deg of the plane+
few sources that can be located
further south of the plane if l>180
(should have ~1.2K sources)
Wiki page:
https://wiki.sdss.org/display/MWM/YSO+selection+function
Additional source catalogs needed: 2mass, allwise
Additional cross-matching needed:
Return columns: 2mass id, allwise id, J, H, K, W1, W2, W3, W4
cadence options for these targets
(list all options,
even though no single target will receive more than one):
Pseudo SQL (optional):
Implementation: h_m<13 and
(w2mpro-w3mpro>4 and w4mpro is null) or
(w3mpro is null and w4mpro is null and j_m-h_m>1.1)
and (b>-5 or l>180) and b<-5
"""
name = 'mwm_yso_nebula_apogee'
category = 'science'
instrument = 'APOGEE'
cadence = 'bright_3x1'
program = 'mwm_yso'
mapper = 'MWM'
priority = 2700
# Above implementation has below clause
# and (b>-5 or l>180) and b<-5
# Replace (b>-5 or l>180) and b<-5 as below based on the text.
# In words:
# all the targets should be within 5 deg of the plane+
# few sources that can be
# located further south of the plane if l>180
# Hence:
# ((b>-5) and (b<5)) or ((b<-5) and (l > 180))
# l, b in Gaia_DR2 are gallong and gallat in TIC_v8.
# We are using the values from Gaia since
# TIC propagates the coordinates back to epoch 2000.0
# (b>-5 or l>180) and b<-5
# S2_5 query below has the same part before where() as S2 query.
def build_query(self, version_id, query_region=None):
query = (AllWise
.select(CatalogToTIC_v8.catalogid, Gaia_DR2.source_id,
Gaia_DR2.ra.alias('gaia_dr2_ra'),
Gaia_DR2.dec.alias('gaia_dr2_dec'),
TwoMassPSC.pts_key,
TwoMassPSC.designation.alias('twomass_psc_designation'),
AllWise.designation.alias('allwise_designation'),
Gaia_DR2.phot_g_mean_mag, Gaia_DR2.phot_bp_mean_mag,
Gaia_DR2.phot_rp_mean_mag.alias('gaia_dr2_rp'),
TwoMassPSC.j_m, TwoMassPSC.h_m,
TwoMassPSC.k_m,
Gaia_DR2.parallax)
.join(TIC_v8, on=(TIC_v8.allwise == AllWise.designation))
.join(TwoMassPSC,
on=(TIC_v8.twomass_psc == TwoMassPSC.designation))
.switch(TIC_v8)
.join(Gaia_DR2, peewee.JOIN.LEFT_OUTER,
on=(TIC_v8.gaia_int == Gaia_DR2.source_id))
.switch(TIC_v8)
.join(CatalogToTIC_v8,
on=(CatalogToTIC_v8.target_id == TIC_v8.id))
.where(CatalogToTIC_v8.version_id == version_id,
CatalogToTIC_v8.best >> True,
TwoMassPSC.h_m < 13,
(((AllWise.w2mpro - AllWise.w3mpro) > 4) &
(AllWise.w4mpro >> None)) |
((AllWise.w3mpro >> None) &
(AllWise.w4mpro >> None) &
((AllWise.j_m_2mass - AllWise.h_m_2mass) > 1.1)),
((Gaia_DR2.b > -5) & (Gaia_DR2.b < 5)) |
((Gaia_DR2.b < -5) & (Gaia_DR2.l > 180)) |
((Gaia_DR2.b >> None) & (Gaia_DR2.l >> None))))
if query_region:
query = (query
.join_from(CatalogToTIC_v8, Catalog)
.where(peewee.fn.q3c_radial_query(Catalog.ra,
Catalog.dec,
query_region[0],
query_region[1],
query_region[2])))
return query
class MWM_YSO_Variable_APOGEE_Carton(BaseCarton):
"""YSOs - Variable APOGEE (pre-main sequence optical variables).
Shorthand name: mwm_yso_variable_apogee
old class name: MWM_YSO_S3_Carton
old shorthand name: mwm_yso_s3
Simplified Description of selection criteria:
selection of YSOs brighter than H<13, closer than parallax>0.3.
Filter on the position of the HR diagram to
select cool pre-main sequence stars,
with BP-RP>13, (BP-RP)*2.5+2.5>M_G, (BP-RP)*2.5-1<M_G,
requiring variability in g,bp,rp>0.02
(with var_x defined as
sqrt(phot_x_n_obs)/phot_x_mean_flux_over_error),
have relations in variability of
var_g<var_bp<var_g^0.75, 0.75*var_g<var_rp<var_g^0.95,
and log10(var_bp)*5+11<M_BP, in which M_x is the absolute mag
(should have ~52.7K sources)
Wiki page:
https://wiki.sdss.org/display/MWM/YSO+selection+function
Additional source catalogs needed: 2mass, gaia
Additional cross-matching needed:
Return columns: Gaia id, 2mass id, G, BP, RP, J, H, K, parallax
cadence options for these targets
(list all options,
even though no single target will receive more than one):
Pseudo SQL (optional):
Implementation:
phot_g_mean_mag < 18.5 and h_m <13 and parallax >0.3 and
bp_rp*2.5+2.5 > phot_g_mean_mag-5*(log10(1000/parallax)-1) and
bp_rp*2.5-1 < phot_g_mean_mag-5*(log10(1000/parallax)-1) and
sqrt(phot_bp_n_obs)/phot_bp_mean_flux_over_error>
sqrt(phot_g_n_obs)/phot_g_mean_flux_over_error and
sqrt(phot_rp_n_obs)/phot_rp_mean_flux_over_error>
sqrt(phot_g_n_obs)/phot_g_mean_flux_over_error*0.75 and
sqrt(phot_bp_n_obs)/phot_bp_mean_flux_over_error<
power(sqrt(phot_g_n_obs)/phot_g_mean_flux_over_error,0.75) and
sqrt(phot_rp_n_obs)/phot_rp_mean_flux_over_error<
power(sqrt(phot_g_n_obs)/phot_g_mean_flux_over_error,0.95) and
log10(sqrt(phot_bp_n_obs)/phot_bp_mean_flux_over_error)*5+11<
phot_bp_mean_mag-5*(log10(1000/parallax)-1) and
bp_rp>1.3 and sqrt(phot_g_n_obs)/phot_g_mean_flux_over_error>0.02
and
sqrt(phot_bp_n_obs)/phot_bp_mean_flux_over_error>0.02 and
sqrt(phot_rp_n_obs)/phot_rp_mean_flux_over_error>0.02
"""
name = 'mwm_yso_variable_apogee'
category = 'science'
instrument = 'APOGEE'
cadence = 'bright_3x1'
program = 'mwm_yso'
mapper = 'MWM'
priority = 2700
def build_query(self, version_id, query_region=None):
query = (CatalogToTIC_v8
.select(CatalogToTIC_v8.catalogid, Gaia_DR2.source_id,
Gaia_DR2.ra.alias('gaia_dr2_ra'),
Gaia_DR2.dec.alias('gaia_dr2_dec'),
TwoMassPSC.pts_key,
TwoMassPSC.designation.alias('twomass_psc_designation'),
Gaia_DR2.phot_g_mean_mag, Gaia_DR2.phot_bp_mean_mag,
Gaia_DR2.phot_rp_mean_mag.alias('gaia_dr2_rp'),
TwoMassPSC.j_m, TwoMassPSC.h_m,
TwoMassPSC.k_m,
Gaia_DR2.parallax)
.join(TIC_v8, on=(CatalogToTIC_v8.target_id == TIC_v8.id))
.join(TwoMassPSC, on=(TIC_v8.twomass_psc == TwoMassPSC.designation))
.switch(TIC_v8)
.join(Gaia_DR2, on=(TIC_v8.gaia_int == Gaia_DR2.source_id))
.where(CatalogToTIC_v8.version_id == version_id,
CatalogToTIC_v8.best >> True,
Gaia_DR2.phot_g_mean_mag < 18.5,
TwoMassPSC.h_m < 13,
Gaia_DR2.parallax > 0.3,
Gaia_DR2.bp_rp * 2.5 + 2.5 >
Gaia_DR2.phot_g_mean_mag -
5 * (peewee.fn.log(1000 / Gaia_DR2.parallax) - 1),
Gaia_DR2.bp_rp * 2.5 - 1 <
Gaia_DR2.phot_g_mean_mag -
5 * (peewee.fn.log(1000 / Gaia_DR2.parallax) - 1),
peewee.fn.sqrt(Gaia_DR2.phot_bp_n_obs) /
Gaia_DR2.phot_bp_mean_flux_over_error >
peewee.fn.sqrt(Gaia_DR2.phot_g_n_obs) /
Gaia_DR2.phot_g_mean_flux_over_error,
peewee.fn.sqrt(Gaia_DR2.phot_rp_n_obs) /
Gaia_DR2.phot_rp_mean_flux_over_error >
peewee.fn.sqrt(Gaia_DR2.phot_g_n_obs) /
Gaia_DR2.phot_g_mean_flux_over_error * 0.75,
peewee.fn.sqrt(Gaia_DR2.phot_bp_n_obs) /
Gaia_DR2.phot_bp_mean_flux_over_error <
peewee.fn.power(
peewee.fn.sqrt(Gaia_DR2.phot_g_n_obs) /
Gaia_DR2.phot_g_mean_flux_over_error, 0.75),
peewee.fn.sqrt(Gaia_DR2.phot_rp_n_obs) /
Gaia_DR2.phot_rp_mean_flux_over_error <
peewee.fn.power(
peewee.fn.sqrt(Gaia_DR2.phot_g_n_obs) /
Gaia_DR2.phot_g_mean_flux_over_error, 0.95),
peewee.fn.log(
peewee.fn.sqrt(Gaia_DR2.phot_bp_n_obs) /
Gaia_DR2.phot_bp_mean_flux_over_error) * 5 + 11 <
Gaia_DR2.phot_bp_mean_mag -
5 * (peewee.fn.log(1000 / Gaia_DR2.parallax) - 1),
Gaia_DR2.bp_rp > 1.3,
peewee.fn.sqrt(Gaia_DR2.phot_g_n_obs) /
Gaia_DR2.phot_g_mean_flux_over_error > 0.02,
peewee.fn.sqrt(Gaia_DR2.phot_bp_n_obs) /
Gaia_DR2.phot_bp_mean_flux_over_error > 0.02,
peewee.fn.sqrt(Gaia_DR2.phot_rp_n_obs) /
Gaia_DR2.phot_rp_mean_flux_over_error > 0.02))
if query_region:
query = (query
.join_from(CatalogToTIC_v8, Catalog)
.where(peewee.fn.q3c_radial_query(Catalog.ra,
Catalog.dec,
query_region[0],
query_region[1],
query_region[2])))
return query
class MWM_YSO_Variable_BOSS_Carton(BaseCarton):
"""YSOs - Variable BOSS (pre-main sequence optical variables).
Shorthand name: mwm_yso_variable_boss
old class name: MWM_YSO_S3_Carton
old shorthand name: mwm_yso_s3
Simplified Description of selection criteria:
selection of YSOs brighter than H<13, closer than parallax>0.3.
Filter on the position of the HR diagram to
select cool pre-main sequence stars,
with BP-RP>13, (BP-RP)*2.5+2.5>M_G, (BP-RP)*2.5-1<M_G,
requiring variability in g,bp,rp>0.02
(with var_x defined as
sqrt(phot_x_n_obs)/phot_x_mean_flux_over_error),
have relations in variability of
var_g<var_bp<var_g^0.75, 0.75*var_g<var_rp<var_g^0.95,
and log10(var_bp)*5+11<M_BP, in which M_x is the absolute mag
(should have ~52.7K sources)
Wiki page:
https://wiki.sdss.org/display/MWM/YSO+selection+function
Additional source catalogs needed: 2mass, gaia
Additional cross-matching needed:
Return columns: Gaia id, 2mass id, G, BP, RP, J, H, K, parallax
cadence options for these targets
(list all options,
even though no single target will receive more than one):
boss_bright_3x1 if RP<14.76 |
boss_bright_4x1 if RP<15.075 |
boss_bright_5x1 if RP<15.29 |
boss_bright_6x1 if RP<15.5
Pseudo SQL (optional):
Implementation:
phot_rp_mean_mag<15.5 and phot_g_mean_mag < 18.5 and h_m <13 and parallax >0.3 and
bp_rp*2.5+2.5 > phot_g_mean_mag-5*(log10(1000/parallax)-1) and
bp_rp*2.5-1 < phot_g_mean_mag-5*(log10(1000/parallax)-1) and
sqrt(phot_bp_n_obs)/phot_bp_mean_flux_over_error>
sqrt(phot_g_n_obs)/phot_g_mean_flux_over_error and
sqrt(phot_rp_n_obs)/phot_rp_mean_flux_over_error>
sqrt(phot_g_n_obs)/phot_g_mean_flux_over_error*0.75 and
sqrt(phot_bp_n_obs)/phot_bp_mean_flux_over_error<
power(sqrt(phot_g_n_obs)/phot_g_mean_flux_over_error,0.75) and
sqrt(phot_rp_n_obs)/phot_rp_mean_flux_over_error<
power(sqrt(phot_g_n_obs)/phot_g_mean_flux_over_error,0.95) and
log10(sqrt(phot_bp_n_obs)/phot_bp_mean_flux_over_error)*5+11<
phot_bp_mean_mag-5*(log10(1000/parallax)-1) and
bp_rp>1.3 and sqrt(phot_g_n_obs)/phot_g_mean_flux_over_error>0.02
and
sqrt(phot_bp_n_obs)/phot_bp_mean_flux_over_error>0.02 and
sqrt(phot_rp_n_obs)/phot_rp_mean_flux_over_error>0.02
Comments: Split from mwm_yso_s3 to request BOSS observations,
RP magnitude check added to the previous selection
"""
name = 'mwm_yso_variable_boss'
category = 'science'
instrument = None # instrument is set in post_process()
cadence = None # cadence is set in post_process()
program = 'mwm_yso'
mapper = 'MWM'
priority = 2700
def build_query(self, version_id, query_region=None):
    """Build the query selecting variable YSO candidates for BOSS.

    Parameters
    ----------
    version_id
        catalogdb cross-match ``version_id`` used to filter
        ``CatalogToTIC_v8`` rows.
    query_region
        Optional ``(ra, dec, radius)`` in degrees; when given, the
        selection is restricted to a q3c cone on the sky.

    Returns
    -------
    The peewee query.  Instrument and cadence are NOT set here; they are
    assigned per target in ``post_process()`` from the RP magnitude.

    Notes
    -----
    Throughout the conditions below, the variability proxy for band x is
    ``var_x = sqrt(phot_x_n_obs) / phot_x_mean_flux_over_error`` and the
    absolute magnitude is
    ``M_x = phot_x_mean_mag - 5 * (log10(1000 / parallax) - 1)``
    (``peewee.fn.log`` maps to SQL ``log``, i.e. base-10 in Postgres).
    """
    query = (CatalogToTIC_v8
             .select(CatalogToTIC_v8.catalogid, Gaia_DR2.source_id,
                     Gaia_DR2.ra.alias('gaia_dr2_ra'),
                     Gaia_DR2.dec.alias('gaia_dr2_dec'),
                     TwoMassPSC.pts_key,
                     TwoMassPSC.designation.alias('twomass_psc_designation'),
                     Gaia_DR2.phot_g_mean_mag, Gaia_DR2.phot_bp_mean_mag,
                     # aliased so post_process() can read it back from the
                     # sandbox temp table as gaia_dr2_rp
                     Gaia_DR2.phot_rp_mean_mag.alias('gaia_dr2_rp'),
                     TwoMassPSC.j_m, TwoMassPSC.h_m,
                     TwoMassPSC.k_m, Gaia_DR2.parallax)
             .join(TIC_v8, on=(CatalogToTIC_v8.target_id == TIC_v8.id))
             .join(TwoMassPSC, on=(TIC_v8.twomass_psc == TwoMassPSC.designation))
             .switch(TIC_v8)
             .join(Gaia_DR2, on=(TIC_v8.gaia_int == Gaia_DR2.source_id))
             .where(CatalogToTIC_v8.version_id == version_id,
                    CatalogToTIC_v8.best >> True,
                    # brightness / distance cuts
                    Gaia_DR2.phot_rp_mean_mag < 15.5,
                    Gaia_DR2.phot_g_mean_mag < 18.5,
                    TwoMassPSC.h_m < 13,
                    Gaia_DR2.parallax > 0.3,
                    # CMD wedge: M_G between (BP-RP)*2.5-1 and (BP-RP)*2.5+2.5
                    Gaia_DR2.bp_rp * 2.5 + 2.5 >
                    Gaia_DR2.phot_g_mean_mag -
                    5 * (peewee.fn.log(1000 / Gaia_DR2.parallax) - 1),
                    Gaia_DR2.bp_rp * 2.5 - 1 <
                    Gaia_DR2.phot_g_mean_mag -
                    5 * (peewee.fn.log(1000 / Gaia_DR2.parallax) - 1),
                    # var_g < var_bp < var_g^0.75
                    peewee.fn.sqrt(Gaia_DR2.phot_bp_n_obs) /
                    Gaia_DR2.phot_bp_mean_flux_over_error >
                    peewee.fn.sqrt(Gaia_DR2.phot_g_n_obs) /
                    Gaia_DR2.phot_g_mean_flux_over_error,
                    # 0.75*var_g < var_rp < var_g^0.95
                    peewee.fn.sqrt(Gaia_DR2.phot_rp_n_obs) /
                    Gaia_DR2.phot_rp_mean_flux_over_error >
                    peewee.fn.sqrt(Gaia_DR2.phot_g_n_obs) /
                    Gaia_DR2.phot_g_mean_flux_over_error * 0.75,
                    peewee.fn.sqrt(Gaia_DR2.phot_bp_n_obs) /
                    Gaia_DR2.phot_bp_mean_flux_over_error <
                    peewee.fn.power(
                        peewee.fn.sqrt(Gaia_DR2.phot_g_n_obs) /
                        Gaia_DR2.phot_g_mean_flux_over_error, 0.75),
                    peewee.fn.sqrt(Gaia_DR2.phot_rp_n_obs) /
                    Gaia_DR2.phot_rp_mean_flux_over_error <
                    peewee.fn.power(
                        peewee.fn.sqrt(Gaia_DR2.phot_g_n_obs) /
                        Gaia_DR2.phot_g_mean_flux_over_error, 0.95),
                    # log10(var_bp)*5 + 11 < M_BP
                    peewee.fn.log(
                        peewee.fn.sqrt(Gaia_DR2.phot_bp_n_obs) /
                        Gaia_DR2.phot_bp_mean_flux_over_error) * 5 + 11 <
                    Gaia_DR2.phot_bp_mean_mag -
                    5 * (peewee.fn.log(1000 / Gaia_DR2.parallax) - 1),
                    # red colour cut and minimum variability in all bands
                    Gaia_DR2.bp_rp > 1.3,
                    peewee.fn.sqrt(Gaia_DR2.phot_g_n_obs) /
                    Gaia_DR2.phot_g_mean_flux_over_error > 0.02,
                    peewee.fn.sqrt(Gaia_DR2.phot_bp_n_obs) /
                    Gaia_DR2.phot_bp_mean_flux_over_error > 0.02,
                    peewee.fn.sqrt(Gaia_DR2.phot_rp_n_obs) /
                    Gaia_DR2.phot_rp_mean_flux_over_error > 0.02))

    if query_region:
        query = (query
                 .join_from(CatalogToTIC_v8, Catalog)
                 .where(peewee.fn.q3c_radial_query(Catalog.ra,
                                                   Catalog.dec,
                                                   query_region[0],
                                                   query_region[1],
                                                   query_region[2])))

    return query
def post_process(self, model):
    """Assign per-target instrument and cadence from the RP magnitude.

    cadence options for these targets:
    boss_bright_3x1 if RP<14.76 |
    boss_bright_4x1 if RP<15.075 |
    boss_bright_5x1 if RP<15.29 |
    boss_bright_6x1 if RP<15.5

    The instrument is 'BOSS' in every branch; both columns are written
    with a single parameterized UPDATE per target (the previous version
    interpolated values into the SQL string and issued two statements).
    """
    cursor = self.database.execute_sql(
        "select catalogid, gaia_dr2_rp from " +
        " sandbox.temp_mwm_yso_variable_boss ;")
    for current_catalogid, current_rp in cursor.fetchall():
        if current_rp < 14.76:
            current_cadence = 'bright_3x1'
        elif current_rp < 15.075:
            current_cadence = 'bright_4x1'
        elif current_rp < 15.29:
            current_cadence = 'bright_5x1'
        elif current_rp < 15.5:
            current_cadence = 'bright_6x1'
        else:
            # build_query() enforces phot_rp_mean_mag < 15.5, so this
            # branch should be unreachable.
            raise TargetSelectionError('error in mwm_yso_variable_boss ' +
                                       'post_process(): ' +
                                       'instrument = None, cadence= None')
        # Parameterized to avoid building SQL by string concatenation.
        self.database.execute_sql(
            " update sandbox.temp_mwm_yso_variable_boss "
            " set instrument = 'BOSS', cadence = %s "
            " where catalogid = %s ;",
            (current_cadence, current_catalogid))
class MWM_YSO_OB_APOGEE_Carton(BaseCarton):
    """YSOs - OB APOGEE Upper (pre-)Main Sequence.

    Shorthand name: mwm_yso_ob_apogee
    (old class name MWM_YSO_OB_Carton / old shorthand mwm_yso_ob)

    Selects OB stars at the tip of the main sequence: brighter than
    H < 13 and G < 18 mag, closer than parallax > 0.3, with colour
    -0.2 < BP-RP < 1.1 and absolute magnitude M_G < (BP-RP)*1.6 - 2.2
    (~8.7K sources expected).

    Wiki page:
    https://wiki.sdss.org/display/MWM/YSO+selection+function
    Source catalogs: 2MASS, Gaia DR2.
    Return columns: Gaia id, 2MASS id, G, BP, RP, J, H, K, parallax.
    Implementation: h_m<13 and bp_rp between -0.2 and 1.1 and
    phot_g_mean_mag<18 and
    phot_g_mean_mag-5*(log10(1000/parallax)-1) < 1.6*bp_rp-2.2 and
    parallax>0.3
    """

    name = 'mwm_yso_ob_apogee'
    category = 'science'
    instrument = 'APOGEE'
    cadence = 'bright_3x1'
    program = 'mwm_yso'
    mapper = 'MWM'
    priority = 2700

    def build_query(self, version_id, query_region=None):
        """Return the peewee query implementing the selection above."""
        # Absolute G magnitude from the parallax-based distance modulus.
        absolute_g = (Gaia_DR2.phot_g_mean_mag -
                      5 * (peewee.fn.log(1000 / Gaia_DR2.parallax) - 1))

        query = (CatalogToTIC_v8
                 .select(CatalogToTIC_v8.catalogid, Gaia_DR2.source_id,
                         Gaia_DR2.ra.alias('gaia_dr2_ra'),
                         Gaia_DR2.dec.alias('gaia_dr2_dec'),
                         TwoMassPSC.pts_key,
                         TwoMassPSC.designation.alias('twomass_psc_designation'),
                         Gaia_DR2.phot_g_mean_mag, Gaia_DR2.phot_bp_mean_mag,
                         Gaia_DR2.phot_rp_mean_mag.alias('gaia_dr2_rp'),
                         TwoMassPSC.j_m, TwoMassPSC.h_m,
                         TwoMassPSC.k_m, Gaia_DR2.parallax)
                 .join(TIC_v8, on=(CatalogToTIC_v8.target_id == TIC_v8.id))
                 .join(TwoMassPSC, on=(TIC_v8.twomass_psc == TwoMassPSC.designation))
                 .switch(TIC_v8)
                 .join(Gaia_DR2, on=(TIC_v8.gaia_int == Gaia_DR2.source_id))
                 # Successive where() calls are ANDed together by peewee.
                 .where(CatalogToTIC_v8.version_id == version_id)
                 .where(CatalogToTIC_v8.best >> True)
                 .where(TwoMassPSC.h_m < 13)
                 .where((Gaia_DR2.bp_rp > -0.2) & (Gaia_DR2.bp_rp < 1.1))
                 .where(Gaia_DR2.phot_g_mean_mag < 18)
                 .where(absolute_g < 1.6 * Gaia_DR2.bp_rp - 2.2)
                 .where(Gaia_DR2.parallax > 0.3))

        if query_region:
            # Restrict to a q3c cone: (ra, dec, radius) in degrees.
            query = (query
                     .join_from(CatalogToTIC_v8, Catalog)
                     .where(peewee.fn.q3c_radial_query(Catalog.ra,
                                                       Catalog.dec,
                                                       query_region[0],
                                                       query_region[1],
                                                       query_region[2])))

        return query
class MWM_YSO_OB_BOSS_Carton(BaseCarton):
    """YSOs - OB BOSS Upper (pre-)Main Sequence.

    Shorthand name: mwm_yso_ob_boss
    old class name: MWM_YSO_OB_Carton
    old shorthand name: mwm_yso_ob
    Simplified Description of selection criteria:
    Selecting the OB stars at the tip of the main sequence,
    brighter than rp<15.5, G<18 mag, closer than parallax>0.3,
    color -0.2<BP-RP<1.1, and M_G<(BP-RP)*1.6-2.2
    (should have ~8.7K sources)
    Wiki page:
    https://wiki.sdss.org/display/MWM/YSO+selection+function
    Additional source catalogs needed: 2mass, gaia
    Return columns: Gaia id, 2mass id, G, BP, RP, J, H, K, parallax
    cadence options for these targets:
    boss_bright_3x1 if RP<14.76 |
    boss_bright_4x1 if RP<15.075 |
    boss_bright_5x1 if RP<15.29 |
    boss_bright_6x1 if RP<15.5
    Implementation: rp<15.5 and bp_rp between -0.2 and 1.1 and
    phot_g_mean_mag<18 and
    phot_g_mean_mag-5*(log10(1000/parallax)-1) <
    1.6*bp_rp-2.2 and parallax>0.3
    Comments: Split from mwm_yso_ob to request BOSS observations,
    assigning cadence and faint limit for carton based on RP instead of H
    """

    name = 'mwm_yso_ob_boss'
    category = 'science'
    # Instrument and cadence depend on each target's RP magnitude,
    # so both are assigned in post_process().
    instrument = None
    cadence = None
    program = 'mwm_yso'
    mapper = 'MWM'
    priority = 2700

    def build_query(self, version_id, query_region=None):
        """Return the peewee query for the OB/BOSS selection.

        RP is aliased to ``gaia_dr2_rp`` so post_process() can read it
        back from the sandbox temp table.
        """
        query = (CatalogToTIC_v8
                 .select(CatalogToTIC_v8.catalogid, Gaia_DR2.source_id,
                         Gaia_DR2.ra.alias('gaia_dr2_ra'),
                         Gaia_DR2.dec.alias('gaia_dr2_dec'),
                         TwoMassPSC.pts_key,
                         TwoMassPSC.designation.alias('twomass_psc_designation'),
                         Gaia_DR2.phot_g_mean_mag, Gaia_DR2.phot_bp_mean_mag,
                         Gaia_DR2.phot_rp_mean_mag.alias('gaia_dr2_rp'),
                         TwoMassPSC.j_m, TwoMassPSC.h_m,
                         TwoMassPSC.k_m, Gaia_DR2.parallax)
                 .join(TIC_v8, on=(CatalogToTIC_v8.target_id == TIC_v8.id))
                 .join(TwoMassPSC, on=(TIC_v8.twomass_psc == TwoMassPSC.designation))
                 .switch(TIC_v8)
                 .join(Gaia_DR2, on=(TIC_v8.gaia_int == Gaia_DR2.source_id))
                 .where(CatalogToTIC_v8.version_id == version_id,
                        CatalogToTIC_v8.best >> True,
                        Gaia_DR2.phot_rp_mean_mag < 15.5,
                        (Gaia_DR2.bp_rp > -0.2) & (Gaia_DR2.bp_rp < 1.1),
                        Gaia_DR2.phot_g_mean_mag < 18,
                        # M_G < 1.6*(BP-RP) - 2.2
                        Gaia_DR2.phot_g_mean_mag -
                        5 * (peewee.fn.log(1000 / Gaia_DR2.parallax) - 1) <
                        1.6 * Gaia_DR2.bp_rp - 2.2,
                        Gaia_DR2.parallax > 0.3))

        if query_region:
            query = (query
                     .join_from(CatalogToTIC_v8, Catalog)
                     .where(peewee.fn.q3c_radial_query(Catalog.ra,
                                                       Catalog.dec,
                                                       query_region[0],
                                                       query_region[1],
                                                       query_region[2])))

        return query

    def post_process(self, model):
        """Assign per-target instrument and cadence from the RP magnitude.

        cadence options for these targets:
        boss_bright_3x1 if RP<14.76 |
        boss_bright_4x1 if RP<15.075 |
        boss_bright_5x1 if RP<15.29 |
        boss_bright_6x1 if RP<15.5

        The instrument is 'BOSS' in every branch; both columns are written
        with a single parameterized UPDATE per target (the previous version
        interpolated values into the SQL string and issued two statements).
        """
        cursor = self.database.execute_sql(
            "select catalogid, gaia_dr2_rp from " +
            " sandbox.temp_mwm_yso_ob_boss ;")
        for current_catalogid, current_rp in cursor.fetchall():
            if current_rp < 14.76:
                current_cadence = 'bright_3x1'
            elif current_rp < 15.075:
                current_cadence = 'bright_4x1'
            elif current_rp < 15.29:
                current_cadence = 'bright_5x1'
            elif current_rp < 15.5:
                current_cadence = 'bright_6x1'
            else:
                # build_query() enforces phot_rp_mean_mag < 15.5, so this
                # branch should be unreachable.
                raise TargetSelectionError('error in mwm_yso_ob_boss ' +
                                           'post_process(): ' +
                                           'instrument = None, cadence= None')
            # Parameterized to avoid building SQL by string concatenation.
            self.database.execute_sql(
                " update sandbox.temp_mwm_yso_ob_boss "
                " set instrument = 'BOSS', cadence = %s "
                " where catalogid = %s ;",
                (current_cadence, current_catalogid))
class MWM_YSO_CMZ_APOGEE_Carton(BaseCarton):
    """YSOs - Central Molecular Zone APOGEE.

    Shorthand name: mwm_yso_cmz_apogee
    old class name: MWM_YSO_CMZ_Carton
    old shorthand name: mwm_yso_cmz
    Simplified Description of selection criteria:
    selection of sources in the central molecular zone
    based on spitzer fluxes from mipsgal.
    brighter than H<13, have color 8.0-24>2.5, and
    have parallax<0.2 or lack a Gaia xmatch.
    (should have ~3.2K sources)
    Wiki page:
    https://wiki.sdss.org/display/MWM/YSO+selection+function
    Additional source catalogs needed: mipsgal
    Additional cross-matching needed: the table has xmatch included
    Return columns:
    mipsgal id, 2mass id, j, h, k, 3.6, 4.8, 8.0, 24 mag
    cadence options for these targets
    (list all options,
    even though no single target will receive more than one):
    'apogee_bright_3x1'
    Implementation: Hmag<13 and _8_0_-_24_>2.5 and
    (parallax<0.2 or parallax is null)

    For CMZ, the raw sql query would be:
    select ct.catalogid from mipsgal m
    join twomass_psc t on twomass_name = designation
    join tic_v8 tic on tic.twomass_psc = t.designation
    left outer join gaia_dr2_source g on g.source_id = tic.gaia_int
    join catalog_to_tic_v8 ct on ct.target_id = tic.id
    where m.hmag < 13 and
    (m.mag_8_0 - m.mag_24) > 2.5 and
    (g.parallax < 0.2 or g.parallax is null)
    and ct.version_id = 13 and ct.best is true;

    Note you only need one left outer join between TIC and Gaia
    (all MIPSGAL targets have a counterpart in 2MASS,
    and all 2MASS have an entry in TIC,
    but not all the TIC entries have a Gaia counterpart).

    Comments: Formerly mwm_yso_cmz, removed check on the position on the sky:
    Removed below condition.
    l is glon (galactic longitude)
    b is glat (galactic latitude)
    All four statements below are equivalent.
    (l> 358 or l< 2) and
    b between -1 and 1
    (m.glon > 358 or m.glon < 2) and
    (m.glat > -1 and m.glat < 1) and
    Sources are within 2 degrees in l and
    1 degree in b from the galactic center,
    (MIPSGAL.glon > 358) | (MIPSGAL.glon < 2),
    (MIPSGAL.glat > -1) & (MIPSGAL.glat < 1),
    """

    name = 'mwm_yso_cmz_apogee'
    category = 'science'
    instrument = 'APOGEE'
    cadence = 'bright_3x1'
    program = 'mwm_yso'
    mapper = 'MWM'
    priority = 2700

    # mipsgal is a subset of 2MASS
    # mipsgal can be joined to twomass_psc via
    # mipsgal.twomass_name = TwoMassPSC.designation.
    # Then join via TIC and catalog_to_tic.
    #
    # mipsgal is a subset of 2MASS
    # 2MASS is a subset of TIC_v8
    # Gaia_DR2 is a subset of TIC_v8
    #
    # 2MASS is not a subset of Gaia_DR2
    # Gaia_DR2 is not a subset of 2MASS
    #
    # table catalogdb.mipsgal
    # Foreign-key constraints:
    #  "twomass_name_fk" FOREIGN KEY (twomass_name)
    #  REFERENCES twomass_psc(designation)
    #
    # Due to below, we do not need a join between Catalog and CatalogToTIC_v8
    # Catalog.catalogid == CatalogToTIC_v8.catalogid
    # We can remove the join with Catalog in all the cartons
    # since catalogid is completely unique (even across different version_id)
    # so the join with Catalog doesn't give us anything extra and it's a
    # costly join.

    def build_query(self, version_id, query_region=None):
        """Return the peewee query implementing the CMZ selection.

        The Gaia join is a LEFT OUTER join so MIPSGAL/2MASS sources
        without a Gaia counterpart are kept; for those rows parallax is
        NULL and they pass via the ``parallax >> None`` (IS NULL) branch.
        """
        query = (MIPSGAL.select(CatalogToTIC_v8.catalogid, Gaia_DR2.source_id,
                                Gaia_DR2.ra.alias('gaia_dr2_ra'),
                                Gaia_DR2.dec.alias('gaia_dr2_dec'),
                                TwoMassPSC.pts_key,
                                TwoMassPSC.designation.alias('twomass_psc_designation'),
                                TwoMassPSC.j_m, TwoMassPSC.h_m,
                                TwoMassPSC.k_m, MIPSGAL.mag_3_6, MIPSGAL.mag_4_5,
                                MIPSGAL.mag_5_8, MIPSGAL.mag_8_0, MIPSGAL.mag_24,
                                MIPSGAL.hmag, Gaia_DR2.parallax,
                                MIPSGAL.glon, MIPSGAL.glat)
                 .join(TwoMassPSC, on=(MIPSGAL.twomass_name == TwoMassPSC.designation))
                 .join(TIC_v8, on=(TIC_v8.twomass_psc == TwoMassPSC.designation))
                 .join(Gaia_DR2, peewee.JOIN.LEFT_OUTER,
                       on=(Gaia_DR2.source_id == TIC_v8.gaia_int))
                 .switch(TIC_v8)
                 .join(CatalogToTIC_v8, on=(CatalogToTIC_v8.target_id == TIC_v8.id))
                 .where(CatalogToTIC_v8.version_id == version_id,
                        CatalogToTIC_v8.best >> True,
                        MIPSGAL.hmag < 13,
                        # Spitzer colour cut [8.0] - [24] > 2.5
                        (MIPSGAL.mag_8_0 - MIPSGAL.mag_24) > 2.5,
                        # distant, or no Gaia counterpart at all
                        (Gaia_DR2.parallax < 0.2) |
                        (Gaia_DR2.parallax >> None)))

        if query_region:
            query = (query
                     .join_from(CatalogToTIC_v8, Catalog)
                     .where(peewee.fn.q3c_radial_query(Catalog.ra,
                                                       Catalog.dec,
                                                       query_region[0],
                                                       query_region[1],
                                                       query_region[2])))

        return query
class MWM_YSO_Cluster_APOGEE_Carton(BaseCarton):
    """YSOs - Cluster APOGEE Catalog.

    Shorthand name: mwm_yso_cluster_apogee
    (old class name MWM_YSO_Cluster_Carton / old shorthand mwm_yso_cluster)

    Selects clustered sources from the Kounkel+20 catalog of clustered
    structures with age < 7.5 dex that are brighter than H = 13
    (~45.5K sources expected).

    Wiki page:
    https://wiki.sdss.org/display/MWM/YSO+selection+function
    Source catalogs: Kounkel+20 clustered catalog (yso_clustering), Gaia DR2.
    Return columns: Gaia id, 2MASS id, G, BP, RP, J, H, K, parallax.
    Implementation: age<7.5 and h<13
    """

    name = 'mwm_yso_cluster_apogee'
    category = 'science'
    instrument = 'APOGEE'
    cadence = 'bright_3x1'
    program = 'mwm_yso'
    mapper = 'MWM'
    priority = 2700

    # catalogdb.yso_clustering is a subset of Gaia and joins to
    # gaia_dr2_source through its source_id foreign key
    # ("yso_clustering_source_id_fkey").

    def build_query(self, version_id, query_region=None):
        """Return the peewee query implementing the selection above."""
        # Columns returned for each selected target.
        returned_columns = (CatalogToTIC_v8.catalogid, Gaia_DR2.source_id,
                            Gaia_DR2.ra.alias('gaia_dr2_ra'),
                            Gaia_DR2.dec.alias('gaia_dr2_dec'),
                            YSO_Clustering.twomass,
                            Gaia_DR2.phot_g_mean_mag, Gaia_DR2.phot_bp_mean_mag,
                            Gaia_DR2.phot_rp_mean_mag.alias('gaia_dr2_rp'),
                            YSO_Clustering.j, YSO_Clustering.h,
                            YSO_Clustering.k, Gaia_DR2.parallax)

        query = (CatalogToTIC_v8
                 .select(*returned_columns)
                 .join(TIC_v8, on=(CatalogToTIC_v8.target_id == TIC_v8.id))
                 .join(Gaia_DR2, on=(TIC_v8.gaia_int == Gaia_DR2.source_id))
                 .join(YSO_Clustering,
                       on=(Gaia_DR2.source_id == YSO_Clustering.source_id))
                 # Successive where() calls are ANDed together by peewee.
                 .where(CatalogToTIC_v8.version_id == version_id)
                 .where(CatalogToTIC_v8.best >> True)
                 .where(YSO_Clustering.h < 13)
                 .where(YSO_Clustering.age < 7.5))

        if query_region:
            # Restrict to a q3c cone: (ra, dec, radius) in degrees.
            query = (query
                     .join_from(CatalogToTIC_v8, Catalog)
                     .where(peewee.fn.q3c_radial_query(Catalog.ra,
                                                       Catalog.dec,
                                                       query_region[0],
                                                       query_region[1],
                                                       query_region[2])))

        return query
class MWM_YSO_Cluster_BOSS_Carton(BaseCarton):
    """YSOs - Cluster BOSS Catalog.

    Shorthand name: mwm_yso_cluster_boss
    old class name: MWM_YSO_Cluster_Carton
    old shorthand name: mwm_yso_cluster
    Simplified Description of selection criteria:
    Selecting the clustered sources from
    the catalog of clustered structures,
    with age<7.5 dex and brighter than rp<15.5 mag.
    Wiki page:
    https://wiki.sdss.org/display/MWM/YSO+selection+function
    Additional source catalogs needed: Kounkel+20 clustered catalog
    Return columns: Gaia id, 2mass id, G, BP, RP, J, H, K, parallax
    cadence options for these targets:
    boss_bright_3x1 if RP<14.76 |
    boss_bright_4x1 if RP<15.075 |
    boss_bright_5x1 if RP<15.29 |
    boss_bright_6x1 if RP<15.5
    Implementation: age<7.5 and rp<15.5
    Comments: Split from Cluster to request BOSS observations,
    assigning cadence and faint limit for carton based on RP instead of H
    """

    name = 'mwm_yso_cluster_boss'
    category = 'science'
    # Instrument and cadence depend on each target's RP magnitude,
    # so both are assigned in post_process().
    instrument = None
    cadence = None
    program = 'mwm_yso'
    mapper = 'MWM'
    priority = 2700

    # yso_clustering is a subset of gaia and
    # can be joined to gaia_dr2_source via source_id.
    #
    # table catalogdb.yso_clustering
    # Foreign-key constraints:
    #  "yso_clustering_source_id_fkey" FOREIGN KEY (source_id)
    #  REFERENCES gaia_dr2_source(source_id)

    def build_query(self, version_id, query_region=None):
        """Return the peewee query for the cluster/BOSS selection.

        RP is aliased to ``gaia_dr2_rp`` so post_process() can read it
        back from the sandbox temp table.
        """
        query = (CatalogToTIC_v8
                 .select(CatalogToTIC_v8.catalogid, Gaia_DR2.source_id,
                         Gaia_DR2.ra.alias('gaia_dr2_ra'),
                         Gaia_DR2.dec.alias('gaia_dr2_dec'),
                         YSO_Clustering.twomass,
                         Gaia_DR2.phot_g_mean_mag, Gaia_DR2.phot_bp_mean_mag,
                         Gaia_DR2.phot_rp_mean_mag.alias('gaia_dr2_rp'),
                         YSO_Clustering.j, YSO_Clustering.h,
                         YSO_Clustering.k, Gaia_DR2.parallax)
                 .join(TIC_v8, on=(CatalogToTIC_v8.target_id == TIC_v8.id))
                 .join(Gaia_DR2, on=(TIC_v8.gaia_int == Gaia_DR2.source_id))
                 .join(YSO_Clustering,
                       on=(Gaia_DR2.source_id == YSO_Clustering.source_id))
                 .where(CatalogToTIC_v8.version_id == version_id,
                        CatalogToTIC_v8.best >> True,
                        Gaia_DR2.phot_rp_mean_mag < 15.5,
                        YSO_Clustering.age < 7.5))

        if query_region:
            query = (query
                     .join_from(CatalogToTIC_v8, Catalog)
                     .where(peewee.fn.q3c_radial_query(Catalog.ra,
                                                       Catalog.dec,
                                                       query_region[0],
                                                       query_region[1],
                                                       query_region[2])))

        return query

    def post_process(self, model):
        """Assign per-target instrument and cadence from the RP magnitude.

        cadence options for these targets:
        boss_bright_3x1 if RP<14.76 |
        boss_bright_4x1 if RP<15.075 |
        boss_bright_5x1 if RP<15.29 |
        boss_bright_6x1 if RP<15.5

        The instrument is 'BOSS' in every branch; both columns are written
        with a single parameterized UPDATE per target (the previous version
        interpolated values into the SQL string and issued two statements).
        """
        cursor = self.database.execute_sql(
            "select catalogid, gaia_dr2_rp from " +
            " sandbox.temp_mwm_yso_cluster_boss ;")
        for current_catalogid, current_rp in cursor.fetchall():
            if current_rp < 14.76:
                current_cadence = 'bright_3x1'
            elif current_rp < 15.075:
                current_cadence = 'bright_4x1'
            elif current_rp < 15.29:
                current_cadence = 'bright_5x1'
            elif current_rp < 15.5:
                current_cadence = 'bright_6x1'
            else:
                # build_query() enforces phot_rp_mean_mag < 15.5, so this
                # branch should be unreachable.
                raise TargetSelectionError('error in mwm_yso_cluster_boss ' +
                                           'post_process(): ' +
                                           'instrument = None, cadence= None')
            # Parameterized to avoid building SQL by string concatenation.
            self.database.execute_sql(
                " update sandbox.temp_mwm_yso_cluster_boss "
                " set instrument = 'BOSS', cadence = %s "
                " where catalogid = %s ;",
                (current_cadence, current_catalogid))
class MWM_YSO_PMS_APOGEE_Carton(BaseCarton):
    """
    YSOs - Pre-main sequence, APOGEE
    Shorthand name: mwm_yso_pms_apogee
    Comments: New
    Simplified Description of selection criteria:
    Selecting the clustered sources from the catalog of vetted
    pre-main sequence stars
    Wiki page: https://wiki.sdss.org/display/MWM/YSO+selection+function
    Additional source catalogs needed: catalogdb.sagitta, catalogdb.zari18pms
    Return columns: Gaia id, 2mass id, G, BP, RP, J, H, K, parallax
    cadence options for these targets
    (list all options, even though no single target will receive more than one):
    apogee_bright_3x1 (for 7 < H < 13)
    Implementation: (in sagitta | in zari18pms) & h<13
    lead contact:Marina Kounkel
    """

    # peewee Model name ---> postgres table name
    # Gaia_DR2(CatalogdbModel)--->'gaia_dr2_source'
    # Zari18pms(CatalogdbModel)--->'catalogdb.zari18pms'
    # Zari18ums(CatalogdbModel)--->'catalogdb.zari18ums'
    # Sagitta(CatalogdbModel)--->'catalogdb.sagitta'
    # TwoMassPSC(CatalogdbModel)--->'catalogdb.twomass_psc'

    name = 'mwm_yso_pms_apogee'
    category = 'science'
    instrument = 'APOGEE'
    cadence = 'bright_3x1'
    program = 'mwm_yso'
    mapper = 'MWM'
    priority = 2700

    def build_query(self, version_id, query_region=None):
        """Return the union of the Sagitta and Zari18pms selections.

        Both halves share the same column list and the same
        TIC -> Gaia / TIC -> 2MASS joins; they differ only in the final
        membership join (Sagitta vs Zari18pms) and are combined with
        ``|`` (SQL UNION), which also de-duplicates targets present in
        both catalogs.
        """
        # join with Sagitta
        query1 = (CatalogToTIC_v8
                  .select(CatalogToTIC_v8.catalogid, Gaia_DR2.source_id,
                          Gaia_DR2.ra.alias('gaia_dr2_ra'),
                          Gaia_DR2.dec.alias('gaia_dr2_dec'),
                          TwoMassPSC.pts_key,
                          TwoMassPSC.designation.alias('twomass_psc_designation'),
                          Gaia_DR2.phot_g_mean_mag, Gaia_DR2.phot_bp_mean_mag,
                          Gaia_DR2.phot_rp_mean_mag.alias('gaia_dr2_rp'),
                          TwoMassPSC.j_m, TwoMassPSC.h_m,
                          TwoMassPSC.k_m, Gaia_DR2.parallax)
                  .join(TIC_v8, on=(CatalogToTIC_v8.target_id == TIC_v8.id))
                  .join(Gaia_DR2, on=(TIC_v8.gaia_int == Gaia_DR2.source_id))
                  .switch(TIC_v8)
                  .join(TwoMassPSC, on=(TIC_v8.twomass_psc == TwoMassPSC.designation))
                  .switch(Gaia_DR2)
                  .join(Sagitta,
                        on=(Gaia_DR2.source_id == Sagitta.source_id))
                  .where(CatalogToTIC_v8.version_id == version_id,
                         CatalogToTIC_v8.best >> True,
                         TwoMassPSC.h_m < 13))

        # join with Zari18pms
        # NOTE: Zari18pms's Gaia id column is named `source`,
        # unlike Sagitta's `source_id`.
        query2 = (CatalogToTIC_v8
                  .select(CatalogToTIC_v8.catalogid, Gaia_DR2.source_id,
                          Gaia_DR2.ra.alias('gaia_dr2_ra'),
                          Gaia_DR2.dec.alias('gaia_dr2_dec'),
                          TwoMassPSC.pts_key,
                          TwoMassPSC.designation.alias('twomass_psc_designation'),
                          Gaia_DR2.phot_g_mean_mag, Gaia_DR2.phot_bp_mean_mag,
                          Gaia_DR2.phot_rp_mean_mag.alias('gaia_dr2_rp'),
                          TwoMassPSC.j_m, TwoMassPSC.h_m,
                          TwoMassPSC.k_m, Gaia_DR2.parallax)
                  .join(TIC_v8, on=(CatalogToTIC_v8.target_id == TIC_v8.id))
                  .join(Gaia_DR2, on=(TIC_v8.gaia_int == Gaia_DR2.source_id))
                  .switch(TIC_v8)
                  .join(TwoMassPSC, on=(TIC_v8.twomass_psc == TwoMassPSC.designation))
                  .switch(Gaia_DR2)
                  .join(Zari18pms,
                        on=(Gaia_DR2.source_id == Zari18pms.source))
                  .where(CatalogToTIC_v8.version_id == version_id,
                         CatalogToTIC_v8.best >> True,
                         TwoMassPSC.h_m < 13))

        # | is for peewee SQL union
        query = query1 | query2

        if query_region:
            # NOTE(review): join_from on a compound (union) query — assumed
            # to apply as intended here; confirm against peewee behavior.
            query = (query
                     .join_from(CatalogToTIC_v8, Catalog)
                     .where(peewee.fn.q3c_radial_query(Catalog.ra,
                                                       Catalog.dec,
                                                       query_region[0],
                                                       query_region[1],
                                                       query_region[2])))

        return query
class MWM_YSO_PMS_BOSS_Carton(BaseCarton):
    """
    YSOs - Pre-main sequence, BOSS
    Shorthand name: mwm_yso_pms_boss
    Comments: New, Split from PMS
    Simplified Description of selection criteria:
    Selecting the clustered sources from the catalog of vetted
    pre-main sequence stars
    Wiki page: https://wiki.sdss.org/display/MWM/YSO+selection+function
    Additional source catalogs needed: catalogdb.sagitta, catalogdb.zari18pms
    Return columns: Gaia id, 2mass id, G, BP, RP, J, H, K, parallax
    cadence options for these targets:
    boss_bright_3x1 if RP<14.76 |
    boss_bright_4x1 if RP<15.075 |
    boss_bright_5x1 if RP<15.29 |
    boss_bright_6x1 if RP<15.5
    Implementation: (in sagitta | in zari18pms) & rp<15.5
    lead contact:Marina Kounkel
    """

    # peewee Model name ---> postgres table name
    # Gaia_DR2(CatalogdbModel)--->'gaia_dr2_source'
    # Zari18pms(CatalogdbModel)--->'catalogdb.zari18pms'
    # Zari18ums(CatalogdbModel)--->'catalogdb.zari18ums'
    # Sagitta(CatalogdbModel)--->'catalogdb.sagitta'
    # TwoMassPSC(CatalogdbModel)--->'catalogdb.twomass_psc'

    name = 'mwm_yso_pms_boss'
    category = 'science'
    # Instrument and cadence depend on each target's RP magnitude,
    # so both are assigned in post_process().
    instrument = None
    cadence = None
    program = 'mwm_yso'
    mapper = 'MWM'
    priority = 2700

    def build_query(self, version_id, query_region=None):
        """Return the union of the Sagitta and Zari18pms selections.

        Both halves share the same column list and joins and differ only
        in the final membership join; ``|`` produces a SQL UNION.  RP is
        aliased to ``gaia_dr2_rp`` for post_process().
        """
        # join with Sagitta
        query1 = (CatalogToTIC_v8
                  .select(CatalogToTIC_v8.catalogid, Gaia_DR2.source_id,
                          Gaia_DR2.ra.alias('gaia_dr2_ra'),
                          Gaia_DR2.dec.alias('gaia_dr2_dec'),
                          TwoMassPSC.pts_key,
                          TwoMassPSC.designation.alias('twomass_psc_designation'),
                          Gaia_DR2.phot_g_mean_mag, Gaia_DR2.phot_bp_mean_mag,
                          Gaia_DR2.phot_rp_mean_mag.alias('gaia_dr2_rp'),
                          TwoMassPSC.j_m, TwoMassPSC.h_m,
                          TwoMassPSC.k_m, Gaia_DR2.parallax)
                  .join(TIC_v8, on=(CatalogToTIC_v8.target_id == TIC_v8.id))
                  .join(Gaia_DR2, on=(TIC_v8.gaia_int == Gaia_DR2.source_id))
                  .switch(TIC_v8)
                  .join(TwoMassPSC, on=(TIC_v8.twomass_psc == TwoMassPSC.designation))
                  .switch(Gaia_DR2)
                  .join(Sagitta,
                        on=(Gaia_DR2.source_id == Sagitta.source_id))
                  .where(CatalogToTIC_v8.version_id == version_id,
                         CatalogToTIC_v8.best >> True,
                         Gaia_DR2.phot_rp_mean_mag < 15.5))

        # join with Zari18pms (its Gaia id column is named `source`)
        query2 = (CatalogToTIC_v8
                  .select(CatalogToTIC_v8.catalogid, Gaia_DR2.source_id,
                          Gaia_DR2.ra.alias('gaia_dr2_ra'),
                          Gaia_DR2.dec.alias('gaia_dr2_dec'),
                          TwoMassPSC.pts_key,
                          TwoMassPSC.designation.alias('twomass_psc_designation'),
                          Gaia_DR2.phot_g_mean_mag, Gaia_DR2.phot_bp_mean_mag,
                          Gaia_DR2.phot_rp_mean_mag.alias('gaia_dr2_rp'),
                          TwoMassPSC.j_m, TwoMassPSC.h_m,
                          TwoMassPSC.k_m, Gaia_DR2.parallax)
                  .join(TIC_v8, on=(CatalogToTIC_v8.target_id == TIC_v8.id))
                  .join(Gaia_DR2, on=(TIC_v8.gaia_int == Gaia_DR2.source_id))
                  .switch(TIC_v8)
                  .join(TwoMassPSC, on=(TIC_v8.twomass_psc == TwoMassPSC.designation))
                  .switch(Gaia_DR2)
                  .join(Zari18pms,
                        on=(Gaia_DR2.source_id == Zari18pms.source))
                  .where(CatalogToTIC_v8.version_id == version_id,
                         CatalogToTIC_v8.best >> True,
                         Gaia_DR2.phot_rp_mean_mag < 15.5))

        # | is for peewee SQL union
        query = query1 | query2

        if query_region:
            query = (query
                     .join_from(CatalogToTIC_v8, Catalog)
                     .where(peewee.fn.q3c_radial_query(Catalog.ra,
                                                       Catalog.dec,
                                                       query_region[0],
                                                       query_region[1],
                                                       query_region[2])))

        return query

    def post_process(self, model):
        """Assign per-target instrument and cadence from the RP magnitude.

        cadence options for these targets:
        boss_bright_3x1 if RP<14.76 |
        boss_bright_4x1 if RP<15.075 |
        boss_bright_5x1 if RP<15.29 |
        boss_bright_6x1 if RP<15.5

        The instrument is 'BOSS' in every branch; both columns are written
        with a single parameterized UPDATE per target (the previous version
        interpolated values into the SQL string and issued two statements).
        """
        cursor = self.database.execute_sql(
            "select catalogid, gaia_dr2_rp from " +
            " sandbox.temp_mwm_yso_pms_boss ;")
        for current_catalogid, current_rp in cursor.fetchall():
            if current_rp < 14.76:
                current_cadence = 'bright_3x1'
            elif current_rp < 15.075:
                current_cadence = 'bright_4x1'
            elif current_rp < 15.29:
                current_cadence = 'bright_5x1'
            elif current_rp < 15.5:
                current_cadence = 'bright_6x1'
            else:
                # build_query() enforces phot_rp_mean_mag < 15.5, so this
                # branch should be unreachable.
                raise TargetSelectionError('error in mwm_yso_pms_boss ' +
                                           'post_process(): ' +
                                           'instrument = None, cadence= None')
            # Parameterized to avoid building SQL by string concatenation.
            self.database.execute_sql(
                " update sandbox.temp_mwm_yso_pms_boss "
                " set instrument = 'BOSS', cadence = %s "
                " where catalogid = %s ;",
                (current_cadence, current_catalogid))
| 43.568489
| 88
| 0.561307
| 8,372
| 67,749
| 4.277831
| 0.053751
| 0.061568
| 0.033786
| 0.023734
| 0.893673
| 0.877618
| 0.864327
| 0.854135
| 0.850868
| 0.845396
| 0
| 0.04483
| 0.354013
| 67,749
| 1,554
| 89
| 43.596525
| 0.773495
| 0.306056
| 0
| 0.91226
| 0
| 0
| 0.072433
| 0.019825
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021635
| false
| 0
| 0.004808
| 0
| 0.167067
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b0fee27a1c84b3b92b89c95632d7bc237ac344f3
| 94
|
py
|
Python
|
tests/test_script_pipeline.py
|
Forks-yugander-krishan-singh/jenkins-job-builder-pipeline
|
c8aac16b97eb89882e0a5a7250ad8ed33ca7ddd8
|
[
"Apache-2.0"
] | null | null | null |
tests/test_script_pipeline.py
|
Forks-yugander-krishan-singh/jenkins-job-builder-pipeline
|
c8aac16b97eb89882e0a5a7250ad8ed33ca7ddd8
|
[
"Apache-2.0"
] | null | null | null |
tests/test_script_pipeline.py
|
Forks-yugander-krishan-singh/jenkins-job-builder-pipeline
|
c8aac16b97eb89882e0a5a7250ad8ed33ca7ddd8
|
[
"Apache-2.0"
] | null | null | null |
from base import assert_case
def test_script_pipeline():
    """Run the shared assert_case check for the 'script_pipeline' fixture."""
    # assert_case (from the test helpers in `base`) presumably compares the
    # fixture's generated output against an expected file — verify in base.py.
    assert_case('script_pipeline')
| 15.666667
| 34
| 0.787234
| 13
| 94
| 5.307692
| 0.692308
| 0.289855
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.138298
| 94
| 5
| 35
| 18.8
| 0.851852
| 0
| 0
| 0
| 0
| 0
| 0.159574
| 0
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0.333333
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
b02c1edb35efed2af8ead59b4a2b1e4957fcf74d
| 11,362
|
py
|
Python
|
code/python/dataprocessing.py
|
nordin11/DataProject
|
9714e9320d2849d1f4eb3224cb3521cb10138744
|
[
"MIT"
] | null | null | null |
code/python/dataprocessing.py
|
nordin11/DataProject
|
9714e9320d2849d1f4eb3224cb3521cb10138744
|
[
"MIT"
] | 2
|
2018-01-14T23:14:37.000Z
|
2018-01-21T21:54:23.000Z
|
code/python/dataprocessing.py
|
nordin11/DataProject
|
9714e9320d2849d1f4eb3224cb3521cb10138744
|
[
"MIT"
] | null | null | null |
# coding: utf-8
# In[1]:
from pandas_datareader import data
import matplotlib.pyplot as plt
import pandas as pd
import numpy as np
import json
import io

# Shared configuration (kept at module level, as in the original notebook export).
data_source = 'yahoo'
start_date = '2000-01-01'
end_date = '2018-01-01'


def fetch_close_prices(tickers, source=data_source, start=start_date, end=end_date):
    """Download daily close prices for *tickers* and return a cleaned DataFrame.

    The result is reindexed to every business day between *start* and *end*
    and rows where at least one company was not yet public are dropped.
    """
    panel_data = data.DataReader(tickers, source, start, end)
    # 'Close' is the major index label of the panel returned by DataReader.
    close = panel_data.loc['Close']
    # Align prices to all weekdays in the window, then drop incomplete rows.
    all_weekdays = pd.date_range(start=start, end=end, freq='B')
    close = close.reindex(all_weekdays)
    return close.dropna(axis=0, how='any')


def correlation_matrix(tickers):
    """Return the ticker-by-ticker correlation matrix of daily close prices."""
    # np.asarray instead of the deprecated np.matrix/np.asmatrix; corrcoef
    # produces identical results on a 2-D array.
    close = np.asarray(fetch_close_prices(tickers))
    return np.corrcoef(close, rowvar=False)


# Industry groups to analyze.
financeTickers = ['JPM','BAC','WFC','V','C']
techTickers = ['GOOGL','MSFT','FB','T','VZ']
servicesTickers = ['AMZN','BABA','WMT','HD','CMCSA']
basicTickers = ['XOM','RDS-B','PTR','CVX','BP']
consumerTickers = ['AAPL','PG','BUD','KO','TM']
# All industries together (same order as the original hand-written list).
AllTickers = (financeTickers + techTickers + servicesTickers
              + basicTickers + consumerTickers)

# In[2]: FINANCE correlation matrix
# print(x) with a single argument is valid and identical in Python 2 and 3.
print(correlation_matrix(financeTickers))

# In[3]: TECH correlation matrix
print(correlation_matrix(techTickers))

# In[4]: SERVICES correlation matrix
print(correlation_matrix(servicesTickers))

# In[5]: BASIC MATERIALS correlation matrix
print(correlation_matrix(basicTickers))

# In[6]: CONSUMER GOODS correlation matrix
print(correlation_matrix(consumerTickers))

# In[45]: per-stock sum of absolute correlations across ALL industries.
c = correlation_matrix(AllTickers)
# Subtract 1 to remove each stock's self-correlation from its row sum.
print(np.abs(c).sum(axis=1) - 1)

# In[8]: build a D3-style force-network JSON from the full correlation matrix.
c = correlation_matrix(AllTickers)
# Nodes: one per ticker, grouped by industry (1..5, in AllTickers order).
nodes = []
industry_groups = [financeTickers, techTickers, servicesTickers,
                   basicTickers, consumerTickers]
for group, tickers in enumerate(industry_groups, start=1):
    for ticker in tickers:
        nodes.append({"id": ticker, "group": group})
# Links: one per unordered pair of tickers, weighted by their correlation.
links = []
for i in range(len(AllTickers)):
    for j in range(i + 1, len(AllTickers)):
        # float() keeps json.dumps independent of the numpy scalar type.
        links.append({"source": AllTickers[i],
                      "target": AllTickers[j],
                      "value": float(c[i, j])})
# Bring the two lists together into one network dict and dump it.
json_data = {
    "nodes": nodes,
    "links": links
}
network = json.dumps(json_data)
# Copied this print into a downloaded json file.
print(network)

# In[29]: normalize the data as relative gain (first price = 1.0) and export.
close = fetch_close_prices(AllTickers)
close = close / close.iloc[0, :]
close.to_csv('price_relative_gain.csv', encoding='utf-8')
| 29.359173
| 161
| 0.71079
| 1,853
| 11,362
| 4.286023
| 0.111171
| 0.044321
| 0.024175
| 0.014102
| 0.871947
| 0.865399
| 0.824352
| 0.810753
| 0.79174
| 0.760262
| 0
| 0.030024
| 0.158687
| 11,362
| 386
| 162
| 29.435233
| 0.800816
| 0.454673
| 0
| 0.718519
| 0
| 0
| 0.142834
| 0.003794
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.044444
| null | null | 0.051852
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
c67e234fb990bd562f7add66f3e6cadd569e07b3
| 135
|
py
|
Python
|
tsserver/configutils.py
|
m4tx/techswarm-server
|
a04a3e2a731c3a086aa8476f66adda64973dcd66
|
[
"MIT"
] | 1
|
2016-08-12T14:27:31.000Z
|
2016-08-12T14:27:31.000Z
|
tsserver/configutils.py
|
TechSwarm/techswarm-server
|
a04a3e2a731c3a086aa8476f66adda64973dcd66
|
[
"MIT"
] | null | null | null |
tsserver/configutils.py
|
TechSwarm/techswarm-server
|
a04a3e2a731c3a086aa8476f66adda64973dcd66
|
[
"MIT"
] | null | null | null |
import os
from tsserver import app
def get_upload_dir():
    """Return the absolute path of the photo upload directory.

    Joins the Flask application's root path with the configured
    ``PHOTOS_UPLOAD_FOLDER`` setting.
    """
    upload_folder = app.config['PHOTOS_UPLOAD_FOLDER']
    return os.path.join(app.root_path, upload_folder)
| 16.875
| 74
| 0.762963
| 22
| 135
| 4.454545
| 0.727273
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.133333
| 135
| 7
| 75
| 19.285714
| 0.837607
| 0
| 0
| 0
| 0
| 0
| 0.148148
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
c682db5388940d53e4b8b265685d517094fe0e6d
| 88
|
py
|
Python
|
soam/core/__init__.py
|
MuttData/soam
|
65612a02552668c6721dc20e675654883391c3e9
|
[
"Apache-2.0"
] | 1
|
2021-09-17T01:14:57.000Z
|
2021-09-17T01:14:57.000Z
|
soam/core/__init__.py
|
MuttData/soam
|
65612a02552668c6721dc20e675654883391c3e9
|
[
"Apache-2.0"
] | null | null | null |
soam/core/__init__.py
|
MuttData/soam
|
65612a02552668c6721dc20e675654883391c3e9
|
[
"Apache-2.0"
] | 1
|
2021-08-09T14:22:50.000Z
|
2021-08-09T14:22:50.000Z
|
"""SoaM core."""
from soam.core.runner import SoamFlow
from soam.core.step import Step
| 17.6
| 37
| 0.75
| 14
| 88
| 4.714286
| 0.5
| 0.363636
| 0.363636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.125
| 88
| 4
| 38
| 22
| 0.857143
| 0.113636
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
c689b5418a7a0887640ee647ae675dfb3518bc81
| 8,039
|
py
|
Python
|
import_export_google_civic/migrations/0001_initial.py
|
adborden/WeVoteBase
|
7fd612aee1d3638c8a74cc81873ce0687f62cf33
|
[
"MIT"
] | null | null | null |
import_export_google_civic/migrations/0001_initial.py
|
adborden/WeVoteBase
|
7fd612aee1d3638c8a74cc81873ce0687f62cf33
|
[
"MIT"
] | null | null | null |
import_export_google_civic/migrations/0001_initial.py
|
adborden/WeVoteBase
|
7fd612aee1d3638c8a74cc81873ce0687f62cf33
|
[
"MIT"
] | 1
|
2020-03-04T00:22:39.000Z
|
2020-03-04T00:22:39.000Z
|
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    """Initial schema for the import_export_google_civic app.

    Creates staging tables that mirror raw Google Civic API payloads
    (candidate campaigns, contest offices, contest referendums and
    elections) before they are translated into We Vote's own models.
    NOTE(review): this is an applied initial migration — do not edit its
    field definitions; Django compares them against recorded state.
    """

    # Initial migration: no dependencies on earlier migrations.
    dependencies = [
    ]

    operations = [
        # Raw candidate-campaign rows as returned by the Google Civic API.
        migrations.CreateModel(
            name='GoogleCivicCandidateCampaign',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('name', models.CharField(max_length=254, verbose_name=b'google civic candidate name')),
                ('party', models.CharField(max_length=254, null=True, verbose_name=b'google civic party', blank=True)),
                ('photo_url', models.CharField(max_length=254, null=True, verbose_name=b'google civic photoUrl', blank=True)),
                ('order_on_ballot', models.CharField(max_length=254, null=True, verbose_name=b'google civic order on ballot', blank=True)),
                ('google_civic_contest_office_id', models.CharField(max_length=254, verbose_name=b'google civic internal temp contest_office_id id')),
                ('we_vote_contest_office_id', models.CharField(max_length=254, null=True, verbose_name=b'we vote contest_office_id id', blank=True)),
                ('google_civic_election_id', models.CharField(max_length=254, verbose_name=b'google election id')),
                ('we_vote_election_id', models.CharField(max_length=254, null=True, verbose_name=b'we vote election id', blank=True)),
                ('we_vote_candidate_campaign_id', models.CharField(max_length=254, null=True, verbose_name=b'we vote candidate campaign id', blank=True)),
                ('we_vote_politician_id', models.CharField(max_length=254, null=True, verbose_name=b'we vote politician id', blank=True)),
                ('candidate_url', models.URLField(null=True, verbose_name=b'website url of candidate campaign', blank=True)),
                ('facebook_url', models.URLField(null=True, verbose_name=b'facebook url of candidate campaign', blank=True)),
                ('twitter_url', models.URLField(null=True, verbose_name=b'twitter url of candidate campaign', blank=True)),
                ('google_plus_url', models.URLField(null=True, verbose_name=b'google plus url of candidate campaign', blank=True)),
                ('youtube_url', models.URLField(null=True, verbose_name=b'youtube url of candidate campaign', blank=True)),
                ('email', models.CharField(max_length=254, null=True, verbose_name=b'google civic candidate campaign email', blank=True)),
                # NOTE(review): verbose_name says "email" — looks like a
                # copy-paste slip; confirm before changing an applied migration.
                ('phone', models.CharField(max_length=254, null=True, verbose_name=b'google civic candidate campaign email', blank=True)),
                ('was_processed', models.BooleanField(default=False, verbose_name=b'is primary election')),
            ],
        ),
        # Raw contest-office rows (the office being voted on).
        migrations.CreateModel(
            name='GoogleCivicContestOffice',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('office', models.CharField(max_length=254, verbose_name=b'google civic office')),
                ('google_civic_election_id', models.CharField(max_length=254, null=True, verbose_name=b'google civic election id', blank=True)),
                ('we_vote_election_id', models.CharField(max_length=254, null=True, verbose_name=b'we vote election id', blank=True)),
                ('we_vote_contest_office_id', models.CharField(max_length=254, null=True, verbose_name=b'we vote contest office id', blank=True)),
                ('number_voting_for', models.CharField(max_length=254, null=True, verbose_name=b'google civic number of candidates to vote for', blank=True)),
                ('number_elected', models.CharField(max_length=254, null=True, verbose_name=b'google civic number of candidates who will be elected', blank=True)),
                ('contest_level0', models.CharField(max_length=254, null=True, verbose_name=b'google civic level, option 0', blank=True)),
                ('contest_level1', models.CharField(max_length=254, null=True, verbose_name=b'google civic level, option 1', blank=True)),
                ('contest_level2', models.CharField(max_length=254, null=True, verbose_name=b'google civic level, option 2', blank=True)),
                ('ballot_placement', models.CharField(max_length=254, null=True, verbose_name=b'google civic ballot placement', blank=True)),
                ('primary_party', models.CharField(max_length=254, null=True, verbose_name=b'google civic primary party', blank=True)),
                ('district_name', models.CharField(max_length=254, verbose_name=b'google civic district name')),
                ('district_scope', models.CharField(max_length=254, verbose_name=b'google civic district scope')),
                ('district_ocd_id', models.CharField(max_length=254, verbose_name=b'google civic district ocd id')),
                ('electorate_specifications', models.CharField(max_length=254, null=True, verbose_name=b'google civic primary party', blank=True)),
                ('special', models.CharField(max_length=254, null=True, verbose_name=b'google civic primary party', blank=True)),
                ('was_processed', models.BooleanField(default=False, verbose_name=b'is primary election')),
            ],
        ),
        # Raw contest-referendum rows (ballot measures).
        migrations.CreateModel(
            name='GoogleCivicContestReferendum',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('referendum_title', models.CharField(max_length=254, verbose_name=b'google civic referendum title')),
                ('referendum_subtitle', models.CharField(max_length=254, verbose_name=b'google civic referendum subtitle')),
                ('referendum_url', models.CharField(max_length=254, null=True, verbose_name=b'google civic referendum details url')),
                ('google_civic_election_id', models.CharField(max_length=254, verbose_name=b'google civic election id')),
                ('we_vote_election_id', models.CharField(max_length=254, null=True, verbose_name=b'we vote election id', blank=True)),
                ('ballot_placement', models.CharField(max_length=254, null=True, verbose_name=b'google civic ballot placement', blank=True)),
                ('primary_party', models.CharField(max_length=254, null=True, verbose_name=b'google civic primary party', blank=True)),
                ('district_name', models.CharField(max_length=254, verbose_name=b'google civic district name')),
                ('district_scope', models.CharField(max_length=254, verbose_name=b'google civic district scope')),
                ('district_ocd_id', models.CharField(max_length=254, verbose_name=b'google civic district ocd id')),
                ('electorate_specifications', models.CharField(max_length=254, null=True, verbose_name=b'google civic primary party', blank=True)),
                ('special', models.CharField(max_length=254, null=True, verbose_name=b'google civic primary party', blank=True)),
                ('was_processed', models.BooleanField(default=False, verbose_name=b'is primary election')),
            ],
        ),
        # Raw election rows; Google/We Vote election ids are unique here.
        migrations.CreateModel(
            name='GoogleCivicElection',
            fields=[
                ('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
                ('google_civic_election_id', models.CharField(unique=True, max_length=20, verbose_name=b'google civic election id')),
                ('we_vote_election_id', models.CharField(max_length=20, unique=True, null=True, verbose_name=b'we vote election id', blank=True)),
                ('name', models.CharField(max_length=254, verbose_name=b'google civic election name')),
                ('election_day', models.CharField(max_length=254, verbose_name=b'google civic election day')),
                ('was_processed', models.BooleanField(default=False, verbose_name=b'is primary election')),
            ],
        ),
    ]
| 88.340659
| 163
| 0.674711
| 1,004
| 8,039
| 5.205179
| 0.103586
| 0.119977
| 0.121699
| 0.197474
| 0.854573
| 0.844814
| 0.808649
| 0.799082
| 0.761194
| 0.759089
| 0
| 0.021293
| 0.199652
| 8,039
| 90
| 164
| 89.322222
| 0.790954
| 0.002612
| 0
| 0.488095
| 0
| 0
| 0.296657
| 0.044411
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.02381
| 0
| 0.059524
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
afa4fc125e02492254a3c7d912fe3c9437fcd289
| 104
|
py
|
Python
|
hisa/capsule/capsule.py
|
rittikaadhikari/stock-recommendation
|
1f14276a955301b1c6fa1c00bd88b00cf5668d8c
|
[
"MIT"
] | null | null | null |
hisa/capsule/capsule.py
|
rittikaadhikari/stock-recommendation
|
1f14276a955301b1c6fa1c00bd88b00cf5668d8c
|
[
"MIT"
] | null | null | null |
hisa/capsule/capsule.py
|
rittikaadhikari/stock-recommendation
|
1f14276a955301b1c6fa1c00bd88b00cf5668d8c
|
[
"MIT"
] | null | null | null |
from six import with_metaclass
from abc import ABCMeta
class Capsule(with_metaclass(ABCMeta)):
    """Abstract marker base class for capsules.

    ``six.with_metaclass`` applies the ``ABCMeta`` metaclass in a way
    that works under both Python 2 and Python 3; subclasses provide
    the actual behaviour.
    """
| 14.857143
| 40
| 0.807692
| 15
| 104
| 5.466667
| 0.666667
| 0.317073
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.144231
| 104
| 6
| 41
| 17.333333
| 0.921348
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
afaa2355b4d151fc4ab230e408b2aad75a508f4f
| 159
|
py
|
Python
|
datasets/__init__.py
|
yubin1219/Semantic-Seg
|
c40bd43d3d7e44bc995b8d041736580dec084251
|
[
"BSD-2-Clause"
] | null | null | null |
datasets/__init__.py
|
yubin1219/Semantic-Seg
|
c40bd43d3d7e44bc995b8d041736580dec084251
|
[
"BSD-2-Clause"
] | null | null | null |
datasets/__init__.py
|
yubin1219/Semantic-Seg
|
c40bd43d3d7e44bc995b8d041736580dec084251
|
[
"BSD-2-Clause"
] | null | null | null |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from .cityscapes import Cityscapes as cityscapes
| 26.5
| 48
| 0.874214
| 20
| 159
| 6.25
| 0.45
| 0.24
| 0.384
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.119497
| 159
| 5
| 49
| 31.8
| 0.892857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0.25
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
afacbc72667948ebf6d0c48f3bf3de40ac0b15ac
| 21,535
|
py
|
Python
|
testscripts/RDKB/component/WIFIAgent/TS_WIFIAGENT_ForceDisable_CheckRadioEnable_InBridgeMode.py
|
rdkcmf/rdkb-tools-tdkb
|
9f9c3600cd701d5fc90ac86a6394ebd28d49267e
|
[
"Apache-2.0"
] | null | null | null |
testscripts/RDKB/component/WIFIAgent/TS_WIFIAGENT_ForceDisable_CheckRadioEnable_InBridgeMode.py
|
rdkcmf/rdkb-tools-tdkb
|
9f9c3600cd701d5fc90ac86a6394ebd28d49267e
|
[
"Apache-2.0"
] | null | null | null |
testscripts/RDKB/component/WIFIAgent/TS_WIFIAGENT_ForceDisable_CheckRadioEnable_InBridgeMode.py
|
rdkcmf/rdkb-tools-tdkb
|
9f9c3600cd701d5fc90ac86a6394ebd28d49267e
|
[
"Apache-2.0"
] | null | null | null |
##########################################################################
# If not stated otherwise in this file or this component's Licenses.txt
# file the following copyright and licenses apply:
#
# Copyright 2020 RDK Management
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
'''
<?xml version='1.0' encoding='utf-8'?>
<xml>
<id></id>
<!-- Do not edit id. This will be auto filled while exporting. If you are adding a new script keep the id empty -->
<version>3</version>
<!-- Do not edit version. This will be auto incremented while updating. If you are adding a new script you can keep the vresion as 1 -->
<name>TS_WIFIAGENT_ForceDisable_CheckRadioEnable_InBridgeMode</name>
<!-- If you are adding a new script you can specify the script name. Script Name should be unique same as this file name with out .py extension -->
<primitive_test_id></primitive_test_id>
<!-- Do not change primitive_test_id if you are editing an existing script. -->
<primitive_test_name>WIFIAgent_Get</primitive_test_name>
<!-- -->
<primitive_test_version>1</primitive_test_version>
<!-- -->
<status>FREE</status>
<!-- -->
<synopsis>To check if 2.4G and 5G radio gets disabled when WiFi Force Disable is enabled in bridge mode</synopsis>
<!-- -->
<groups_id />
<!-- -->
<execution_time>15</execution_time>
<!-- -->
<long_duration>false</long_duration>
<!-- -->
<advanced_script>false</advanced_script>
<!-- execution_time is the time out time for test execution -->
<remarks></remarks>
<!-- Reason for skipping the tests if marked to skip -->
<skip>false</skip>
<!-- -->
<box_types>
<box_type>Broadband</box_type>
<!-- -->
</box_types>
<rdk_versions>
<rdk_version>RDKB</rdk_version>
<!-- -->
</rdk_versions>
<test_cases>
<test_case_id>TC_WIFIAGENT_128</test_case_id>
<test_objective>This test case is to check if 2.4G and 5G radio gets disabled when WiFi Force Disable is enabled in bridge mode</test_objective>
<test_type>Positive</test_type>
<test_setup>Broadband</test_setup>
<pre_requisite>1.Ccsp Components in DUT should be in a running state that includes component under test Cable Modem
2.TDK Agent should be in running state or invoke it through StartTdk.sh script</pre_requisite>
<api_or_interface_used>WIFIAgent_Get
WIFIAgent_Set</api_or_interface_used>
<input_parameters>Device.WiFi.Radio.1.Enable
Device.WiFi.Radio.2.Enable
Device.WiFi.X_RDK-CENTRAL_COM_ForceDisable
Device.X_CISCO_COM_DeviceControl.LanManagementEntry.1.LanMode</input_parameters>
<automation_approch>1.Load the module
2.Get the current lan mode and set the mode to bridge-static
3.Get the current status of Device.WiFi.X_RDK-CENTRAL_COM_ForceDisable,Device.WiFi.Radio.1.Enable and Device.WiFi.Radio.2.Enable
4.Enable Device.WiFi.X_RDK-CENTRAL_COM_ForceDisable
5.Device.WiFi.Radio.1.Enable and Device.WiFi.Radio.2.Enable should be disabled
6.Revert the Device.WiFi.X_RDK-CENTRAL_COM_ForceDisable to previous
7.Verify that Device.WiFi.Radio.1.Enable and Device.WiFi.Radio.2.Enable also go to previous after revert operation
8.Revert the LAN mode to previous state
7.Unload the module</automation_approch>
<expected_output>On Enabling Device.WiFi.X_RDK-CENTRAL_COM_ForceDisable 2.4G and 5G radio should be disabled in bridge-mode</expected_output>
<priority>High</priority>
<test_stub_interface>WIFIAGENT</test_stub_interface>
<test_script>TS_WIFIAGENT_ForceDisable_CheckRadioEnable_InBridgeMode</test_script>
<skipped>No</skipped>
<release_version>M84</release_version>
<remarks>None</remarks>
</test_cases>
<script_tags />
</xml>
'''
# use tdklib library,which provides a wrapper for tdk testcase script
import tdklib;
from time import sleep;
#Test component to be tested
obj = tdklib.TDKScriptingLibrary("wifiagent","1");
#IP and Port of box, No need to change,
#This will be replaced with corresponding DUT Ip and port while executing script
ip = <ipaddress>
port = <port>
obj.configureTestCase(ip,port,'TS_WIFIAGENT_ForceDisable_CheckRadioEnable_InBridgeMode');
#result of connection with test component and DUT
result =obj.getLoadModuleResult();
loadmodulestatus=obj.getLoadModuleResult();
if "SUCCESS" in loadmodulestatus.upper():
    #Set the result status of execution
    obj.setLoadModuleStatus("SUCCESS");
    expectedresult ="SUCCESS";
    # TEST STEP 1: read the current LAN mode so it can be restored at the end
    tdkTestObj = obj.createTestStep('WIFIAgent_Get');
    tdkTestObj.addParameter("paramName","Device.X_CISCO_COM_DeviceControl.LanManagementEntry.1.LanMode")
    # NOTE(review): the literal string "expectedresult" (not the variable) is
    # passed to executeTestCase throughout this script — confirm this matches
    # the TDK scripting convention.
    tdkTestObj.executeTestCase("expectedresult");
    actualresult = tdkTestObj.getResult();
    details = tdkTestObj.getResultDetails();
    # NOTE(review): parsed before the success check below — this raises
    # IndexError if the get failed and "VALUE:" is absent from details.
    defaultLANmode = details.split("VALUE:")[1].split(' ')[0];
    if expectedresult in actualresult:
        tdkTestObj.setResultStatus("SUCCESS");
        print "TEST STEP 1: Get the current LAN mode";
        print "EXPECTED RESULT 1: Should get the current LAN mode";
        print "ACTUAL RESULT 1: default LAN mode is %s" %defaultLANmode;
        print "[TEST EXECUTION RESULT] : SUCCESS";
        # TEST STEP 2: switch the device into bridge mode
        mode = "bridge-static";
        tdkTestObj = obj.createTestStep('WIFIAgent_Set');
        tdkTestObj.addParameter("paramName","Device.X_CISCO_COM_DeviceControl.LanManagementEntry.1.LanMode")
        tdkTestObj.addParameter("paramValue", mode)
        tdkTestObj.addParameter("paramType","string")
        tdkTestObj.executeTestCase("expectedresult");
        actualresult = tdkTestObj.getResult();
        details = tdkTestObj.getResultDetails();
        if expectedresult in actualresult:
            #Set the result status of execution
            tdkTestObj.setResultStatus("SUCCESS");
            print "TEST STEP 2: Change lanmode to %s " %mode
            print "EXPECTED RESULT 2: Should change lanmode to %s" %mode
            print "ACTUAL RESULT 2: Details: %s " %details;
            #Get the result of execution
            print "[TEST EXECUTION RESULT] : SUCCESS";
            #sleep to reflect the mode change
            sleep(90);
            # TEST STEP 3: capture the pre-test 2.4GHz radio state for later comparison
            tdkTestObj = obj.createTestStep('WIFIAgent_Get');
            tdkTestObj.addParameter("paramName","Device.WiFi.Radio.1.Enable")
            tdkTestObj.executeTestCase("expectedresult");
            actualresult = tdkTestObj.getResult();
            defaultRadio1 = tdkTestObj.getResultDetails();
            if expectedresult in actualresult:
                defaultRadio1 = defaultRadio1.split("VALUE:")[1].split(" ")[0].strip();
                tdkTestObj.setResultStatus("SUCCESS");
                print "TEST STEP 3: Get the Radio Enable status for 2.4GHz";
                print "EXPECTED RESULT 3: Should get the Radio Enable status for 2.4GHz";
                print "ACTUAL RESULT 3: Radio Enable status for 2.4GHz state is %s" %defaultRadio1;
                print "[TEST EXECUTION RESULT] : SUCCESS";
                # TEST STEP 4: capture the pre-test 5GHz radio state
                tdkTestObj = obj.createTestStep('WIFIAgent_Get');
                tdkTestObj.addParameter("paramName","Device.WiFi.Radio.2.Enable")
                tdkTestObj.executeTestCase("expectedresult");
                actualresult = tdkTestObj.getResult();
                defaultRadio2 = tdkTestObj.getResultDetails();
                if expectedresult in actualresult:
                    defaultRadio2 = defaultRadio2.split("VALUE:")[1].split(" ")[0].strip();
                    tdkTestObj.setResultStatus("SUCCESS");
                    print "TEST STEP 4: Get the Radio Enable status for 5GHz";
                    print "EXPECTED RESULT 4: Should get the Radio Enable status for 5GHz";
                    print "ACTUAL RESULT 4: Radio Enable status for 5GHz state is %s" %defaultRadio2;
                    print "[TEST EXECUTION RESULT] : SUCCESS";
                    # TEST STEP 5: capture the current ForceDisable flag so it can be reverted
                    tdkTestObj = obj.createTestStep('WIFIAgent_Get');
                    tdkTestObj.addParameter("paramName","Device.WiFi.X_RDK-CENTRAL_COM_ForceDisable")
                    tdkTestObj.executeTestCase("expectedresult");
                    actualresult = tdkTestObj.getResult();
                    default = tdkTestObj.getResultDetails();
                    if expectedresult in actualresult:
                        default = default.split("VALUE:")[1].split(" ")[0].strip();
                        tdkTestObj.setResultStatus("SUCCESS");
                        print "TEST STEP 5: Get the current WiFi Force Disable state";
                        print "EXPECTED RESULT 5: Should get current WiFi Force Disable state";
                        print "ACTUAL RESULT 5: current WiFi Force Disable state is %s" %default;
                        print "[TEST EXECUTION RESULT] : SUCCESS";
                        # TEST STEP 6: assert ForceDisable, which should take both radios down
                        tdkTestObj = obj.createTestStep('WIFIAgent_Set');
                        tdkTestObj.addParameter("paramName","Device.WiFi.X_RDK-CENTRAL_COM_ForceDisable")
                        tdkTestObj.addParameter("paramValue", "true");
                        tdkTestObj.addParameter("paramType","boolean")
                        tdkTestObj.executeTestCase("expectedresult");
                        actualresult = tdkTestObj.getResult();
                        details = tdkTestObj.getResultDetails();
                        if expectedresult in actualresult:
                            tdkTestObj.setResultStatus("SUCCESS");
                            print "TEST STEP 6: Enable the WiFi Force Disable";
                            print "EXPECTED RESULT 6: Should enable Force Disable state";
                            print "ACTUAL RESULT 6: %s" %details;
                            print "[TEST EXECUTION RESULT] : SUCCESS";
                            # TEST STEP 7: 2.4GHz radio must now read back false
                            tdkTestObj = obj.createTestStep('WIFIAgent_Get');
                            tdkTestObj.addParameter("paramName","Device.WiFi.Radio.1.Enable")
                            tdkTestObj.executeTestCase("expectedresult");
                            actualresult = tdkTestObj.getResult();
                            Radio1 = tdkTestObj.getResultDetails();
                            if expectedresult in actualresult and "false" in Radio1:
                                Radio1 = Radio1.split("VALUE:")[1].split(" ")[0].strip();
                                tdkTestObj.setResultStatus("SUCCESS");
                                print "TEST STEP 7: Get the Radio Enable status for 2.4GHz as false";
                                print "EXPECTED RESULT 7: Should get the Radio Enable status for 2.4GHz as false";
                                print "ACTUAL RESULT 7: Radio Enable status for 2.4GHz state is %s" %Radio1;
                                print "[TEST EXECUTION RESULT] : SUCCESS";
                                # TEST STEP 8: 5GHz radio must now read back false
                                tdkTestObj = obj.createTestStep('WIFIAgent_Get');
                                tdkTestObj.addParameter("paramName","Device.WiFi.Radio.2.Enable")
                                tdkTestObj.executeTestCase("expectedresult");
                                actualresult = tdkTestObj.getResult();
                                Radio2 = tdkTestObj.getResultDetails();
                                if expectedresult in actualresult and "false" in Radio2:
                                    Radio2 = Radio2.split("VALUE:")[1].split(" ")[0].strip();
                                    tdkTestObj.setResultStatus("SUCCESS");
                                    print "TEST STEP 8: Get the Radio Enable status for 5GHz as false";
                                    print "EXPECTED RESULT 8: Should get the Radio Enable status for 5GHz as false";
                                    print "ACTUAL RESULT 8: Radio Enable status for 5GHz state is %s" %Radio2;
                                    print "[TEST EXECUTION RESULT] : SUCCESS";
                                else:
                                    tdkTestObj.setResultStatus("FAILURE");
                                    print "TEST STEP 8: Get the Radio Enable status for 5GHz as false";
                                    print "EXPECTED RESULT 8: Should get the Radio Enable status for 5GHz as false";
                                    print "ACTUAL RESULT 8: Radio Enable status for 5GHz state is %s" %Radio2;
                                    print "[TEST EXECUTION RESULT] : FAILURE";
                            else:
                                tdkTestObj.setResultStatus("FAILURE");
                                print "TEST STEP 7: Get the Radio Enable status for 2.4GHz as false";
                                print "EXPECTED RESULT 7: Should get the Radio Enable status for 2.4GHz as false";
                                print "ACTUAL RESULT 7: Radio Enable status for 2.4GHz state is %s" %Radio1;
                                print "[TEST EXECUTION RESULT] : FAILURE";
                            #Revert the value
                            # TEST STEP 9: restore ForceDisable to the state captured in step 5
                            tdkTestObj = obj.createTestStep('WIFIAgent_Set');
                            tdkTestObj.addParameter("paramName","Device.WiFi.X_RDK-CENTRAL_COM_ForceDisable")
                            tdkTestObj.addParameter("paramValue", default);
                            tdkTestObj.addParameter("paramType","boolean")
                            tdkTestObj.executeTestCase("expectedresult");
                            actualresult = tdkTestObj.getResult();
                            details = tdkTestObj.getResultDetails();
                            if expectedresult in actualresult:
                                tdkTestObj.setResultStatus("SUCCESS");
                                print "TEST STEP 9: Revert the WiFi Force Disable status to previous";
                                print "EXPECTED RESULT 9: Should disable WiFi Force Disable status to %s" %default;
                                print "ACTUAL RESULT 9: %s" %details;
                                print "[TEST EXECUTION RESULT] : SUCCESS";
                                # TEST STEP 10: 2.4GHz radio should be back to the step-3 value
                                tdkTestObj = obj.createTestStep('WIFIAgent_Get');
                                tdkTestObj.addParameter("paramName","Device.WiFi.Radio.1.Enable")
                                tdkTestObj.executeTestCase("expectedresult");
                                actualresult = tdkTestObj.getResult();
                                details = tdkTestObj.getResultDetails();
                                if expectedresult in actualresult and defaultRadio1 in details:
                                    details = details.split("VALUE:")[1].split(" ")[0].strip();
                                    tdkTestObj.setResultStatus("SUCCESS");
                                    print "TEST STEP 10: Check if Radio enable status for 2.4GHz is in previous state after reverting WiFi Force Disable";
                                    print "EXPECTED RESULT 10: Radio enable status for 2.4GHz should be in previous state after reverting WiFi Force Disable";
                                    print "ACTUAL RESULT 10: default value was :%s and after revertion %s" %(defaultRadio1,details)
                                    print "[TEST EXECUTION RESULT] : SUCCESS";
                                    # TEST STEP 11: 5GHz radio should be back to the step-4 value
                                    tdkTestObj = obj.createTestStep('WIFIAgent_Get');
                                    tdkTestObj.addParameter("paramName","Device.WiFi.Radio.2.Enable")
                                    tdkTestObj.executeTestCase("expectedresult");
                                    actualresult = tdkTestObj.getResult();
                                    details = tdkTestObj.getResultDetails();
                                    if expectedresult in actualresult and defaultRadio2 in details:
                                        details = details.split("VALUE:")[1].split(" ")[0].strip();
                                        tdkTestObj.setResultStatus("SUCCESS");
                                        print "TEST STEP 11: Check if Radio enable status for 5GHz is in previous state after reverting WiFi Force Disable";
                                        print "EXPECTED RESULT 11: Radio enable status for 5GHz should be in previous state after reverting WiFi Force Disable";
                                        print "ACTUAL RESULT 11: default value was :%s and after revertion %s" %(defaultRadio2,details)
                                        print "[TEST EXECUTION RESULT] : SUCCESS";
                                    else:
                                        tdkTestObj.setResultStatus("FAILURE");
                                        print "TEST STEP 11: Check if Radio enable status for 5GHz is in previous state after reverting WiFi Force Disable";
                                        print "EXPECTED RESULT 11: Radio enable status for 5GHz should be in previous state after reverting WiFi Force Disable";
                                        print "ACTUAL RESULT 11: default value was :%s and after revertion %s" %(defaultRadio2,details)
                                        print "[TEST EXECUTION RESULT] : FAILURE";
                                else:
                                    tdkTestObj.setResultStatus("FAILURE");
                                    print "TEST STEP 10: Check if Radio enable status for 2.4GHz is in previous state after reverting WiFi Force Disable";
                                    print "EXPECTED RESULT 10: Radio enable status for 2.4GHz should be in previous state after reverting WiFi Force Disable";
                                    print "ACTUAL RESULT 10: default value was :%s and after revertion %s" %(defaultRadio1,details)
                                    print "[TEST EXECUTION RESULT] : FAILURE";
                            else:
                                tdkTestObj.setResultStatus("FAILURE");
                                print "TEST STEP 9: Revert the WiFi Force Disable status to previous";
                                print "EXPECTED RESULT 9: Should disable WiFi Force Disable status to %s" %default;
                                print "ACTUAL RESULT 9: %s" %details;
                                print "[TEST EXECUTION RESULT] : FAILURE";
                        else:
                            tdkTestObj.setResultStatus("FAILURE");
                            print "TEST STEP 6: Enable the WiFi Force Disable";
                            print "EXPECTED RESULT 6: Should enable Force Disable state";
                            print "ACTUAL RESULT 6: %s" %details;
                            print "[TEST EXECUTION RESULT] : FAILURE";
                    else:
                        tdkTestObj.setResultStatus("FAILURE");
                        print "TEST STEP 5: Get the current WiFi Force Disable state";
                        print "EXPECTED RESULT 5: Should get current WiFi Force Disable state";
                        print "ACTUAL RESULT 5: current WiFi Force Disable state is %s" %default;
                        print "[TEST EXECUTION RESULT] : FAILURE";
                else:
                    tdkTestObj.setResultStatus("FAILURE");
                    print "TEST STEP 4: Get the Radio Enable status for 5GHz";
                    print "EXPECTED RESULT 4: Should get the Radio Enable status for 5GHz";
                    print "ACTUAL RESULT 4: Radio Enable status for 5GHz state is %s" %defaultRadio2;
                    print "[TEST EXECUTION RESULT] : FAILURE";
            else:
                tdkTestObj.setResultStatus("FAILURE");
                print "TEST STEP 3: Get the Radio Enable status for 2.4GHz";
                print "EXPECTED RESULT 3: Should get the Radio Enable status for 2.4GHz";
                print "ACTUAL RESULT 3: Radio Enable status for 2.4GHz state is %s" %defaultRadio1;
                print "[TEST EXECUTION RESULT] : FAILURE";
            #Revert to previous lan mode
            # TEST STEP 12: restore the LAN mode captured in step 1
            tdkTestObj = obj.createTestStep('WIFIAgent_Set');
            tdkTestObj.addParameter("paramName","Device.X_CISCO_COM_DeviceControl.LanManagementEntry.1.LanMode")
            tdkTestObj.addParameter("paramValue", defaultLANmode)
            tdkTestObj.addParameter("paramType","string")
            tdkTestObj.executeTestCase("expectedresult");
            actualresult = tdkTestObj.getResult();
            details = tdkTestObj.getResultDetails();
            if expectedresult in actualresult:
                #Set the result status of execution
                tdkTestObj.setResultStatus("SUCCESS");
                print "TEST STEP 12: Change lanmode to the previous"
                print "EXPECTED RESULT 12: Should change lanmode to %s" %defaultLANmode
                print "ACTUAL RESULT 12: Details: %s " %details;
                #Get the result of execution
                print "[TEST EXECUTION RESULT] : SUCCESS";
                #sleep for change in mode reflection
                sleep(90);
            else:
                #Set the result status of execution
                tdkTestObj.setResultStatus("FAILURE");
                print "TEST STEP 12: Change lanmode to the previous"
                print "EXPECTED RESULT 12: Should change lanmode to %s" %defaultLANmode
                print "ACTUAL RESULT 12: Details: %s " %details;
                #Get the result of execution
                print "[TEST EXECUTION RESULT] : FAILURE";
        else:
            #Set the result status of execution
            tdkTestObj.setResultStatus("FAILURE");
            print "TEST STEP 2: Change lanmode to %s" %mode
            print "EXPECTED RESULT 2: Should change lanmode to %s" %mode
            print "ACTUAL RESULT 2: Details: %s " %details;
            #Get the result of execution
            print "[TEST EXECUTION RESULT] : FAILURE";
    else:
        tdkTestObj.setResultStatus("FAILURE");
        print "TEST STEP 1: Get the current LAN mode";
        print "EXPECTED RESULT 1: Should get the current LAN mode";
        print "ACTUAL RESULT 1: default LAN mode is %s" %defaultLANmode;
        print "[TEST EXECUTION RESULT] : FAILURE";
    obj.unloadModule("wifiagent")
else:
    print "Failed to load wifiagent module";
    obj.setLoadModuleStatus("FAILURE");
| 59.325069
| 152
| 0.611563
| 2,310
| 21,535
| 5.645022
| 0.12684
| 0.033129
| 0.041718
| 0.04908
| 0.766564
| 0.748313
| 0.72477
| 0.717408
| 0.704064
| 0.681672
| 0
| 0.016214
| 0.295472
| 21,535
| 362
| 153
| 59.48895
| 0.843264
| 0.061017
| 0
| 0.857143
| 0
| 0.016807
| 0.378729
| 0.031211
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.008403
| null | null | 0.407563
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
afaf0c0d22e6116d8d6a7dbdab58a5db57b0b80d
| 43,419
|
py
|
Python
|
graphene_django_cud/mutations.py
|
martasd/graphene-django-cud
|
29f62579e1f5cc6c3130a4a8e13cf8059ccb19f2
|
[
"MIT"
] | null | null | null |
graphene_django_cud/mutations.py
|
martasd/graphene-django-cud
|
29f62579e1f5cc6c3130a4a8e13cf8059ccb19f2
|
[
"MIT"
] | null | null | null |
graphene_django_cud/mutations.py
|
martasd/graphene-django-cud
|
29f62579e1f5cc6c3130a4a8e13cf8059ccb19f2
|
[
"MIT"
] | null | null | null |
from collections import OrderedDict
import graphene
from django.core.exceptions import FieldDoesNotExist, ObjectDoesNotExist
from django.db import models, transaction
from graphene import Mutation, InputObjectType
from graphene.types.mutation import MutationOptions
from graphene.types.utils import yank_fields_from_attrs
from graphene.utils.str_converters import to_snake_case
from graphene_django.registry import get_global_registry
from graphql import GraphQLError
from graphql_relay import to_global_id
from graphene_django_cud.registry import get_type_meta_registry
from .util import disambiguate_id, disambiguate_ids, get_input_fields_for_model, \
get_all_optional_input_fields_for_model, is_many_to_many, get_m2m_all_extras_field_names, \
get_likely_operation_from_name, get_fk_all_extras_field_names, get_filter_fields_input_args
# Module-level handle to the shared type-meta registry: maps generated input
# type names to their meta (auto context fields, relation extras, etc.).
meta_registry = get_type_meta_registry()
class DjangoCudBase(Mutation):
    """Shared base class for the generated create/update/patch mutations.

    Provides the machinery that turns an InputObjectType instance into model
    writes, including resolving or creating related objects declared through
    the many_to_many/many_to_one/foreign_key "extras" configuration.
    """
    class Meta:
        abstract = True

    @classmethod
    def get_or_create_foreign_obj(
        cls,
        field,   # the model's ForeignKey field being populated
        value,   # an ID, or nested input data when a type is declared
        data,    # the extras declaration for this field
        info
    ):
        """Resolve a foreign-key input to an id: pass IDs through, or create
        a nested object from input data and return its id."""
        field_type = data.get('type', 'ID')
        if field_type == "ID":
            return value
        else:
            input_type_meta = meta_registry.get_meta_for_type(field_type)
            # Create new obj
            related_obj = cls.create_obj(
                value,
                info,
                input_type_meta.get('auto_context_fields', {}),
                input_type_meta.get('many_to_many_extras', {}),
                input_type_meta.get('foreign_key_extras', {}),
                input_type_meta.get('many_to_one_extras', {}),
                field.related_model
            )
            return related_obj.id

    @classmethod
    def get_or_create_m2m_objs(
        cls,
        field,
        values,
        data,
        operation,  # "add"/"remove"; not used here, the caller dispatches on it
        info
    ):
        """Resolve a list of many-to-many inputs into model instances.

        IDs are looked up on the related model; nested inputs are created via
        create_obj using the registered meta for the declared input type.
        """
        results = []
        if not values:
            return results
        if isinstance(data, bool):
            # A bare boolean declaration means "use defaults" (ID semantics).
            data = {}
        field_type = data.get('type', 'ID')
        for value in values:
            if field_type == "ID":
                related_obj = field.related_model.objects.get(pk=disambiguate_id(value))
            else:
                # This is something that we are going to create
                input_type_meta = meta_registry.get_meta_for_type(field_type)
                # Create new obj
                related_obj = cls.create_obj(
                    value,
                    info,
                    input_type_meta.get('auto_context_fields', {}),
                    input_type_meta.get('many_to_many_extras', {}),
                    input_type_meta.get('foreign_key_extras', {}),
                    input_type_meta.get('many_to_one_extras', {}),
                    field.related_model
                )
            results.append(related_obj)
        return results

    @classmethod
    def get_or_create_m2o_objs(
        cls,
        obj,     # the parent instance the reverse-FK children point at
        field,
        values,
        data,
        operation,
        info,
        Model
    ):
        """Resolve many-to-one (reverse foreign key) inputs.

        NOTE(review): `results` is never appended to and the method
        unconditionally returns []. For the "auto" case the created children
        are already linked to `obj` via the FK value set below, so the
        caller's `.add()` of the empty list is a no-op; for the "ID" case the
        fetched object is discarded. Confirm this is intended.
        """
        results = []
        if not values:
            return results
        field_type = data.get('type', 'auto')
        for value in values:
            if field_type == "ID":
                related_obj = field.related_model.objects.get(pk=disambiguate_id(value))
            elif field_type == "auto":
                # In this case, a new type has been created for us. Let's first find it's name,
                # then get it's meta, and then create it. We also need to attach the obj as the
                # foreign key.
                _type_name = data.get('type_name', f"Create{Model.__name__}{field.name.capitalize()}")
                # NOTE(review): meta is looked up for the literal "auto"
                # rather than _type_name — verify against the registry.
                input_type_meta = meta_registry.get_meta_for_type(field_type)
                # .id has to be called here, as the regular input for a foreignkey is ID!
                value[field.field.name] = obj.id
                related_obj = cls.create_obj(
                    value,
                    info,
                    input_type_meta.get('auto_context_fields', {}),
                    input_type_meta.get('many_to_many_extras', {}),
                    input_type_meta.get('foreign_key_extras', {}),
                    input_type_meta.get('many_to_one_extras', {}),
                    field.related_model
                )
            else:
                # This is something that we are going to create
                input_type_meta = meta_registry.get_meta_for_type(field_type)
                # Create new obj
                related_obj = cls.create_obj(
                    value,
                    info,
                    input_type_meta.get('auto_context_fields', {}),
                    input_type_meta.get('many_to_many_extras', {}),
                    input_type_meta.get('foreign_key_extras', {}),
                    input_type_meta.get('many_to_one_extras', {}),
                    field.related_model
                )
        return []

    @classmethod
    def create_obj(
        cls,
        input,   # the InputObjectType instance (shadows the builtin; kept for API stability)
        info,
        auto_context_fields,
        many_to_many_extras,
        foreign_key_extras,
        many_to_one_extras,
        Model
    ):
        """Create a new Model instance from `input`.

        Order matters: foreign keys are resolved/created first (they are
        needed by Model.objects.create), then the object itself, then m2m
        values and finally the m2m/m2o extras operations.
        """
        meta_registry = get_type_meta_registry()
        model_field_values = {}
        many_to_many_values = {}
        many_to_many_extras_field_names = get_m2m_all_extras_field_names(many_to_many_extras)
        many_to_one_extras_field_names = get_m2m_all_extras_field_names(many_to_one_extras)  # The layout is the same as for m2m
        foreign_key_extras_field_names = get_fk_all_extras_field_names(foreign_key_extras)
        # Seed values from the request context (e.g. the current user).
        for field_name, context_name in auto_context_fields.items():
            if hasattr(info.context, context_name):
                model_field_values[field_name] = getattr(info.context, context_name)
        # NOTE(review): super(type(input), input).items() appears to bypass an
        # items() override on the input type — confirm against the input
        # type's MRO.
        for name, value in super(type(input), input).items():
            # Handle these separately
            if name in many_to_many_extras_field_names or name in foreign_key_extras_field_names or name in many_to_one_extras_field_names:
                continue
            field = Model._meta.get_field(name)
            new_value = value
            # We have to handle this case specifically, by using the fields
            # .set()-method, instead of direct assignment
            field_is_many_to_many = is_many_to_many(field)
            value_handle_name = "handle_" + name
            if hasattr(cls, value_handle_name):
                handle_func = getattr(cls, value_handle_name)
                assert callable(
                    handle_func
                ), f"Property {value_handle_name} on {cls.__name__} is not a function."
                new_value = handle_func(value, name, info)
            # On some fields we perform some default conversion, if the value was not transformed above.
            if new_value == value and value is not None:
                if type(field) in (models.ForeignKey, models.OneToOneField):
                    # Delete auto context field here, if it exists. We have to do this explicitly
                    # as we change the name below
                    if name in auto_context_fields:
                        del model_field_values[name]
                    name = getattr(field, "db_column", None) or name + "_id"
                    new_value = disambiguate_id(value)
                elif field_is_many_to_many:
                    new_value = disambiguate_ids(value)
            if field_is_many_to_many:
                many_to_many_values[name] = new_value
            else:
                model_field_values[name] = new_value
        # We don't have an object yet, and we potentially need to create
        # parents before proceeding.
        for name, extras in foreign_key_extras.items():
            value = input.get(name, None)
            field = Model._meta.get_field(name)
            obj_id = cls.get_or_create_foreign_obj(
                field,
                value,
                extras,
                info
            )
            model_field_values[name + "_id"] = obj_id
        # Foreign keys are added, we are ready to create our object
        obj = Model.objects.create(**model_field_values)
        for name, values in many_to_many_values.items():
            getattr(obj, name).set(values)
        # Handle extras fields
        many_to_many_to_add = {}
        many_to_many_to_remove = {}
        for name, extras in many_to_many_extras.items():
            field = Model._meta.get_field(name)
            if not name in many_to_many_to_add:
                many_to_many_to_add[name] = []
                many_to_many_to_remove[name] = []
            for extra_name, data in extras.items():
                field_name = name
                if extra_name != "exact":
                    field_name = name + "_" + extra_name
                values = input.get(field_name, None)
                if isinstance(data, bool):
                    data = {}
                operation = data.get('operation') or get_likely_operation_from_name(extra_name)
                objs = cls.get_or_create_m2m_objs(
                    field,
                    values,
                    data,
                    operation,
                    info
                )
                if len(objs) > 0:
                    if operation == "add":
                        many_to_many_to_add[name] += objs
                    else:
                        many_to_many_to_remove[name] += objs
        many_to_one_to_add = {}
        many_to_one_to_remove = {}
        for name, extras in many_to_one_extras.items():
            field = Model._meta.get_field(name)
            if not name in many_to_one_to_add:
                many_to_one_to_add[name] = []
                many_to_one_to_remove[name] = []
            for extra_name, data in extras.items():
                field_name = name
                if extra_name != "exact":
                    field_name = name + "_" + extra_name
                values = input.get(field_name, None)
                if isinstance(data, bool):
                    data = {}
                operation = data.get('operation') or get_likely_operation_from_name(extra_name)
                if operation == "add":
                    objs = cls.get_or_create_m2o_objs(
                        obj,
                        field,
                        values,
                        data,
                        operation,
                        info,
                        Model
                    )
                    many_to_one_to_add[name] += objs
                else:
                    many_to_one_to_remove[name] += disambiguate_ids(values)
        for name, objs in many_to_one_to_add.items():
            getattr(obj, name).add(*objs)
        for name, objs in many_to_one_to_remove.items():
            # Only nullable foreign key reverse rels have the remove method,
            # so we use this method instead
            getattr(obj, name).filter(id__in=objs).delete()
        for name, objs in many_to_many_to_add.items():
            getattr(obj, name).add(*objs)
        for name, objs in many_to_many_to_remove.items():
            getattr(obj, name).remove(*objs)
        return obj

    @classmethod
    def update_obj(
        cls,
        obj,
        input,
        info,
        auto_context_fields,
        many_to_many_extras,
        foreign_key_extras,
        many_to_one_extras,
        Model
    ):
        """Apply `input` onto an existing instance `obj`.

        Mirrors create_obj but assigns with setattr instead of building a
        kwargs dict; the caller is responsible for obj.save().
        """
        many_to_many_values = {}
        many_to_many_add_values = {}
        many_to_many_remove_values = {}
        many_to_many_extras_field_names = get_m2m_all_extras_field_names(many_to_many_extras)
        many_to_one_extras_field_names = get_m2m_all_extras_field_names(many_to_one_extras)  # The layout is the same as for m2m
        foreign_key_extras_field_names = get_fk_all_extras_field_names(foreign_key_extras)
        # Overwrite auto-context fields from the request context.
        for field_name, context_name in auto_context_fields.items():
            if hasattr(info.context, context_name):
                setattr(obj, field_name, getattr(info.context, context_name))
        for name, value in super(type(input), input).items():
            # Handle these separately
            if name in many_to_many_extras_field_names or name in foreign_key_extras_field_names or name in many_to_one_extras_field_names:
                continue
            field = Model._meta.get_field(name)
            new_value = value
            # We have to handle this case specifically, by using the fields
            # .set()-method, instead of direct assignment
            field_is_many_to_many = is_many_to_many(field)
            value_handle_name = "handle_" + name
            if hasattr(cls, value_handle_name):
                handle_func = getattr(cls, value_handle_name)
                assert callable(
                    handle_func
                ), f"Property {value_handle_name} on {cls.__name__} is not a function."
                new_value = handle_func(value, name, info)
            # On some fields we perform some default conversion, if the value was not transformed above.
            if new_value == value and value is not None:
                if type(field) in (models.ForeignKey, models.OneToOneField):
                    # Delete auto context field here, if it exists. We have to do this explicitly
                    # as we change the name below
                    if name in auto_context_fields:
                        setattr(obj, name, None)
                    name = getattr(field, "db_column", None) or name + "_id"
                    new_value = disambiguate_id(value)
                elif field_is_many_to_many:
                    new_value = disambiguate_ids(value)
            if field_is_many_to_many:
                many_to_many_values[name] = new_value
            else:
                setattr(obj, name, new_value)
        # Handle extras fields
        for name, extras in foreign_key_extras.items():
            value = input.get(name, None)
            field = Model._meta.get_field(name)
            obj_id = cls.get_or_create_foreign_obj(
                field,
                value,
                extras,
                info
            )
            setattr(obj, name + "_id", obj_id)
        many_to_many_to_add = {}
        many_to_many_to_remove = {}
        for name, extras in many_to_many_extras.items():
            field = Model._meta.get_field(name)
            if not name in many_to_many_to_add:
                many_to_many_to_add[name] = []
                many_to_many_to_remove[name] = []
            for extra_name, data in extras.items():
                field_name = name
                if extra_name != "exact":
                    field_name = name + "_" + extra_name
                values = input.get(field_name, None)
                if isinstance(data, bool):
                    data = {}
                operation = data.get('operation') or get_likely_operation_from_name(extra_name)
                objs = cls.get_or_create_m2m_objs(
                    field,
                    values,
                    data,
                    operation,
                    info
                )
                if operation == "add":
                    many_to_many_to_add[name] += objs
                else:
                    many_to_many_to_remove[name] += objs
        many_to_one_to_add = {}
        many_to_one_to_remove = {}
        for name, extras in many_to_one_extras.items():
            field = Model._meta.get_field(name)
            if not name in many_to_one_to_add:
                many_to_one_to_add[name] = []
                many_to_one_to_remove[name] = []
            for extra_name, data in extras.items():
                field_name = name
                if extra_name != "exact":
                    field_name = name + "_" + extra_name
                values = input.get(field_name, None)
                if isinstance(data, bool):
                    data = {}
                operation = data.get('operation') or get_likely_operation_from_name(extra_name)
                if operation == "add":
                    objs = cls.get_or_create_m2o_objs(
                        obj,
                        field,
                        values,
                        data,
                        operation,
                        info,
                        Model
                    )
                    many_to_one_to_add[name] += objs
                else:
                    many_to_one_to_remove[name] += disambiguate_ids(values)
        for name, objs in many_to_one_to_add.items():
            getattr(obj, name).add(*objs)
        for name, objs in many_to_one_to_remove.items():
            # Only nullable foreign key reverse rels have the remove method,
            # so we use this method instead
            getattr(obj, name).filter(id__in=objs).delete()
        for name, objs in many_to_many_to_add.items():
            getattr(obj, name).add(*objs)
        for name, objs in many_to_many_to_remove.items():
            getattr(obj, name).remove(*objs)
        return obj
class DjangoUpdateMutationOptions(MutationOptions):
    """Options container populated by DjangoUpdateMutation.__init_subclass_with_meta__."""

    # Target model and field selection
    model = None
    only_fields = None
    exclude_fields = None
    optional_fields = ()
    required_fields = None
    nested_fields = None

    # Access control
    permissions = None
    login_required = None

    # Naming and request-context configuration
    return_field_name = None
    type_name = None
    auto_context_fields = None

    # Relation "extras" declarations
    many_to_many_extras = None
    many_to_one_extras = None
    foreign_key_extras = None
class DjangoUpdateMutation(DjangoCudBase):
    """Abstract mutation that loads a model instance by global/primary id and
    applies a full update built from a generated InputObjectType."""
    class Meta:
        abstract = True

    @classmethod
    def __init_subclass_with_meta__(
        cls,
        model=None,
        permissions=None,
        login_required=None,
        only_fields=(),
        exclude_fields=(),
        # NOTE(review): mutable default; it is only read here, but a
        # None-sentinel would be safer.
        auto_context_fields={},
        optional_fields=(),
        required_fields=(),
        return_field_name=None,
        many_to_many_extras=None,
        many_to_one_extras=None,
        foreign_key_extras=None,
        type_name="",
        **kwargs,
    ):
        """Build the Update<Model>Input type, register its meta, and assemble
        the mutation's arguments and output fields."""
        registry = get_global_registry()
        meta_registry = get_type_meta_registry()
        model_type = registry.get_type_for_model(model)
        assert model_type, f"Model type must be registered for model {model}"
        if not return_field_name:
            # Default the payload field name to the snake_cased model name.
            return_field_name = to_snake_case(model.__name__)
        if many_to_one_extras is None:
            many_to_one_extras = {}
        if foreign_key_extras is None:
            foreign_key_extras = {}
        if many_to_many_extras is None:
            many_to_many_extras = {}
        input_type_name = type_name or f"Update{model.__name__}Input"
        model_fields = get_input_fields_for_model(
            model,
            only_fields,
            exclude_fields,
            # Auto-context fields are filled from the request, so they must
            # be optional in the input type.
            optional_fields=tuple(auto_context_fields.keys()) + optional_fields,
            required_fields=required_fields,
            many_to_many_extras=many_to_many_extras,
            foreign_key_extras=foreign_key_extras,
            many_to_one_extras=many_to_one_extras,
            parent_type_name=input_type_name
        )
        InputType = type(
            input_type_name, (InputObjectType,), model_fields
        )
        # Register meta-data
        meta_registry.register(
            input_type_name,
            {
                'auto_context_fields': auto_context_fields or {},
                'optional_fields': optional_fields,
                'required_fields': required_fields,
                'many_to_many_extras': many_to_many_extras or {},
                'many_to_one_extras': many_to_one_extras or {},
                'foreign_key_extras': foreign_key_extras or {}
            }
        )
        registry.register_converted_field(
            input_type_name,
            InputType
        )
        arguments = OrderedDict(
            id=graphene.ID(required=True), input=InputType(required=True)
        )
        output_fields = OrderedDict()
        output_fields[return_field_name] = graphene.Field(model_type)
        _meta = DjangoUpdateMutationOptions(cls)
        _meta.model = model
        _meta.fields = yank_fields_from_attrs(output_fields, _as=graphene.Field)
        _meta.return_field_name = return_field_name
        _meta.permissions = permissions
        _meta.auto_context_fields = auto_context_fields or {}
        _meta.optional_fields = optional_fields
        _meta.required_fields = required_fields
        _meta.InputType = InputType
        _meta.input_type_name = input_type_name
        _meta.many_to_many_extras = many_to_many_extras
        _meta.many_to_one_extras = many_to_one_extras
        _meta.foreign_key_extras = foreign_key_extras
        # Having permissions implies requiring login.
        _meta.login_required = _meta.login_required or (
            _meta.permissions and len(_meta.permissions) > 0
        )
        super().__init_subclass_with_meta__(arguments=arguments, _meta=_meta, **kwargs)

    def get_queryset(self):
        # NOTE(review): invoked below as cls.get_queryset(Model), so `self`
        # here is actually the model class; this appears to rely on
        # Model._meta.model resolving back to the model — consider making
        # this a classmethod. Confirm before changing.
        Model = self._meta.model
        return Model.objects

    @classmethod
    def mutate(cls, root, info, id, input):
        """Check auth/permissions, load the target object, apply the update,
        save, and return the mutation payload."""
        if cls._meta.login_required and not info.context.user.is_authenticated:
            raise GraphQLError("Must be logged in to access this mutation.")
        if cls._meta.permissions and len(cls._meta.permissions) > 0:
            if not info.context.user.has_perms(cls._meta.permissions):
                raise GraphQLError("Not permitted to access this mutation.")
        id = disambiguate_id(id)
        Model = cls._meta.model
        queryset = cls.get_queryset(Model)
        obj = queryset.get(pk=id)
        auto_context_fields = cls._meta.auto_context_fields or {}
        obj = cls.update_obj(
            obj,
            input,
            info,
            auto_context_fields,
            cls._meta.many_to_many_extras,
            cls._meta.foreign_key_extras,
            cls._meta.many_to_one_extras,
            Model
        )
        obj.save()
        kwargs = {cls._meta.return_field_name: obj}
        return cls(**kwargs)
class DjangoPatchMutationOptions(MutationOptions):
    """Options container populated by DjangoPatchMutation.__init_subclass_with_meta__."""

    # Target model and field selection
    model = None
    only_fields = None
    exclude_fields = None

    # Access control
    permissions = None
    login_required = None

    # Naming and request-context configuration
    return_field_name = None
    type_name = None
    auto_context_fields = None

    # Relation "extras" declarations
    many_to_many_extras = None
    many_to_one_extras = None
    foreign_key_extras = None
class DjangoPatchMutation(DjangoCudBase):
    """Abstract mutation like DjangoUpdateMutation, but every input field is
    optional (partial update)."""
    class Meta:
        abstract = True

    @classmethod
    def __init_subclass_with_meta__(
        cls,
        model=None,
        permissions=None,
        login_required=None,
        only_fields=(),
        exclude_fields=(),
        return_field_name=None,
        # NOTE(review): mutable default; only read here, but a None-sentinel
        # would be safer.
        auto_context_fields={},
        many_to_one_extras = None,
        many_to_many_extras = None,
        foreign_key_extras = None,
        type_name=None,
        **kwargs,
    ):
        """Build the Patch<Model>Input type (all fields optional), register
        its meta, and assemble the mutation's arguments and output fields."""
        registry = get_global_registry()
        meta_registry = get_type_meta_registry()
        model_type = registry.get_type_for_model(model)
        assert model_type, f"Model type must be registered for model {model}"
        if not return_field_name:
            # Default the payload field name to the snake_cased model name.
            return_field_name = to_snake_case(model.__name__)
        if many_to_one_extras is None:
            many_to_one_extras = {}
        if foreign_key_extras is None:
            foreign_key_extras = {}
        if many_to_many_extras is None:
            many_to_many_extras = {}
        input_type_name = type_name or f"Patch{model.__name__}Input"
        model_fields = get_all_optional_input_fields_for_model(
            model,
            only_fields,
            exclude_fields,
            many_to_many_extras=many_to_many_extras,
            foreign_key_extras=foreign_key_extras,
            many_to_one_extras=many_to_one_extras,
            # NOTE(review): passes the raw type_name (possibly None) rather
            # than input_type_name, unlike the Update variant — confirm
            # whether this is intentional.
            parent_type_name=type_name,
        )
        InputType = type(
            input_type_name, (InputObjectType,), model_fields
        )
        # Register meta-data
        meta_registry.register(
            input_type_name,
            {
                'auto_context_fields': auto_context_fields or {},
                'many_to_many_extras': many_to_many_extras or {},
                'many_to_one_extras': many_to_one_extras or {},
                'foreign_key_extras': foreign_key_extras or {}
            }
        )
        registry.register_converted_field(
            input_type_name,
            InputType
        )
        arguments = OrderedDict(
            id=graphene.ID(required=True), input=InputType(required=True)
        )
        output_fields = OrderedDict()
        output_fields[return_field_name] = graphene.Field(model_type)
        _meta = DjangoPatchMutationOptions(cls)
        _meta.model = model
        _meta.fields = yank_fields_from_attrs(output_fields, _as=graphene.Field)
        _meta.return_field_name = return_field_name
        _meta.permissions = permissions
        _meta.auto_context_fields = auto_context_fields or {}
        _meta.InputType = InputType
        _meta.input_type_name = input_type_name
        _meta.many_to_many_extras = many_to_many_extras
        _meta.many_to_one_extras = many_to_one_extras
        _meta.foreign_key_extras = foreign_key_extras
        # Having permissions implies requiring login.
        _meta.login_required = _meta.login_required or (
            _meta.permissions and len(_meta.permissions) > 0
        )
        super().__init_subclass_with_meta__(arguments=arguments, _meta=_meta, **kwargs)

    def get_queryset(self):
        # NOTE(review): invoked below as cls.get_queryset(Model), so `self`
        # here is actually the model class (same pattern as
        # DjangoUpdateMutation) — consider making this a classmethod.
        Model = self._meta.model
        return Model.objects

    @classmethod
    def mutate(cls, root, info, id, input):
        """Check auth/permissions, load the target object, apply the partial
        update, save, and return the mutation payload."""
        if cls._meta.login_required and not info.context.user.is_authenticated:
            raise GraphQLError("Must be logged in to access this mutation.")
        if cls._meta.permissions and len(cls._meta.permissions) > 0:
            if not info.context.user.has_perms(cls._meta.permissions):
                raise GraphQLError("Not permitted to access this mutation.")
        id = disambiguate_id(id)
        Model = cls._meta.model
        queryset = cls.get_queryset(Model)
        obj = queryset.get(pk=id)
        auto_context_fields = cls._meta.auto_context_fields or {}
        obj = cls.update_obj(
            obj,
            input,
            info,
            auto_context_fields,
            cls._meta.many_to_many_extras,
            cls._meta.foreign_key_extras,
            cls._meta.many_to_one_extras,
            Model
        )
        obj.save()
        kwargs = {cls._meta.return_field_name: obj}
        return cls(**kwargs)
class DjangoCreateMutationOptions(MutationOptions):
    """Options container populated by DjangoCreateMutation.__init_subclass_with_meta__."""

    # Target model and field selection
    model = None
    only_fields = None
    exclude_fields = None
    optional_fields = ()
    required_fields = ()

    # Access control
    permissions = None
    login_required = None

    # Naming and request-context configuration
    return_field_name = None
    type_name = None
    auto_context_fields = None

    # Relation "extras" declarations
    many_to_many_extras = None
    many_to_one_extras = None
    foreign_key_extras = None
class DjangoCreateMutation(DjangoCudBase):
    """Mutation that creates a single instance of a Django model.

    A subclass declares ``class Meta`` with at least ``model``; an input type
    named ``Create<Model>Input`` (or *type_name*) is generated from the model
    fields and registered in the global type registries.
    """

    class Meta:
        abstract = True

    @classmethod
    def __init_subclass_with_meta__(
        cls,
        model=None,
        permissions=None,
        login_required=None,
        only_fields=(),
        exclude_fields=(),
        optional_fields=(),
        required_fields=(),
        auto_context_fields=None,
        return_field_name=None,
        many_to_many_extras=None,
        foreign_key_extras=None,
        many_to_one_extras=None,
        type_name=None,
        **kwargs,
    ):
        registry = get_global_registry()
        meta_registry = get_type_meta_registry()
        model_type = registry.get_type_for_model(model)

        # Normalize defaults. ``auto_context_fields`` used to default to a
        # shared literal ``{}`` (mutable-default pitfall) — now a None sentinel.
        if auto_context_fields is None:
            auto_context_fields = {}
        if many_to_one_extras is None:
            many_to_one_extras = {}
        if foreign_key_extras is None:
            foreign_key_extras = {}
        if many_to_many_extras is None:
            many_to_many_extras = {}

        assert model_type, f"Model type must be registered for model {model}"

        if not return_field_name:
            return_field_name = to_snake_case(model.__name__)

        input_type_name = type_name or f"Create{model.__name__}Input"

        model_fields = get_input_fields_for_model(
            model,
            only_fields,
            exclude_fields,
            tuple(auto_context_fields.keys()) + optional_fields,
            required_fields,
            many_to_many_extras,
            foreign_key_extras,
            many_to_one_extras,
            parent_type_name=input_type_name,
        )

        InputType = type(input_type_name, (InputObjectType,), model_fields)

        # Register meta-data so nested/related input types can look it up.
        meta_registry.register(
            input_type_name,
            {
                'auto_context_fields': auto_context_fields or {},
                'optional_fields': optional_fields,
                'required_fields': required_fields,
                'many_to_many_extras': many_to_many_extras or {},
                'foreign_key_extras': foreign_key_extras or {},
            },
        )
        registry.register_converted_field(input_type_name, InputType)

        arguments = OrderedDict(input=InputType(required=True))

        output_fields = OrderedDict()
        output_fields[return_field_name] = graphene.Field(model_type)

        _meta = DjangoCreateMutationOptions(cls)
        _meta.model = model
        _meta.fields = yank_fields_from_attrs(output_fields, _as=graphene.Field)
        _meta.return_field_name = return_field_name
        _meta.optional_fields = optional_fields
        _meta.required_fields = required_fields
        _meta.permissions = permissions
        _meta.auto_context_fields = auto_context_fields or {}
        _meta.many_to_many_extras = many_to_many_extras or {}
        _meta.foreign_key_extras = foreign_key_extras
        _meta.many_to_one_extras = many_to_one_extras or {}
        _meta.InputType = InputType
        _meta.input_type_name = input_type_name
        # Fix: honor the explicit ``login_required`` argument — it was
        # previously ignored (``_meta.login_required`` was read before being
        # set from the parameter). Requiring permissions still implies login.
        _meta.login_required = login_required or (
            _meta.permissions and len(_meta.permissions) > 0
        )

        super().__init_subclass_with_meta__(arguments=arguments, _meta=_meta, **kwargs)

    @classmethod
    def mutate(cls, root, info, input):
        """Create the object from *input*; return it under the configured field name."""
        if cls._meta.login_required and not info.context.user.is_authenticated:
            raise GraphQLError("Must be logged in to access this mutation.")
        if cls._meta.permissions and len(cls._meta.permissions) > 0:
            if not info.context.user.has_perms(cls._meta.permissions):
                raise GraphQLError("Not permitted to access this mutation.")

        Model = cls._meta.model
        obj = cls.create_obj(
            input,
            info,
            cls._meta.auto_context_fields or {},
            cls._meta.many_to_many_extras,
            cls._meta.foreign_key_extras,
            cls._meta.many_to_one_extras,
            Model,
        )
        return cls(**{cls._meta.return_field_name: obj})
class DjangoBatchCreateMutationOptions(MutationOptions):
    """Options container populated by DjangoBatchCreateMutation.__init_subclass_with_meta__."""

    model = None                # Django model the mutation creates
    only_fields = None          # whitelist of model fields for the input type
    exclude_fields = None       # blacklist of model fields for the input type
    return_field_name = None    # output field name holding the created objects
    permissions = None          # permissions checked in mutate()
    login_required = None       # whether mutate() requires an authenticated user
    auto_context_fields = None  # passed through to create_obj; presumably filled from request context — confirm
    optional_fields = ()
    required_fields = ()
    many_to_many_extras = None
    many_to_one_extras = None
    foreign_key_extras = None
    type_name = None            # explicit name for the generated InputType
    use_type_name = None        # name of an already-registered input type to reuse
class DjangoBatchCreateMutation(DjangoCudBase):
    """Mutation that creates many instances of a Django model in one call.

    The input is a list of input objects; all instances are created inside a
    single transaction, so either all of them are created or none are.
    """

    class Meta:
        abstract = True

    @classmethod
    def __init_subclass_with_meta__(
        cls,
        model=None,
        permissions=None,
        login_required=None,
        only_fields=(),
        exclude_fields=(),
        optional_fields=(),
        required_fields=(),
        auto_context_fields=None,
        return_field_name=None,
        many_to_many_extras=None,
        foreign_key_extras=None,
        many_to_one_extras=None,
        type_name=None,
        use_type_name=None,
        **kwargs,
    ):
        registry = get_global_registry()
        meta_registry = get_type_meta_registry()
        model_type = registry.get_type_for_model(model)

        # Normalize defaults. ``auto_context_fields`` used to default to a
        # shared literal ``{}`` (mutable-default pitfall) — now a None sentinel.
        if auto_context_fields is None:
            auto_context_fields = {}
        if many_to_one_extras is None:
            many_to_one_extras = {}
        if foreign_key_extras is None:
            foreign_key_extras = {}
        if many_to_many_extras is None:
            many_to_many_extras = {}

        assert model_type, f"Model type must be registered for model {model}"

        if not return_field_name:
            # Pluralize the default field name — the payload holds a list.
            return_field_name = to_snake_case(model.__name__) + "s"

        if use_type_name:
            # Reuse an input type registered earlier instead of generating one.
            input_type_name = use_type_name
            InputType = registry.get_converted_field(input_type_name)
            if not InputType:
                raise GraphQLError(f"Could not find input type with name {input_type_name}")
        else:
            input_type_name = type_name or f"BatchCreate{model.__name__}Input"

            model_fields = get_input_fields_for_model(
                model,
                only_fields,
                exclude_fields,
                tuple(auto_context_fields.keys()) + optional_fields,
                required_fields,
                many_to_many_extras,
                foreign_key_extras,
                many_to_one_extras,
                parent_type_name=input_type_name,
            )

            InputType = type(input_type_name, (InputObjectType,), model_fields)

            # Register meta-data so nested/related input types can look it up.
            meta_registry.register(
                input_type_name,
                {
                    'auto_context_fields': auto_context_fields or {},
                    'optional_fields': optional_fields,
                    'required_fields': required_fields,
                    'many_to_many_extras': many_to_many_extras or {},
                    'foreign_key_extras': foreign_key_extras or {},
                },
            )
            registry.register_converted_field(input_type_name, InputType)

        arguments = OrderedDict(input=graphene.List(InputType, required=True))

        output_fields = OrderedDict()
        output_fields[return_field_name] = graphene.List(model_type)

        _meta = DjangoBatchCreateMutationOptions(cls)
        _meta.model = model
        _meta.fields = yank_fields_from_attrs(output_fields, _as=graphene.Field)
        _meta.return_field_name = return_field_name
        _meta.optional_fields = optional_fields
        _meta.required_fields = required_fields
        _meta.permissions = permissions
        _meta.auto_context_fields = auto_context_fields or {}
        _meta.many_to_many_extras = many_to_many_extras or {}
        _meta.foreign_key_extras = foreign_key_extras
        _meta.many_to_one_extras = many_to_one_extras or {}
        _meta.InputType = InputType
        _meta.input_type_name = input_type_name
        # Fix: honor the explicit ``login_required`` argument — it was
        # previously ignored. Requiring permissions still implies login.
        _meta.login_required = login_required or (
            _meta.permissions and len(_meta.permissions) > 0
        )

        super().__init_subclass_with_meta__(arguments=arguments, _meta=_meta, **kwargs)

    @classmethod
    def mutate(cls, root, info, input):
        """Create one object per entry in *input*, atomically; return them all."""
        if cls._meta.login_required and not info.context.user.is_authenticated:
            raise GraphQLError("Must be logged in to access this mutation.")
        if cls._meta.permissions and len(cls._meta.permissions) > 0:
            if not info.context.user.has_perms(cls._meta.permissions):
                raise GraphQLError("Not permitted to access this mutation.")

        Model = cls._meta.model
        auto_context_fields = cls._meta.auto_context_fields or {}

        created_objs = []
        # One transaction for the whole batch: all-or-nothing semantics.
        with transaction.atomic():
            for data in input:
                obj = cls.create_obj(
                    data,
                    info,
                    auto_context_fields,
                    cls._meta.many_to_many_extras,
                    cls._meta.foreign_key_extras,
                    cls._meta.many_to_one_extras,
                    Model,
                )
                created_objs.append(obj)

        return cls(**{cls._meta.return_field_name: created_objs})
class DjangoDeleteMutationOptions(MutationOptions):
    """Options container for DjangoDeleteMutation subclasses."""

    model = None           # Django model whose instances are deleted
    permissions = None     # permissions checked in mutate()
    login_required = None  # whether mutate() requires an authenticated user
class DjangoDeleteMutation(Mutation):
    """Mutation that deletes a single model instance by (relay or raw) id.

    The payload reports ``found`` (whether the object existed) and, on
    success, ``deleted_id``.
    """

    class Meta:
        abstract = True

    @classmethod
    def __init_subclass_with_meta__(
        cls,
        model=None,
        permissions=None,
        login_required=None,
        only_fields=(),     # accepted for API symmetry with other mutations; unused here
        exclude_fields=(),  # accepted for API symmetry with other mutations; unused here
        return_field_name=None,
        **kwargs,
    ):
        # (Removed an unused ``registry = get_global_registry()`` local.)
        if not return_field_name:
            return_field_name = to_snake_case(model.__name__)

        arguments = OrderedDict(id=graphene.ID(required=True))

        output_fields = OrderedDict()
        output_fields["found"] = graphene.Boolean()
        output_fields["deleted_id"] = graphene.ID()

        _meta = DjangoDeleteMutationOptions(cls)
        _meta.model = model
        _meta.fields = yank_fields_from_attrs(output_fields, _as=graphene.Field)
        _meta.return_field_name = return_field_name
        _meta.permissions = permissions
        # Fix: honor the explicit ``login_required`` argument — it was
        # previously ignored. Requiring permissions still implies login.
        _meta.login_required = login_required or (
            _meta.permissions and len(_meta.permissions) > 0
        )

        super().__init_subclass_with_meta__(arguments=arguments, _meta=_meta, **kwargs)

    @classmethod
    def mutate(cls, root, info, id):
        """Delete the object with the given id; report whether it was found."""
        if cls._meta.login_required and not info.context.user.is_authenticated:
            raise GraphQLError("Must be logged in to access this mutation.")
        if cls._meta.permissions and len(cls._meta.permissions) > 0:
            if not info.context.user.has_perms(cls._meta.permissions):
                raise GraphQLError("Not permitted to access this mutation.")

        Model = cls._meta.model
        id = disambiguate_id(id)
        try:
            obj = Model.objects.get(pk=id)
            obj.delete()
            return cls(found=True, deleted_id=id)
        except ObjectDoesNotExist:
            # Missing object is a normal outcome, not an error.
            return cls(found=False)
class DjangoBatchDeleteMutationOptions(MutationOptions):
    """Options container for DjangoBatchDeleteMutation subclasses."""

    model = None           # Django model whose instances are deleted
    filter_fields = None   # lookup names (e.g. "name__in") accepted by the input type
    filter_class = None
    permissions = None     # permissions checked in mutate()
    login_required = None  # whether mutate() requires an authenticated user
class DjangoBatchDeleteMutation(Mutation):
    """Mutation that bulk-deletes every instance matching a filter input.

    The generated input type exposes the configured ``filter_fields`` lookups;
    the payload reports ``deletion_count`` and the relay ``deleted_ids``.
    """

    class Meta:
        abstract = True

    @classmethod
    def __init_subclass_with_meta__(
        cls,
        model=None,
        permissions=None,
        login_required=None,
        filter_fields=(),
        filter_class=None,
        **kwargs,
    ):
        registry = get_global_registry()
        model_type = registry.get_type_for_model(model)

        assert model_type, f"Model type must be registered for model {model}"
        assert (
            len(filter_fields) > 0
        ), "You must specify at least one field to filter on for deletion."

        input_arguments = get_filter_fields_input_args(filter_fields, model)

        InputType = type(
            f"BatchDelete{model.__name__}Input", (InputObjectType,), input_arguments
        )

        arguments = OrderedDict(input=InputType(required=True))

        output_fields = OrderedDict()
        output_fields["deletion_count"] = graphene.Int()
        output_fields["deleted_ids"] = graphene.List(graphene.ID)

        _meta = DjangoBatchDeleteMutationOptions(cls)
        _meta.model = model
        _meta.fields = yank_fields_from_attrs(output_fields, _as=graphene.Field)
        _meta.filter_fields = filter_fields
        _meta.filter_class = filter_class  # fix: was accepted but silently dropped
        _meta.permissions = permissions
        # Fix: honor the explicit ``login_required`` argument — it was
        # previously ignored. Requiring permissions still implies login.
        _meta.login_required = login_required or (
            _meta.permissions and len(_meta.permissions) > 0
        )

        super().__init_subclass_with_meta__(arguments=arguments, _meta=_meta, **kwargs)

    @classmethod
    def mutate(cls, root, info, input):
        """Delete all objects matching *input*; return count and relay ids."""
        if cls._meta.login_required and not info.context.user.is_authenticated:
            raise GraphQLError("Must be logged in to access this mutation.")
        if cls._meta.permissions and len(cls._meta.permissions) > 0:
            if not info.context.user.has_perms(cls._meta.permissions):
                raise GraphQLError("Not permitted to access this mutation.")

        Model = cls._meta.model
        model_field_values = {}

        # NOTE(review): iterating ``super(type(input), input).items()`` relies
        # on the input object's dict-like base class — kept as-is.
        for name, value in super(type(input), input).items():
            filter_field_split = name.split("__", 1)
            field_name = filter_field_split[0]
            try:
                field = Model._meta.get_field(field_name)
            except FieldDoesNotExist:
                # This can happen with nested selectors. In this case we set the field to none.
                field = None

            filter_field_is_list = False
            if len(filter_field_split) > 1:
                # If we have an "__in" final part of the filter, we are now dealing with
                # a list of things. Note that all other variants can be coerced directly
                # on the filter-call, so we don't really have to deal with other cases.
                filter_field_is_list = filter_field_split[-1] == "in"

            # Per-field override hook: handle_<field>(value, name, info).
            new_value = value
            value_handle_name = "handle_" + name
            if hasattr(cls, value_handle_name):
                handle_func = getattr(cls, value_handle_name)
                assert callable(
                    handle_func
                ), f"Property {value_handle_name} on {cls.__name__} is not a function."
                new_value = handle_func(value, name, info)

            # On some fields we perform some default conversion, if the value was not transformed above.
            if new_value == value and value is not None:
                if type(field) in (models.ForeignKey, models.OneToOneField):
                    name = getattr(field, "db_column", None) or name + "_id"
                    new_value = disambiguate_id(value)
                elif type(field) in (
                    models.ManyToManyField,
                    models.ManyToManyRel,
                    models.ManyToOneRel,
                ) or filter_field_is_list:
                    new_value = disambiguate_ids(value)

            model_field_values[name] = new_value

        filter_qs = Model.objects.filter(**model_field_values)
        # Capture relay global ids before the delete empties the queryset.
        ids = [
            to_global_id(get_global_registry().get_type_for_model(Model).__name__, id)
            for id in filter_qs.values_list("id", flat=True)
        ]
        deletion_count, _ = filter_qs.delete()

        return cls(deletion_count=deletion_count, deleted_ids=ids)
| 34.404913
| 139
| 0.589673
| 4,937
| 43,419
| 4.793194
| 0.056107
| 0.045132
| 0.038032
| 0.037863
| 0.854716
| 0.835742
| 0.820487
| 0.801682
| 0.800203
| 0.796104
| 0
| 0.00108
| 0.338861
| 43,419
| 1,261
| 140
| 34.432197
| 0.823277
| 0.044635
| 0
| 0.817998
| 0
| 0
| 0.05027
| 0.00461
| 0
| 0
| 0
| 0
| 0.0091
| 1
| 0.019211
| false
| 0
| 0.013145
| 0
| 0.130435
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
afc3e776dae3b5b2f91d013ecf69e2b4d03f85b9
| 78,351
|
py
|
Python
|
hermes_fix/message_lib/FIX_4_2/fix_messages.py
|
yabov/hermes_fix
|
0a5e89fd15903a7ee0929e82b39879362e2e1008
|
[
"Apache-2.0"
] | 2
|
2020-02-20T15:00:35.000Z
|
2020-02-21T19:27:53.000Z
|
hermes_fix/message_lib/FIX_4_2/fix_messages.py
|
yabov/hermes_fix
|
0a5e89fd15903a7ee0929e82b39879362e2e1008
|
[
"Apache-2.0"
] | 3
|
2020-02-21T03:25:35.000Z
|
2020-02-21T18:37:42.000Z
|
hermes_fix/message_lib/FIX_4_2/fix_messages.py
|
yabov/hermes_fix
|
0a5e89fd15903a7ee0929e82b39879362e2e1008
|
[
"Apache-2.0"
] | null | null | null |
from ... import fix_message
from . import fields
from . import field_types
# FIX protocol version implemented by this module.
BEGINSTRING = 'FIX.4.2'
# Maps MsgType tag values (e.g. '0', '6') to their message classes;
# populated incrementally as each message class is defined below.
MESSAGE_TYPES = {}
class Header(fix_message.MessageBase):
    """FIX 4.2 standard message header, shared by every message class below."""
    def __init__(self):
        super().__init__()
        register_StandardHeader_component(self)
class Trailer(fix_message.MessageBase):
    """FIX 4.2 standard message trailer, shared by every message class below."""
    def __init__(self):
        super().__init__()
        register_StandardTrailer_component(self)
##############Begin Repeating Groups###############
class NoIOIQualifiersGroup(fix_message.FIXGroup):
    """Repeating group NoIOIQualifiers: one optional IOIQualifier per entry."""
    def __init__(self, value = None):
        super().__init__(value)
        self.register_field(fields.IOIQualifier, False)
class NoRoutingIDsGroup(fix_message.FIXGroup):
    """Repeating group NoRoutingIDs: routing type/id pairs, both optional."""
    def __init__(self, value = None):
        super().__init__(value)
        self.register_field(fields.RoutingType, False)
        self.register_field(fields.RoutingID, False)
class NoContraBrokersGroup(fix_message.FIXGroup):
    """Repeating group NoContraBrokers: contra broker/trade details per entry."""

    def __init__(self, value=None):
        super().__init__(value)
        # All members optional; registration order is significant in FIX.
        for fld in (fields.ContraBroker, fields.ContraTrader,
                    fields.ContraTradeQty, fields.ContraTradeTime):
            self.register_field(fld, False)
class NoMsgTypesGroup(fix_message.FIXGroup):
    """Repeating group NoMsgTypes: referenced message type and direction."""
    def __init__(self, value = None):
        super().__init__(value)
        self.register_field(fields.RefMsgType, False)
        self.register_field(fields.MsgDirection, False)
class NoRelatedSymGroup(fix_message.FIXGroup):
    """Repeating group NoRelatedSym: one related instrument per entry."""

    def __init__(self, value=None):
        super().__init__(value)
        # All members optional; registration order is significant in FIX.
        for fld in (
            fields.RelatdSym, fields.SymbolSfx, fields.SecurityID,
            fields.IDSource, fields.SecurityType, fields.MaturityMonthYear,
            fields.MaturityDay, fields.PutOrCall, fields.StrikePrice,
            fields.OptAttribute, fields.ContractMultiplier, fields.CouponRate,
            fields.SecurityExchange, fields.Issuer, fields.EncodedIssuerLen,
            fields.EncodedIssuer, fields.SecurityDesc,
            fields.EncodedSecurityDescLen, fields.EncodedSecurityDesc,
        ):
            self.register_field(fld, False)
class LinesOfTextGroup(fix_message.FIXGroup):
    """Repeating group LinesOfText: one required Text line per entry."""

    def __init__(self, value=None):
        super().__init__(value)
        # Text is the only required member; order is significant in FIX.
        for fld in (fields.Text, fields.EncodedTextLen, fields.EncodedText):
            self.register_field(fld, fld is fields.Text)
class NoAllocsGroup(fix_message.FIXGroup):
    """Repeating group NoAllocs: allocation account and share count."""
    def __init__(self, value = None):
        super().__init__(value)
        self.register_field(fields.AllocAccount, False)
        self.register_field(fields.AllocShares, False)
class NoTradingSessionsGroup(fix_message.FIXGroup):
    """Repeating group NoTradingSessions: one optional TradingSessionID per entry."""
    def __init__(self, value = None):
        super().__init__(value)
        self.register_field(fields.TradingSessionID, False)
class NoOrdersGroup(fix_message.FIXGroup):
    """Repeating group NoOrders: one full order description per entry."""

    def __init__(self, value=None):
        super().__init__(value)
        # Required members of this group; everything else is optional.
        required = (fields.ClOrdID, fields.ListSeqNo, fields.Symbol, fields.Side)

        # Registration order is significant in FIX — fields and nested groups
        # are registered in the same sequence the spec defines.
        for fld in (
            fields.ClOrdID, fields.ListSeqNo, fields.SettlInstMode,
            fields.ClientID, fields.ExecBroker, fields.Account,
        ):
            self.register_field(fld, fld in required)
        self.register_group(fields.NoAllocs, NoAllocsGroup, False)
        for fld in (
            fields.SettlmntTyp, fields.FutSettDate, fields.HandlInst,
            fields.ExecInst, fields.MinQty, fields.MaxFloor,
            fields.ExDestination,
        ):
            self.register_field(fld, False)
        self.register_group(fields.NoTradingSessions, NoTradingSessionsGroup, False)
        for fld in (
            fields.ProcessCode, fields.Symbol, fields.SymbolSfx,
            fields.SecurityID, fields.IDSource, fields.SecurityType,
            fields.MaturityMonthYear, fields.MaturityDay, fields.PutOrCall,
            fields.StrikePrice, fields.OptAttribute, fields.ContractMultiplier,
            fields.CouponRate, fields.SecurityExchange, fields.Issuer,
            fields.EncodedIssuerLen, fields.EncodedIssuer, fields.SecurityDesc,
            fields.EncodedSecurityDescLen, fields.EncodedSecurityDesc,
            fields.PrevClosePx, fields.Side, fields.SideValueInd,
            fields.LocateReqd, fields.TransactTime, fields.OrderQty,
            fields.CashOrderQty, fields.OrdType, fields.Price, fields.StopPx,
            fields.Currency, fields.ComplianceID, fields.SolicitedFlag,
            fields.IOIid, fields.QuoteID, fields.TimeInForce,
            fields.EffectiveTime, fields.ExpireDate, fields.ExpireTime,
            fields.GTBookingInst, fields.Commission, fields.CommType,
            fields.Rule80A, fields.ForexReq, fields.SettlCurrency,
            fields.Text, fields.EncodedTextLen, fields.EncodedText,
            fields.FutSettDate2, fields.OrderQty2, fields.OpenClose,
            fields.CoveredOrUncovered, fields.CustomerOrFirm, fields.MaxShow,
            fields.PegDifference, fields.DiscretionInst,
            fields.DiscretionOffset, fields.ClearingFirm,
            fields.ClearingAccount,
        ):
            self.register_field(fld, fld in required)
class NoExecsGroup(fix_message.FIXGroup):
    """Repeating group NoExecs: one execution summary per entry."""

    def __init__(self, value=None):
        super().__init__(value)
        # All members optional; registration order is significant in FIX.
        for fld in (fields.LastShares, fields.ExecID,
                    fields.LastPx, fields.LastCapacity):
            self.register_field(fld, False)
class NoMiscFeesGroup(fix_message.FIXGroup):
    """Repeating group NoMiscFees: miscellaneous fee amount/currency/type."""

    def __init__(self, value=None):
        super().__init__(value)
        # All members optional; registration order is significant in FIX.
        for fld in (fields.MiscFeeAmt, fields.MiscFeeCurr, fields.MiscFeeType):
            self.register_field(fld, False)
class NoMDEntryTypesGroup(fix_message.FIXGroup):
    """Repeating group NoMDEntryTypes: one required MDEntryType per entry."""
    def __init__(self, value = None):
        super().__init__(value)
        self.register_field(fields.MDEntryType, True)
class NoMDEntriesGroup(fix_message.FIXGroup):
    """Repeating group NoMDEntries: one market-data entry per item."""

    def __init__(self, value=None):
        super().__init__(value)
        # MDEntryType and MDEntryPx are required; everything else optional.
        # Registration order is significant in FIX.
        required = (fields.MDEntryType, fields.MDEntryPx)
        for fld in (
            fields.MDEntryType, fields.MDEntryPx, fields.Currency,
            fields.MDEntrySize, fields.MDEntryDate, fields.MDEntryTime,
            fields.TickDirection, fields.MDMkt, fields.TradingSessionID,
            fields.QuoteCondition, fields.TradeCondition,
            fields.MDEntryOriginator, fields.LocationID, fields.DeskID,
            fields.OpenCloseSettleFlag, fields.TimeInForce, fields.ExpireDate,
            fields.ExpireTime, fields.MinQty, fields.ExecInst,
            fields.SellerDays, fields.OrderID, fields.QuoteEntryID,
            fields.MDEntryBuyer, fields.MDEntrySeller, fields.NumberOfOrders,
            fields.MDEntryPositionNo, fields.Text, fields.EncodedTextLen,
            fields.EncodedText,
        ):
            self.register_field(fld, fld in required)
class NoQuoteEntriesGroup(fix_message.FIXGroup):
    """Repeating group NoQuoteEntries: one quoted instrument per entry."""

    def __init__(self, value=None):
        super().__init__(value)
        # Symbol is required; every other member is optional.
        # Registration order is significant in FIX.
        for fld in (
            fields.Symbol, fields.SymbolSfx, fields.SecurityID,
            fields.IDSource, fields.SecurityType, fields.MaturityMonthYear,
            fields.MaturityDay, fields.PutOrCall, fields.StrikePrice,
            fields.OptAttribute, fields.ContractMultiplier, fields.CouponRate,
            fields.SecurityExchange, fields.Issuer, fields.EncodedIssuerLen,
            fields.EncodedIssuer, fields.SecurityDesc,
            fields.EncodedSecurityDescLen, fields.EncodedSecurityDesc,
            fields.UnderlyingSymbol,
        ):
            self.register_field(fld, fld is fields.Symbol)
class NoQuoteSetsGroup(fix_message.FIXGroup):
    """Repeating group NoQuoteSets: underlying details plus nested quote entries."""

    def __init__(self, value=None):
        super().__init__(value)
        # All members optional; registration order is significant in FIX.
        for fld in (
            fields.QuoteSetID, fields.UnderlyingSymbol,
            fields.UnderlyingSymbolSfx, fields.UnderlyingSecurityID,
            fields.UnderlyingIDSource, fields.UnderlyingSecurityType,
            fields.UnderlyingMaturityMonthYear, fields.UnderlyingMaturityDay,
            fields.UnderlyingPutOrCall, fields.UnderlyingStrikePrice,
            fields.UnderlyingOptAttribute, fields.UnderlyingContractMultiplier,
            fields.UnderlyingCouponRate, fields.UnderlyingSecurityExchange,
            fields.UnderlyingIssuer, fields.EncodedUnderlyingIssuerLen,
            fields.EncodedUnderlyingIssuer, fields.UnderlyingSecurityDesc,
            fields.EncodedUnderlyingSecurityDescLen,
            fields.EncodedUnderlyingSecurityDesc, fields.TotQuoteEntries,
        ):
            self.register_field(fld, False)
        self.register_group(fields.NoQuoteEntries, NoQuoteEntriesGroup, False)
class NoBidDescriptorsGroup(fix_message.FIXGroup):
    """Repeating group NoBidDescriptors: one bid descriptor per entry."""

    def __init__(self, value=None):
        super().__init__(value)
        # All members optional; registration order is significant in FIX.
        for fld in (
            fields.BidDescriptorType, fields.BidDescriptor,
            fields.SideValueInd, fields.LiquidityValue,
            fields.LiquidityNumSecurities, fields.LiquidityPctLow,
            fields.LiquidityPctHigh, fields.EFPTrackingError,
            fields.FairValue, fields.OutsideIndexPct, fields.ValueOfFutures,
        ):
            self.register_field(fld, False)
class NoBidComponentsGroup(fix_message.FIXGroup):
    """Repeating group NoBidComponents: one bid list component per entry."""

    def __init__(self, value=None):
        super().__init__(value)
        # All members optional; registration order is significant in FIX.
        for fld in (
            fields.ListID, fields.Side, fields.TradingSessionID,
            fields.NetGrossInd, fields.SettlmntTyp, fields.FutSettDate,
            fields.Account,
        ):
            self.register_field(fld, False)
class NoStrikesGroup(fix_message.FIXGroup):
    """Repeating group NoStrikes: one strike-price instrument per entry."""

    def __init__(self, value=None):
        super().__init__(value)
        # Symbol and Price are required; everything else optional.
        # Registration order is significant in FIX.
        required = (fields.Symbol, fields.Price)
        for fld in (
            fields.Symbol, fields.SymbolSfx, fields.SecurityID,
            fields.IDSource, fields.SecurityType, fields.MaturityMonthYear,
            fields.MaturityDay, fields.PutOrCall, fields.StrikePrice,
            fields.OptAttribute, fields.ContractMultiplier, fields.CouponRate,
            fields.SecurityExchange, fields.Issuer, fields.EncodedIssuerLen,
            fields.EncodedIssuer, fields.SecurityDesc,
            fields.EncodedSecurityDescLen, fields.EncodedSecurityDesc,
            fields.PrevClosePx, fields.ClOrdID, fields.Side, fields.Price,
            fields.Currency, fields.Text, fields.EncodedTextLen,
            fields.EncodedText,
        ):
            self.register_field(fld, fld in required)
##############End Repeating Groups###############
##############Begin Components###############
def register_StandardHeader_component(self):
    """Register the FIX 4.2 StandardHeader fields on *self* in spec order."""
    required = (
        fields.BeginString, fields.BodyLength, fields.MsgType,
        fields.SenderCompID, fields.TargetCompID, fields.MsgSeqNum,
        fields.SendingTime,
    )
    for fld in (
        fields.BeginString, fields.BodyLength, fields.MsgType,
        fields.SenderCompID, fields.TargetCompID, fields.OnBehalfOfCompID,
        fields.DeliverToCompID, fields.SecureDataLen, fields.SecureData,
        fields.MsgSeqNum, fields.SenderSubID, fields.SenderLocationID,
        fields.TargetSubID, fields.TargetLocationID, fields.OnBehalfOfSubID,
        fields.OnBehalfOfLocationID, fields.DeliverToSubID,
        fields.DeliverToLocationID, fields.PossDupFlag, fields.PossResend,
        fields.SendingTime, fields.OrigSendingTime, fields.XmlDataLen,
        fields.XmlData, fields.MessageEncoding,
        fields.LastMsgSeqNumProcessed, fields.OnBehalfOfSendingTime,
    ):
        self.register_field(fld, fld in required)
def register_StandardTrailer_component(self):
    """Register the FIX 4.2 StandardTrailer fields on *self* in spec order."""
    # CheckSum is the only required trailer field.
    for fld in (fields.SignatureLength, fields.Signature, fields.CheckSum):
        self.register_field(fld, fld is fields.CheckSum)
##############End Components###############
class Heartbeat(fix_message.MessageBase):
    """FIX 4.2 Heartbeat message (MsgType '0', admin category)."""
    _msgtype = '0'
    _msgcat = 'admin'
    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        self.register_field(fields.TestReqID, False)
MESSAGE_TYPES['0'] = Heartbeat
class TestRequest(fix_message.MessageBase):
    """FIX 4.2 TestRequest message (MsgType '1', admin category)."""
    _msgtype = '1'
    _msgcat = 'admin'
    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        self.register_field(fields.TestReqID, True)
MESSAGE_TYPES['1'] = TestRequest
class ResendRequest(fix_message.MessageBase):
    """FIX 4.2 ResendRequest message (MsgType '2', admin category)."""
    _msgtype = '2'
    _msgcat = 'admin'
    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        self.register_field(fields.BeginSeqNo, True)
        self.register_field(fields.EndSeqNo, True)
MESSAGE_TYPES['2'] = ResendRequest
class Reject(fix_message.MessageBase):
    """FIX 4.2 Reject message (MsgType '3', admin category)."""
    _msgtype = '3'
    _msgcat = 'admin'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # RefSeqNum is the only required field; order is significant in FIX.
        for fld in (
            fields.RefSeqNum, fields.RefTagID, fields.RefMsgType,
            fields.SessionRejectReason, fields.Text, fields.EncodedTextLen,
            fields.EncodedText,
        ):
            self.register_field(fld, fld is fields.RefSeqNum)

MESSAGE_TYPES['3'] = Reject
class SequenceReset(fix_message.MessageBase):
    """FIX 4.2 SequenceReset message (MsgType '4', admin category)."""
    _msgtype = '4'
    _msgcat = 'admin'
    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        self.register_field(fields.GapFillFlag, False)
        self.register_field(fields.NewSeqNo, True)
MESSAGE_TYPES['4'] = SequenceReset
class Logout(fix_message.MessageBase):
    """FIX 4.2 Logout message (MsgType '5', admin category)."""
    _msgtype = '5'
    _msgcat = 'admin'
    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        self.register_field(fields.Text, False)
        self.register_field(fields.EncodedTextLen, False)
        self.register_field(fields.EncodedText, False)
MESSAGE_TYPES['5'] = Logout
class IOI(fix_message.MessageBase):
    """FIX 4.2 IOI (Indication of Interest) message (MsgType '6', app category)."""
    _msgtype = '6'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Required fields; everything else is optional.
        # Registration order (including nested groups) is significant in FIX.
        required = (fields.IOIid, fields.IOITransType, fields.Symbol,
                    fields.Side, fields.IOIShares)
        for fld in (
            fields.IOIid, fields.IOITransType, fields.IOIRefID, fields.Symbol,
            fields.SymbolSfx, fields.SecurityID, fields.IDSource,
            fields.SecurityType, fields.MaturityMonthYear, fields.MaturityDay,
            fields.PutOrCall, fields.StrikePrice, fields.OptAttribute,
            fields.ContractMultiplier, fields.CouponRate,
            fields.SecurityExchange, fields.Issuer, fields.EncodedIssuerLen,
            fields.EncodedIssuer, fields.SecurityDesc,
            fields.EncodedSecurityDescLen, fields.EncodedSecurityDesc,
            fields.Side, fields.IOIShares, fields.Price, fields.Currency,
            fields.ValidUntilTime, fields.IOIQltyInd, fields.IOINaturalFlag,
        ):
            self.register_field(fld, fld in required)
        self.register_group(fields.NoIOIQualifiers, NoIOIQualifiersGroup, False)
        for fld in (fields.Text, fields.EncodedTextLen, fields.EncodedText,
                    fields.TransactTime, fields.URLLink):
            self.register_field(fld, False)
        self.register_group(fields.NoRoutingIDs, NoRoutingIDsGroup, False)
        self.register_field(fields.SpreadToBenchmark, False)
        self.register_field(fields.Benchmark, False)

MESSAGE_TYPES['6'] = IOI
class Advertisement(fix_message.MessageBase):
    """MsgType '7' (Advertisement) application message definition."""

    _msgtype = '7'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # All body members are plain fields; registered in wire order.
        for field, required in (
            (fields.AdvId, True),
            (fields.AdvTransType, True),
            (fields.AdvRefID, False),
            (fields.Symbol, True),
            (fields.SymbolSfx, False),
            (fields.SecurityID, False),
            (fields.IDSource, False),
            (fields.SecurityType, False),
            (fields.MaturityMonthYear, False),
            (fields.MaturityDay, False),
            (fields.PutOrCall, False),
            (fields.StrikePrice, False),
            (fields.OptAttribute, False),
            (fields.ContractMultiplier, False),
            (fields.CouponRate, False),
            (fields.SecurityExchange, False),
            (fields.Issuer, False),
            (fields.EncodedIssuerLen, False),
            (fields.EncodedIssuer, False),
            (fields.SecurityDesc, False),
            (fields.EncodedSecurityDescLen, False),
            (fields.EncodedSecurityDesc, False),
            (fields.AdvSide, True),
            (fields.Shares, True),
            (fields.Price, False),
            (fields.Currency, False),
            (fields.TradeDate, False),
            (fields.TransactTime, False),
            (fields.Text, False),
            (fields.EncodedTextLen, False),
            (fields.EncodedText, False),
            (fields.URLLink, False),
            (fields.LastMkt, False),
            (fields.TradingSessionID, False),
        ):
            self.register_field(field, required)


MESSAGE_TYPES['7'] = Advertisement
class ExecutionReport(fix_message.MessageBase):
    """MsgType '8' (ExecutionReport) application message definition."""

    _msgtype = '8'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Layout in wire order: 2-tuples are (field, required) plain fields,
        # 3-tuples are (count_field, group_class, required) repeating groups.
        layout = (
            (fields.OrderID, True),
            (fields.SecondaryOrderID, False),
            (fields.ClOrdID, False),
            (fields.OrigClOrdID, False),
            (fields.ClientID, False),
            (fields.ExecBroker, False),
            (fields.NoContraBrokers, NoContraBrokersGroup, False),
            (fields.ListID, False),
            (fields.ExecID, True),
            (fields.ExecTransType, True),
            (fields.ExecRefID, False),
            (fields.ExecType, True),
            (fields.OrdStatus, True),
            (fields.OrdRejReason, False),
            (fields.ExecRestatementReason, False),
            (fields.Account, False),
            (fields.SettlmntTyp, False),
            (fields.FutSettDate, False),
            (fields.Symbol, True),
            (fields.SymbolSfx, False),
            (fields.SecurityID, False),
            (fields.IDSource, False),
            (fields.SecurityType, False),
            (fields.MaturityMonthYear, False),
            (fields.MaturityDay, False),
            (fields.PutOrCall, False),
            (fields.StrikePrice, False),
            (fields.OptAttribute, False),
            (fields.ContractMultiplier, False),
            (fields.CouponRate, False),
            (fields.SecurityExchange, False),
            (fields.Issuer, False),
            (fields.EncodedIssuerLen, False),
            (fields.EncodedIssuer, False),
            (fields.SecurityDesc, False),
            (fields.EncodedSecurityDescLen, False),
            (fields.EncodedSecurityDesc, False),
            (fields.Side, True),
            (fields.OrderQty, False),
            (fields.CashOrderQty, False),
            (fields.OrdType, False),
            (fields.Price, False),
            (fields.StopPx, False),
            (fields.PegDifference, False),
            (fields.DiscretionInst, False),
            (fields.DiscretionOffset, False),
            (fields.Currency, False),
            (fields.ComplianceID, False),
            (fields.SolicitedFlag, False),
            (fields.TimeInForce, False),
            (fields.EffectiveTime, False),
            (fields.ExpireDate, False),
            (fields.ExpireTime, False),
            (fields.ExecInst, False),
            (fields.Rule80A, False),
            (fields.LastShares, False),
            (fields.LastPx, False),
            (fields.LastSpotRate, False),
            (fields.LastForwardPoints, False),
            (fields.LastMkt, False),
            (fields.TradingSessionID, False),
            (fields.LastCapacity, False),
            (fields.LeavesQty, True),
            (fields.CumQty, True),
            (fields.AvgPx, True),
            (fields.DayOrderQty, False),
            (fields.DayCumQty, False),
            (fields.DayAvgPx, False),
            (fields.GTBookingInst, False),
            (fields.TradeDate, False),
            (fields.TransactTime, False),
            (fields.ReportToExch, False),
            (fields.Commission, False),
            (fields.CommType, False),
            (fields.GrossTradeAmt, False),
            (fields.SettlCurrAmt, False),
            (fields.SettlCurrency, False),
            (fields.SettlCurrFxRate, False),
            (fields.SettlCurrFxRateCalc, False),
            (fields.HandlInst, False),
            (fields.MinQty, False),
            (fields.MaxFloor, False),
            (fields.OpenClose, False),
            (fields.MaxShow, False),
            (fields.Text, False),
            (fields.EncodedTextLen, False),
            (fields.EncodedText, False),
            (fields.FutSettDate2, False),
            (fields.OrderQty2, False),
            (fields.ClearingFirm, False),
            (fields.ClearingAccount, False),
            (fields.MultiLegReportingType, False),
        )
        for entry in layout:
            if len(entry) == 3:
                self.register_group(*entry)
            else:
                self.register_field(*entry)


MESSAGE_TYPES['8'] = ExecutionReport
class OrderCancelReject(fix_message.MessageBase):
    """MsgType '9' (OrderCancelReject) application message definition."""

    _msgtype = '9'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # All body members are plain fields; registered in wire order.
        for field, required in (
            (fields.OrderID, True),
            (fields.SecondaryOrderID, False),
            (fields.ClOrdID, True),
            (fields.OrigClOrdID, True),
            (fields.OrdStatus, True),
            (fields.ClientID, False),
            (fields.ExecBroker, False),
            (fields.ListID, False),
            (fields.Account, False),
            (fields.TransactTime, False),
            (fields.CxlRejResponseTo, True),
            (fields.CxlRejReason, False),
            (fields.Text, False),
            (fields.EncodedTextLen, False),
            (fields.EncodedText, False),
        ):
            self.register_field(field, required)


MESSAGE_TYPES['9'] = OrderCancelReject
class Logon(fix_message.MessageBase):
    """MsgType 'A' (Logon) admin message definition."""

    _msgtype = 'A'
    _msgcat = 'admin'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Layout in wire order; the final 3-tuple is a repeating group.
        layout = (
            (fields.EncryptMethod, True),
            (fields.HeartBtInt, True),
            (fields.RawDataLength, False),
            (fields.RawData, False),
            (fields.ResetSeqNumFlag, False),
            (fields.MaxMessageSize, False),
            (fields.NoMsgTypes, NoMsgTypesGroup, False),
        )
        for entry in layout:
            if len(entry) == 3:
                self.register_group(*entry)
            else:
                self.register_field(*entry)


MESSAGE_TYPES['A'] = Logon
class News(fix_message.MessageBase):
    """MsgType 'B' (News) application message definition."""

    _msgtype = 'B'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Layout in wire order: 3-tuples are repeating groups.
        layout = (
            (fields.OrigTime, False),
            (fields.Urgency, False),
            (fields.Headline, True),
            (fields.EncodedHeadlineLen, False),
            (fields.EncodedHeadline, False),
            (fields.NoRoutingIDs, NoRoutingIDsGroup, False),
            (fields.NoRelatedSym, NoRelatedSymGroup, False),
            (fields.LinesOfText, LinesOfTextGroup, True),
            (fields.URLLink, False),
            (fields.RawDataLength, False),
            (fields.RawData, False),
        )
        for entry in layout:
            if len(entry) == 3:
                self.register_group(*entry)
            else:
                self.register_field(*entry)


MESSAGE_TYPES['B'] = News
class Email(fix_message.MessageBase):
    """MsgType 'C' (Email) application message definition."""

    _msgtype = 'C'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Layout in wire order: 3-tuples are repeating groups.
        layout = (
            (fields.EmailThreadID, True),
            (fields.EmailType, True),
            (fields.OrigTime, False),
            (fields.Subject, True),
            (fields.EncodedSubjectLen, False),
            (fields.EncodedSubject, False),
            (fields.NoRoutingIDs, NoRoutingIDsGroup, False),
            (fields.NoRelatedSym, NoRelatedSymGroup, False),
            (fields.OrderID, False),
            (fields.ClOrdID, False),
            (fields.LinesOfText, LinesOfTextGroup, True),
            (fields.RawDataLength, False),
            (fields.RawData, False),
        )
        for entry in layout:
            if len(entry) == 3:
                self.register_group(*entry)
            else:
                self.register_field(*entry)


MESSAGE_TYPES['C'] = Email
class OrderSingle(fix_message.MessageBase):
    """MsgType 'D' (OrderSingle / NewOrderSingle) application message definition."""

    _msgtype = 'D'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Layout in wire order: 2-tuples are (field, required) plain fields,
        # 3-tuples are (count_field, group_class, required) repeating groups.
        layout = (
            (fields.ClOrdID, True),
            (fields.ClientID, False),
            (fields.ExecBroker, False),
            (fields.Account, False),
            (fields.NoAllocs, NoAllocsGroup, False),
            (fields.SettlmntTyp, False),
            (fields.FutSettDate, False),
            (fields.HandlInst, True),
            (fields.ExecInst, False),
            (fields.MinQty, False),
            (fields.MaxFloor, False),
            (fields.ExDestination, False),
            (fields.NoTradingSessions, NoTradingSessionsGroup, False),
            (fields.ProcessCode, False),
            (fields.Symbol, True),
            (fields.SymbolSfx, False),
            (fields.SecurityID, False),
            (fields.IDSource, False),
            (fields.SecurityType, False),
            (fields.MaturityMonthYear, False),
            (fields.MaturityDay, False),
            (fields.PutOrCall, False),
            (fields.StrikePrice, False),
            (fields.OptAttribute, False),
            (fields.ContractMultiplier, False),
            (fields.CouponRate, False),
            (fields.SecurityExchange, False),
            (fields.Issuer, False),
            (fields.EncodedIssuerLen, False),
            (fields.EncodedIssuer, False),
            (fields.SecurityDesc, False),
            (fields.EncodedSecurityDescLen, False),
            (fields.EncodedSecurityDesc, False),
            (fields.PrevClosePx, False),
            (fields.Side, True),
            (fields.LocateReqd, False),
            (fields.TransactTime, True),
            (fields.OrderQty, False),
            (fields.CashOrderQty, False),
            (fields.OrdType, True),
            (fields.Price, False),
            (fields.StopPx, False),
            (fields.Currency, False),
            (fields.ComplianceID, False),
            (fields.SolicitedFlag, False),
            (fields.IOIid, False),
            (fields.QuoteID, False),
            (fields.TimeInForce, False),
            (fields.EffectiveTime, False),
            (fields.ExpireDate, False),
            (fields.ExpireTime, False),
            (fields.GTBookingInst, False),
            (fields.Commission, False),
            (fields.CommType, False),
            (fields.Rule80A, False),
            (fields.ForexReq, False),
            (fields.SettlCurrency, False),
            (fields.Text, False),
            (fields.EncodedTextLen, False),
            (fields.EncodedText, False),
            (fields.FutSettDate2, False),
            (fields.OrderQty2, False),
            (fields.OpenClose, False),
            (fields.CoveredOrUncovered, False),
            (fields.CustomerOrFirm, False),
            (fields.MaxShow, False),
            (fields.PegDifference, False),
            (fields.DiscretionInst, False),
            (fields.DiscretionOffset, False),
            (fields.ClearingFirm, False),
            (fields.ClearingAccount, False),
        )
        for entry in layout:
            if len(entry) == 3:
                self.register_group(*entry)
            else:
                self.register_field(*entry)


MESSAGE_TYPES['D'] = OrderSingle
class OrderList(fix_message.MessageBase):
    """MsgType 'E' (OrderList / NewOrderList) application message definition."""

    _msgtype = 'E'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Layout in wire order; the final 3-tuple is a repeating group.
        layout = (
            (fields.ListID, True),
            (fields.BidID, False),
            (fields.ClientBidID, False),
            (fields.ProgRptReqs, False),
            (fields.BidType, True),
            (fields.ProgPeriodInterval, False),
            (fields.ListExecInstType, False),
            (fields.ListExecInst, False),
            (fields.EncodedListExecInstLen, False),
            (fields.EncodedListExecInst, False),
            (fields.TotNoOrders, True),
            (fields.NoOrders, NoOrdersGroup, True),
        )
        for entry in layout:
            if len(entry) == 3:
                self.register_group(*entry)
            else:
                self.register_field(*entry)


MESSAGE_TYPES['E'] = OrderList
class OrderCancelRequest(fix_message.MessageBase):
    """MsgType 'F' (OrderCancelRequest) application message definition."""

    _msgtype = 'F'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # All body members are plain fields; registered in wire order.
        for field, required in (
            (fields.OrigClOrdID, True),
            (fields.OrderID, False),
            (fields.ClOrdID, True),
            (fields.ListID, False),
            (fields.Account, False),
            (fields.ClientID, False),
            (fields.ExecBroker, False),
            (fields.Symbol, True),
            (fields.SymbolSfx, False),
            (fields.SecurityID, False),
            (fields.IDSource, False),
            (fields.SecurityType, False),
            (fields.MaturityMonthYear, False),
            (fields.MaturityDay, False),
            (fields.PutOrCall, False),
            (fields.StrikePrice, False),
            (fields.OptAttribute, False),
            (fields.ContractMultiplier, False),
            (fields.CouponRate, False),
            (fields.SecurityExchange, False),
            (fields.Issuer, False),
            (fields.EncodedIssuerLen, False),
            (fields.EncodedIssuer, False),
            (fields.SecurityDesc, False),
            (fields.EncodedSecurityDescLen, False),
            (fields.EncodedSecurityDesc, False),
            (fields.Side, True),
            (fields.TransactTime, True),
            (fields.OrderQty, False),
            (fields.CashOrderQty, False),
            (fields.ComplianceID, False),
            (fields.SolicitedFlag, False),
            (fields.Text, False),
            (fields.EncodedTextLen, False),
            (fields.EncodedText, False),
        ):
            self.register_field(field, required)


MESSAGE_TYPES['F'] = OrderCancelRequest
class OrderCancelReplaceRequest(fix_message.MessageBase):
    """MsgType 'G' (OrderCancelReplaceRequest) application message definition."""

    _msgtype = 'G'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Layout in wire order: 2-tuples are (field, required) plain fields,
        # 3-tuples are (count_field, group_class, required) repeating groups.
        layout = (
            (fields.OrderID, False),
            (fields.ClientID, False),
            (fields.ExecBroker, False),
            (fields.OrigClOrdID, True),
            (fields.ClOrdID, True),
            (fields.ListID, False),
            (fields.Account, False),
            (fields.NoAllocs, NoAllocsGroup, False),
            (fields.SettlmntTyp, False),
            (fields.FutSettDate, False),
            (fields.HandlInst, True),
            (fields.ExecInst, False),
            (fields.MinQty, False),
            (fields.MaxFloor, False),
            (fields.ExDestination, False),
            (fields.NoTradingSessions, NoTradingSessionsGroup, False),
            (fields.Symbol, True),
            (fields.SymbolSfx, False),
            (fields.SecurityID, False),
            (fields.IDSource, False),
            (fields.SecurityType, False),
            (fields.MaturityMonthYear, False),
            (fields.MaturityDay, False),
            (fields.PutOrCall, False),
            (fields.StrikePrice, False),
            (fields.OptAttribute, False),
            (fields.ContractMultiplier, False),
            (fields.CouponRate, False),
            (fields.SecurityExchange, False),
            (fields.Issuer, False),
            (fields.EncodedIssuerLen, False),
            (fields.EncodedIssuer, False),
            (fields.SecurityDesc, False),
            (fields.EncodedSecurityDescLen, False),
            (fields.EncodedSecurityDesc, False),
            (fields.Side, True),
            (fields.TransactTime, True),
            (fields.OrderQty, False),
            (fields.CashOrderQty, False),
            (fields.OrdType, True),
            (fields.Price, False),
            (fields.StopPx, False),
            (fields.PegDifference, False),
            (fields.DiscretionInst, False),
            (fields.DiscretionOffset, False),
            (fields.ComplianceID, False),
            (fields.SolicitedFlag, False),
            (fields.Currency, False),
            (fields.TimeInForce, False),
            (fields.EffectiveTime, False),
            (fields.ExpireDate, False),
            (fields.ExpireTime, False),
            (fields.GTBookingInst, False),
            (fields.Commission, False),
            (fields.CommType, False),
            (fields.Rule80A, False),
            (fields.ForexReq, False),
            (fields.SettlCurrency, False),
            (fields.Text, False),
            (fields.EncodedTextLen, False),
            (fields.EncodedText, False),
            (fields.FutSettDate2, False),
            (fields.OrderQty2, False),
            (fields.OpenClose, False),
            (fields.CoveredOrUncovered, False),
            (fields.CustomerOrFirm, False),
            (fields.MaxShow, False),
            (fields.LocateReqd, False),
            (fields.ClearingFirm, False),
            (fields.ClearingAccount, False),
        )
        for entry in layout:
            if len(entry) == 3:
                self.register_group(*entry)
            else:
                self.register_field(*entry)


MESSAGE_TYPES['G'] = OrderCancelReplaceRequest
class OrderStatusRequest(fix_message.MessageBase):
    """MsgType 'H' (OrderStatusRequest) application message definition."""

    _msgtype = 'H'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # All body members are plain fields; registered in wire order.
        for field, required in (
            (fields.OrderID, False),
            (fields.ClOrdID, True),
            (fields.ClientID, False),
            (fields.Account, False),
            (fields.ExecBroker, False),
            (fields.Symbol, True),
            (fields.SymbolSfx, False),
            (fields.SecurityID, False),
            (fields.IDSource, False),
            (fields.SecurityType, False),
            (fields.MaturityMonthYear, False),
            (fields.MaturityDay, False),
            (fields.PutOrCall, False),
            (fields.StrikePrice, False),
            (fields.OptAttribute, False),
            (fields.ContractMultiplier, False),
            (fields.CouponRate, False),
            (fields.SecurityExchange, False),
            (fields.Issuer, False),
            (fields.EncodedIssuerLen, False),
            (fields.EncodedIssuer, False),
            (fields.SecurityDesc, False),
            (fields.EncodedSecurityDescLen, False),
            (fields.EncodedSecurityDesc, False),
            (fields.Side, True),
        ):
            self.register_field(field, required)


MESSAGE_TYPES['H'] = OrderStatusRequest
class Allocation(fix_message.MessageBase):
    """MsgType 'J' (Allocation) application message definition."""

    _msgtype = 'J'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Layout in wire order: 2-tuples are (field, required) plain fields,
        # 3-tuples are (count_field, group_class, required) repeating groups.
        layout = (
            (fields.AllocID, True),
            (fields.AllocTransType, True),
            (fields.RefAllocID, False),
            (fields.AllocLinkID, False),
            (fields.AllocLinkType, False),
            (fields.NoOrders, NoOrdersGroup, False),
            (fields.NoExecs, NoExecsGroup, False),
            (fields.Side, True),
            (fields.Symbol, True),
            (fields.SymbolSfx, False),
            (fields.SecurityID, False),
            (fields.IDSource, False),
            (fields.SecurityType, False),
            (fields.MaturityMonthYear, False),
            (fields.MaturityDay, False),
            (fields.PutOrCall, False),
            (fields.StrikePrice, False),
            (fields.OptAttribute, False),
            (fields.ContractMultiplier, False),
            (fields.CouponRate, False),
            (fields.SecurityExchange, False),
            (fields.Issuer, False),
            (fields.EncodedIssuerLen, False),
            (fields.EncodedIssuer, False),
            (fields.SecurityDesc, False),
            (fields.EncodedSecurityDescLen, False),
            (fields.EncodedSecurityDesc, False),
            (fields.Shares, True),
            (fields.LastMkt, False),
            (fields.TradingSessionID, False),
            (fields.AvgPx, True),
            (fields.Currency, False),
            (fields.AvgPrxPrecision, False),
            (fields.TradeDate, True),
            (fields.TransactTime, False),
            (fields.SettlmntTyp, False),
            (fields.FutSettDate, False),
            (fields.GrossTradeAmt, False),
            (fields.NetMoney, False),
            (fields.OpenClose, False),
            (fields.Text, False),
            (fields.EncodedTextLen, False),
            (fields.EncodedText, False),
            (fields.NumDaysInterest, False),
            (fields.AccruedInterestRate, False),
            (fields.NoAllocs, NoAllocsGroup, False),
        )
        for entry in layout:
            if len(entry) == 3:
                self.register_group(*entry)
            else:
                self.register_field(*entry)


MESSAGE_TYPES['J'] = Allocation
class ListCancelRequest(fix_message.MessageBase):
    """MsgType 'K' (ListCancelRequest) application message definition."""

    _msgtype = 'K'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # All body members are plain fields; registered in wire order.
        for field, required in (
            (fields.ListID, True),
            (fields.TransactTime, True),
            (fields.Text, False),
            (fields.EncodedTextLen, False),
            (fields.EncodedText, False),
        ):
            self.register_field(field, required)


MESSAGE_TYPES['K'] = ListCancelRequest
class ListExecute(fix_message.MessageBase):
    """MsgType 'L' (ListExecute) application message definition."""

    _msgtype = 'L'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # All body members are plain fields; registered in wire order.
        for field, required in (
            (fields.ListID, True),
            (fields.ClientBidID, False),
            (fields.BidID, False),
            (fields.TransactTime, True),
            (fields.Text, False),
            (fields.EncodedTextLen, False),
            (fields.EncodedText, False),
        ):
            self.register_field(field, required)


MESSAGE_TYPES['L'] = ListExecute
class ListStatusRequest(fix_message.MessageBase):
    """MsgType 'M' (ListStatusRequest) application message definition."""

    _msgtype = 'M'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # All body members are plain fields; registered in wire order.
        for field, required in (
            (fields.ListID, True),
            (fields.Text, False),
            (fields.EncodedTextLen, False),
            (fields.EncodedText, False),
        ):
            self.register_field(field, required)


MESSAGE_TYPES['M'] = ListStatusRequest
class ListStatus(fix_message.MessageBase):
    """MsgType 'N' (ListStatus) application message definition."""

    _msgtype = 'N'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Layout in wire order; the final 3-tuple is a repeating group.
        layout = (
            (fields.ListID, True),
            (fields.ListStatusType, True),
            (fields.NoRpts, True),
            (fields.ListOrderStatus, True),
            (fields.RptSeq, True),
            (fields.ListStatusText, False),
            (fields.EncodedListStatusTextLen, False),
            (fields.EncodedListStatusText, False),
            (fields.TransactTime, False),
            (fields.TotNoOrders, True),
            (fields.NoOrders, NoOrdersGroup, True),
        )
        for entry in layout:
            if len(entry) == 3:
                self.register_group(*entry)
            else:
                self.register_field(*entry)


MESSAGE_TYPES['N'] = ListStatus
class AllocationInstructionAck(fix_message.MessageBase):
    """MsgType 'P' (AllocationInstructionAck) application message definition."""

    _msgtype = 'P'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # All body members are plain fields; registered in wire order.
        for field, required in (
            (fields.ClientID, False),
            (fields.ExecBroker, False),
            (fields.AllocID, True),
            (fields.TradeDate, True),
            (fields.TransactTime, False),
            (fields.AllocStatus, True),
            (fields.AllocRejCode, False),
            (fields.Text, False),
            (fields.EncodedTextLen, False),
            (fields.EncodedText, False),
        ):
            self.register_field(field, required)


MESSAGE_TYPES['P'] = AllocationInstructionAck
class DontKnowTrade(fix_message.MessageBase):
    """MsgType 'Q' (DontKnowTrade / DK) application message definition."""

    _msgtype = 'Q'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # All body members are plain fields; registered in wire order.
        for field, required in (
            (fields.OrderID, True),
            (fields.ExecID, True),
            (fields.DKReason, True),
            (fields.Symbol, True),
            (fields.SymbolSfx, False),
            (fields.SecurityID, False),
            (fields.IDSource, False),
            (fields.SecurityType, False),
            (fields.MaturityMonthYear, False),
            (fields.MaturityDay, False),
            (fields.PutOrCall, False),
            (fields.StrikePrice, False),
            (fields.OptAttribute, False),
            (fields.ContractMultiplier, False),
            (fields.CouponRate, False),
            (fields.SecurityExchange, False),
            (fields.Issuer, False),
            (fields.EncodedIssuerLen, False),
            (fields.EncodedIssuer, False),
            (fields.SecurityDesc, False),
            (fields.EncodedSecurityDescLen, False),
            (fields.EncodedSecurityDesc, False),
            (fields.Side, True),
            (fields.OrderQty, False),
            (fields.CashOrderQty, False),
            (fields.LastShares, False),
            (fields.LastPx, False),
            (fields.Text, False),
            (fields.EncodedTextLen, False),
            (fields.EncodedText, False),
        ):
            self.register_field(field, required)


MESSAGE_TYPES['Q'] = DontKnowTrade
class QuoteRequest(fix_message.MessageBase):
    """MsgType 'R' (QuoteRequest) application message definition."""

    _msgtype = 'R'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # One required ID field followed by one required repeating group.
        self.register_field(fields.QuoteReqID, True)
        self.register_group(fields.NoRelatedSym, NoRelatedSymGroup, True)


MESSAGE_TYPES['R'] = QuoteRequest
class Quote(fix_message.MessageBase):
    """MsgType 'S' (Quote) application message definition."""

    _msgtype = 'S'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # All body members are plain fields; registered in wire order.
        for field, required in (
            (fields.QuoteReqID, False),
            (fields.QuoteID, True),
            (fields.QuoteResponseLevel, False),
            (fields.TradingSessionID, False),
            (fields.Symbol, True),
            (fields.SymbolSfx, False),
            (fields.SecurityID, False),
            (fields.IDSource, False),
            (fields.SecurityType, False),
            (fields.MaturityMonthYear, False),
            (fields.MaturityDay, False),
            (fields.PutOrCall, False),
            (fields.StrikePrice, False),
            (fields.OptAttribute, False),
            (fields.ContractMultiplier, False),
            (fields.CouponRate, False),
            (fields.SecurityExchange, False),
            (fields.Issuer, False),
            (fields.EncodedIssuerLen, False),
            (fields.EncodedIssuer, False),
            (fields.SecurityDesc, False),
            (fields.EncodedSecurityDescLen, False),
            (fields.EncodedSecurityDesc, False),
            (fields.BidPx, False),
            (fields.OfferPx, False),
            (fields.BidSize, False),
            (fields.OfferSize, False),
            (fields.ValidUntilTime, False),
            (fields.BidSpotRate, False),
            (fields.OfferSpotRate, False),
            (fields.BidForwardPoints, False),
            (fields.OfferForwardPoints, False),
            (fields.TransactTime, False),
            (fields.FutSettDate, False),
            (fields.OrdType, False),
            (fields.FutSettDate2, False),
            (fields.OrderQty2, False),
            (fields.Currency, False),
        ):
            self.register_field(field, required)


MESSAGE_TYPES['S'] = Quote
class SettlementInstructions(fix_message.MessageBase):
    """MsgType 'T' (SettlementInstructions) application message definition."""

    _msgtype = 'T'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # All body members are plain fields; registered in wire order.
        for field, required in (
            (fields.SettlInstID, True),
            (fields.SettlInstTransType, True),
            (fields.SettlInstRefID, True),
            (fields.SettlInstMode, True),
            (fields.SettlInstSource, True),
            (fields.AllocAccount, True),
            (fields.SettlLocation, False),
            (fields.TradeDate, False),
            (fields.AllocID, False),
            (fields.LastMkt, False),
            (fields.TradingSessionID, False),
            (fields.Side, False),
            (fields.SecurityType, False),
            (fields.EffectiveTime, False),
            (fields.TransactTime, True),
            (fields.ClientID, False),
            (fields.ExecBroker, False),
            (fields.StandInstDbType, False),
            (fields.StandInstDbName, False),
            (fields.StandInstDbID, False),
            (fields.SettlDeliveryType, False),
            (fields.SettlDepositoryCode, False),
            (fields.SettlBrkrCode, False),
            (fields.SettlInstCode, False),
            (fields.SecuritySettlAgentName, False),
            (fields.SecuritySettlAgentCode, False),
            (fields.SecuritySettlAgentAcctNum, False),
            (fields.SecuritySettlAgentAcctName, False),
            (fields.SecuritySettlAgentContactName, False),
            (fields.SecuritySettlAgentContactPhone, False),
            (fields.CashSettlAgentName, False),
            (fields.CashSettlAgentCode, False),
            (fields.CashSettlAgentAcctNum, False),
            (fields.CashSettlAgentAcctName, False),
            (fields.CashSettlAgentContactName, False),
            (fields.CashSettlAgentContactPhone, False),
        ):
            self.register_field(field, required)


MESSAGE_TYPES['T'] = SettlementInstructions
class MarketDataRequest(fix_message.MessageBase):
    """MsgType 'V' (MarketDataRequest) application message definition."""

    _msgtype = 'V'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Layout in wire order; the trailing 3-tuples are repeating groups.
        layout = (
            (fields.MDReqID, True),
            (fields.SubscriptionRequestType, True),
            (fields.MarketDepth, True),
            (fields.MDUpdateType, False),
            (fields.AggregatedBook, False),
            (fields.NoMDEntryTypes, NoMDEntryTypesGroup, True),
            (fields.NoRelatedSym, NoRelatedSymGroup, True),
        )
        for entry in layout:
            if len(entry) == 3:
                self.register_group(*entry)
            else:
                self.register_field(*entry)


MESSAGE_TYPES['V'] = MarketDataRequest
class MarketDataSnapshotFullRefresh(fix_message.MessageBase):
    """FIX application message, MsgType 'W' (Market Data - Snapshot/Full Refresh)."""
    _msgtype = 'W'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Fields are registered in FIX-defined order; the flag marks required fields.
        for tag, required in (
            (fields.MDReqID, False),
            (fields.Symbol, True),
            (fields.SymbolSfx, False),
            (fields.SecurityID, False),
            (fields.IDSource, False),
            (fields.SecurityType, False),
            (fields.MaturityMonthYear, False),
            (fields.MaturityDay, False),
            (fields.PutOrCall, False),
            (fields.StrikePrice, False),
            (fields.OptAttribute, False),
            (fields.ContractMultiplier, False),
            (fields.CouponRate, False),
            (fields.SecurityExchange, False),
            (fields.Issuer, False),
            (fields.EncodedIssuerLen, False),
            (fields.EncodedIssuer, False),
            (fields.SecurityDesc, False),
            (fields.EncodedSecurityDescLen, False),
            (fields.EncodedSecurityDesc, False),
            (fields.FinancialStatus, False),
            (fields.CorporateAction, False),
            (fields.TotalVolumeTraded, False),
        ):
            self.register_field(tag, required)
        self.register_group(fields.NoMDEntries, NoMDEntriesGroup, True)
MESSAGE_TYPES['W'] = MarketDataSnapshotFullRefresh
class MarketDataIncrementalRefresh(fix_message.MessageBase):
    """FIX application message, MsgType 'X' (Market Data - Incremental Refresh)."""
    _msgtype = 'X'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Single optional field followed by a mandatory repeating group.
        self.register_field(fields.MDReqID, False)
        self.register_group(fields.NoMDEntries, NoMDEntriesGroup, True)
MESSAGE_TYPES['X'] = MarketDataIncrementalRefresh
class MarketDataRequestReject(fix_message.MessageBase):
    """FIX application message, MsgType 'Y' (Market Data Request Reject)."""
    _msgtype = 'Y'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Fields are registered in FIX-defined order; the flag marks required fields.
        for tag, required in (
            (fields.MDReqID, True),
            (fields.MDReqRejReason, False),
            (fields.Text, False),
            (fields.EncodedTextLen, False),
            (fields.EncodedText, False),
        ):
            self.register_field(tag, required)
MESSAGE_TYPES['Y'] = MarketDataRequestReject
class QuoteCancel(fix_message.MessageBase):
    """FIX application message, MsgType 'Z' (Quote Cancel)."""
    _msgtype = 'Z'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Fields are registered in FIX-defined order; the flag marks required fields.
        for tag, required in (
            (fields.QuoteReqID, False),
            (fields.QuoteID, True),
            (fields.QuoteCancelType, True),
            (fields.QuoteResponseLevel, False),
            (fields.TradingSessionID, False),
        ):
            self.register_field(tag, required)
        self.register_group(fields.NoQuoteEntries, NoQuoteEntriesGroup, True)
MESSAGE_TYPES['Z'] = QuoteCancel
class QuoteStatusRequest(fix_message.MessageBase):
    """FIX application message, MsgType 'a' (Quote Status Request)."""
    _msgtype = 'a'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Fields are registered in FIX-defined order; the flag marks required fields.
        for tag, required in (
            (fields.QuoteID, False),
            (fields.Symbol, True),
            (fields.SymbolSfx, False),
            (fields.SecurityID, False),
            (fields.IDSource, False),
            (fields.SecurityType, False),
            (fields.MaturityMonthYear, False),
            (fields.MaturityDay, False),
            (fields.PutOrCall, False),
            (fields.StrikePrice, False),
            (fields.OptAttribute, False),
            (fields.ContractMultiplier, False),
            (fields.CouponRate, False),
            (fields.SecurityExchange, False),
            (fields.Issuer, False),
            (fields.EncodedIssuerLen, False),
            (fields.EncodedIssuer, False),
            (fields.SecurityDesc, False),
            (fields.EncodedSecurityDescLen, False),
            (fields.EncodedSecurityDesc, False),
            (fields.Side, False),
            (fields.TradingSessionID, False),
        ):
            self.register_field(tag, required)
MESSAGE_TYPES['a'] = QuoteStatusRequest
class QuoteAcknowledgement(fix_message.MessageBase):
    """FIX application message, MsgType 'b' (Quote Acknowledgement)."""
    _msgtype = 'b'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Fields are registered in FIX-defined order; the flag marks required fields.
        for tag, required in (
            (fields.QuoteReqID, False),
            (fields.QuoteID, False),
            (fields.QuoteAckStatus, True),
            (fields.QuoteRejectReason, False),
            (fields.QuoteResponseLevel, False),
            (fields.TradingSessionID, False),
            (fields.Text, False),
        ):
            self.register_field(tag, required)
        self.register_group(fields.NoQuoteSets, NoQuoteSetsGroup, False)
MESSAGE_TYPES['b'] = QuoteAcknowledgement
class SecurityDefinitionRequest(fix_message.MessageBase):
    """FIX application message, MsgType 'c' (Security Definition Request)."""
    _msgtype = 'c'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Fields are registered in FIX-defined order; the flag marks required fields.
        for tag, required in (
            (fields.SecurityReqID, True),
            (fields.SecurityRequestType, True),
            (fields.Symbol, False),
            (fields.SymbolSfx, False),
            (fields.SecurityID, False),
            (fields.IDSource, False),
            (fields.SecurityType, False),
            (fields.MaturityMonthYear, False),
            (fields.MaturityDay, False),
            (fields.PutOrCall, False),
            (fields.StrikePrice, False),
            (fields.OptAttribute, False),
            (fields.ContractMultiplier, False),
            (fields.CouponRate, False),
            (fields.SecurityExchange, False),
            (fields.Issuer, False),
            (fields.EncodedIssuerLen, False),
            (fields.EncodedIssuer, False),
            (fields.SecurityDesc, False),
            (fields.EncodedSecurityDescLen, False),
            (fields.EncodedSecurityDesc, False),
            (fields.Currency, False),
            (fields.Text, False),
            (fields.EncodedTextLen, False),
            (fields.EncodedText, False),
            (fields.TradingSessionID, False),
        ):
            self.register_field(tag, required)
        self.register_group(fields.NoRelatedSym, NoRelatedSymGroup, False)
MESSAGE_TYPES['c'] = SecurityDefinitionRequest
class SecurityDefinition(fix_message.MessageBase):
    """FIX application message, MsgType 'd' (Security Definition)."""
    _msgtype = 'd'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Fields are registered in FIX-defined order; the flag marks required fields.
        for tag, required in (
            (fields.SecurityReqID, True),
            (fields.SecurityResponseID, True),
            (fields.SecurityResponseType, False),
            (fields.TotalNumSecurities, True),
            (fields.Symbol, False),
            (fields.SymbolSfx, False),
            (fields.SecurityID, False),
            (fields.IDSource, False),
            (fields.SecurityType, False),
            (fields.MaturityMonthYear, False),
            (fields.MaturityDay, False),
            (fields.PutOrCall, False),
            (fields.StrikePrice, False),
            (fields.OptAttribute, False),
            (fields.ContractMultiplier, False),
            (fields.CouponRate, False),
            (fields.SecurityExchange, False),
            (fields.Issuer, False),
            (fields.EncodedIssuerLen, False),
            (fields.EncodedIssuer, False),
            (fields.SecurityDesc, False),
            (fields.EncodedSecurityDescLen, False),
            (fields.EncodedSecurityDesc, False),
            (fields.Currency, False),
            (fields.TradingSessionID, False),
            (fields.Text, False),
            (fields.EncodedTextLen, False),
            (fields.EncodedText, False),
        ):
            self.register_field(tag, required)
        self.register_group(fields.NoRelatedSym, NoRelatedSymGroup, False)
MESSAGE_TYPES['d'] = SecurityDefinition
class SecurityStatusRequest(fix_message.MessageBase):
    """FIX application message, MsgType 'e' (Security Status Request)."""
    _msgtype = 'e'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Fields are registered in FIX-defined order; the flag marks required fields.
        for tag, required in (
            (fields.SecurityStatusReqID, True),
            (fields.Symbol, True),
            (fields.SymbolSfx, False),
            (fields.SecurityID, False),
            (fields.IDSource, False),
            (fields.SecurityType, False),
            (fields.MaturityMonthYear, False),
            (fields.MaturityDay, False),
            (fields.PutOrCall, False),
            (fields.StrikePrice, False),
            (fields.OptAttribute, False),
            (fields.ContractMultiplier, False),
            (fields.CouponRate, False),
            (fields.SecurityExchange, False),
            (fields.Issuer, False),
            (fields.EncodedIssuerLen, False),
            (fields.EncodedIssuer, False),
            (fields.SecurityDesc, False),
            (fields.EncodedSecurityDescLen, False),
            (fields.EncodedSecurityDesc, False),
            (fields.Currency, False),
            (fields.SubscriptionRequestType, True),
            (fields.TradingSessionID, False),
        ):
            self.register_field(tag, required)
MESSAGE_TYPES['e'] = SecurityStatusRequest
class SecurityStatus(fix_message.MessageBase):
    """FIX application message, MsgType 'f' (Security Status)."""
    _msgtype = 'f'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Fields are registered in FIX-defined order; the flag marks required fields.
        for tag, required in (
            (fields.SecurityStatusReqID, False),
            (fields.Symbol, True),
            (fields.SymbolSfx, False),
            (fields.SecurityID, False),
            (fields.IDSource, False),
            (fields.SecurityType, False),
            (fields.MaturityMonthYear, False),
            (fields.MaturityDay, False),
            (fields.PutOrCall, False),
            (fields.StrikePrice, False),
            (fields.OptAttribute, False),
            (fields.ContractMultiplier, False),
            (fields.CouponRate, False),
            (fields.SecurityExchange, False),
            (fields.Issuer, False),
            (fields.EncodedIssuerLen, False),
            (fields.EncodedIssuer, False),
            (fields.SecurityDesc, False),
            (fields.EncodedSecurityDescLen, False),
            (fields.EncodedSecurityDesc, False),
            (fields.Currency, False),
            (fields.TradingSessionID, False),
            (fields.UnsolicitedIndicator, False),
            (fields.SecurityTradingStatus, False),
            (fields.FinancialStatus, False),
            (fields.CorporateAction, False),
            (fields.HaltReason, False),
            (fields.InViewOfCommon, False),
            (fields.DueToRelated, False),
            (fields.BuyVolume, False),
            (fields.SellVolume, False),
            (fields.HighPx, False),
            (fields.LowPx, False),
            (fields.LastPx, False),
            (fields.TransactTime, False),
            (fields.Adjustment, False),
        ):
            self.register_field(tag, required)
MESSAGE_TYPES['f'] = SecurityStatus
class TradingSessionStatusRequest(fix_message.MessageBase):
    """FIX application message, MsgType 'g' (Trading Session Status Request)."""
    _msgtype = 'g'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Fields are registered in FIX-defined order; the flag marks required fields.
        for tag, required in (
            (fields.TradSesReqID, True),
            (fields.TradingSessionID, False),
            (fields.TradSesMethod, False),
            (fields.TradSesMode, False),
            (fields.SubscriptionRequestType, True),
        ):
            self.register_field(tag, required)
MESSAGE_TYPES['g'] = TradingSessionStatusRequest
class TradingSessionStatus(fix_message.MessageBase):
    """FIX application message, MsgType 'h' (Trading Session Status)."""
    _msgtype = 'h'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Fields are registered in FIX-defined order; the flag marks required fields.
        for tag, required in (
            (fields.TradSesReqID, False),
            (fields.TradingSessionID, True),
            (fields.TradSesMethod, False),
            (fields.TradSesMode, False),
            (fields.UnsolicitedIndicator, False),
            (fields.TradSesStatus, True),
            (fields.TradSesStartTime, False),
            (fields.TradSesOpenTime, False),
            (fields.TradSesPreCloseTime, False),
            (fields.TradSesCloseTime, False),
            (fields.TradSesEndTime, False),
            (fields.TotalVolumeTraded, False),
            (fields.Text, False),
            (fields.EncodedTextLen, False),
            (fields.EncodedText, False),
        ):
            self.register_field(tag, required)
MESSAGE_TYPES['h'] = TradingSessionStatus
class MassQuote(fix_message.MessageBase):
    """FIX application message, MsgType 'i' (Mass Quote)."""
    _msgtype = 'i'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Fields are registered in FIX-defined order; the flag marks required fields.
        for tag, required in (
            (fields.QuoteReqID, False),
            (fields.QuoteID, True),
            (fields.QuoteResponseLevel, False),
            (fields.DefBidSize, False),
            (fields.DefOfferSize, False),
        ):
            self.register_field(tag, required)
        self.register_group(fields.NoQuoteSets, NoQuoteSetsGroup, True)
MESSAGE_TYPES['i'] = MassQuote
class BusinessMessageReject(fix_message.MessageBase):
    """FIX application message, MsgType 'j' (Business Message Reject)."""
    _msgtype = 'j'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Fields are registered in FIX-defined order; the flag marks required fields.
        for tag, required in (
            (fields.RefSeqNum, False),
            (fields.RefMsgType, True),
            (fields.BusinessRejectRefID, False),
            (fields.BusinessRejectReason, True),
            (fields.Text, False),
            (fields.EncodedTextLen, False),
            (fields.EncodedText, False),
        ):
            self.register_field(tag, required)
MESSAGE_TYPES['j'] = BusinessMessageReject
class BidRequest(fix_message.MessageBase):
    """FIX application message, MsgType 'k' (Bid Request)."""
    _msgtype = 'k'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Registration order follows the FIX layout; note the two repeating
        # groups are registered *between* the two field batches, exactly as
        # the message defines them.
        for tag, required in (
            (fields.BidID, False),
            (fields.ClientBidID, True),
            (fields.BidRequestTransType, True),
            (fields.ListName, False),
            (fields.TotalNumSecurities, True),
            (fields.BidType, True),
            (fields.NumTickets, False),
            (fields.Currency, False),
            (fields.SideValue1, False),
            (fields.SideValue2, False),
        ):
            self.register_field(tag, required)
        self.register_group(fields.NoBidDescriptors, NoBidDescriptorsGroup, False)
        self.register_group(fields.NoBidComponents, NoBidComponentsGroup, False)
        for tag, required in (
            (fields.LiquidityIndType, False),
            (fields.WtAverageLiquidity, False),
            (fields.ExchangeForPhysical, False),
            (fields.OutMainCntryUIndex, False),
            (fields.CrossPercent, False),
            (fields.ProgRptReqs, False),
            (fields.ProgPeriodInterval, False),
            (fields.IncTaxInd, False),
            (fields.ForexReq, False),
            (fields.NumBidders, False),
            (fields.TradeDate, False),
            (fields.TradeType, True),
            (fields.BasisPxType, True),
            (fields.StrikeTime, False),
            (fields.Text, False),
            (fields.EncodedTextLen, False),
            (fields.EncodedText, False),
        ):
            self.register_field(tag, required)
MESSAGE_TYPES['k'] = BidRequest
class BidResponse(fix_message.MessageBase):
    """FIX application message, MsgType 'l' (Bid Response)."""
    _msgtype = 'l'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Two optional identifier fields plus a mandatory component group.
        self.register_field(fields.BidID, False)
        self.register_field(fields.ClientBidID, False)
        self.register_group(fields.NoBidComponents, NoBidComponentsGroup, True)
MESSAGE_TYPES['l'] = BidResponse
class ListStrikePrice(fix_message.MessageBase):
    """FIX application message, MsgType 'm' (List Strike Price)."""
    _msgtype = 'm'
    _msgcat = 'app'

    def __init__(self):
        self.Header = Header()
        self.Trailer = Trailer()
        super().__init__()
        # Both fields and the strikes group are required for this message.
        self.register_field(fields.ListID, True)
        self.register_field(fields.TotNoStrikes, True)
        self.register_group(fields.NoStrikes, NoStrikesGroup, True)
MESSAGE_TYPES['m'] = ListStrikePrice
| 44.695379
| 84
| 0.716596
| 8,327
| 78,351
| 6.515552
| 0.060886
| 0.253912
| 0.348429
| 0.471404
| 0.873376
| 0.779007
| 0.771708
| 0.763303
| 0.761699
| 0.758179
| 0
| 0.000659
| 0.186188
| 78,351
| 1,752
| 85
| 44.72089
| 0.850227
| 0.000944
| 0
| 0.719874
| 0
| 0
| 0.003211
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042902
| false
| 0
| 0.001893
| 0
| 0.14448
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
bb9dee1e660a88e5b77ff492de4bb87e6c0e8084
| 1,267
|
py
|
Python
|
suitcase/nxsas/tests/test__build_bluesky_document_path.py
|
jklynch/suitcase-sas
|
914572806f47f7a3a373755e772eafc45a9c3dd4
|
[
"BSD-3-Clause"
] | 1
|
2021-07-30T11:03:26.000Z
|
2021-07-30T11:03:26.000Z
|
suitcase/nxsas/tests/test__build_bluesky_document_path.py
|
jklynch/suitcase-sas
|
914572806f47f7a3a373755e772eafc45a9c3dd4
|
[
"BSD-3-Clause"
] | 2
|
2020-07-02T18:22:06.000Z
|
2020-12-02T16:38:52.000Z
|
suitcase/nxsas/tests/test__build_bluesky_document_path.py
|
jklynch/suitcase-sas
|
914572806f47f7a3a373755e772eafc45a9c3dd4
|
[
"BSD-3-Clause"
] | 3
|
2020-06-07T13:54:13.000Z
|
2020-08-10T13:36:28.000Z
|
from suitcase.nxsas.utils import _parse_bluesky_document_path
def test__build_bluesky_document_path():
    """Check _parse_bluesky_document_path on representative bluesky paths.

    Each case maps an input path to the subset of parsed entries we expect;
    only the listed keys are asserted, matching the original test's coverage.
    """
    cases = [
        ("#bluesky/start@abc", {"doc": "start", "attribute": "abc"}),
        ("#bluesky/start/abc", {"doc": "start", "keys": ("abc",)}),
        ("#bluesky/start/abc/def", {"doc": "start", "keys": ("abc", "def")}),
        (
            "#bluesky/start/abc/def@ghi",
            {"doc": "start", "keys": ("abc", "def"), "attribute": "ghi"},
        ),
        (
            "#bluesky/desc/primary/abc/def@ghi",
            {
                "doc": "desc",
                "stream": "primary",
                "keys": ("abc", "def"),
                "attribute": "ghi",
            },
        ),
        (
            "#bluesky/stop/abc/def@ghi",
            {"doc": "stop", "keys": ("abc", "def"), "attribute": "ghi"},
        ),
    ]
    for path, expected in cases:
        parsed = _parse_bluesky_document_path(path)
        for entry, value in expected.items():
            assert parsed[entry] == value
| 39.59375
| 83
| 0.685083
| 161
| 1,267
| 5.049689
| 0.149068
| 0.270603
| 0.314883
| 0.206642
| 0.822878
| 0.822878
| 0.822878
| 0.750308
| 0.750308
| 0.750308
| 0
| 0
| 0.147593
| 1,267
| 31
| 84
| 40.870968
| 0.752778
| 0
| 0
| 0.458333
| 0
| 0
| 0.233623
| 0.083662
| 0
| 0
| 0
| 0
| 0.666667
| 1
| 0.041667
| false
| 0
| 0.041667
| 0
| 0.083333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
bba092f23ad87dce6867534769fc80cb6bf2015e
| 1,233
|
py
|
Python
|
scripts/test_similarity.py
|
MOOC-Learner-Project/edx-extension-code-similarity
|
71f59adcd799bb63f3e08ada8eae34ac736a6537
|
[
"MIT"
] | null | null | null |
scripts/test_similarity.py
|
MOOC-Learner-Project/edx-extension-code-similarity
|
71f59adcd799bb63f3e08ada8eae34ac736a6537
|
[
"MIT"
] | null | null | null |
scripts/test_similarity.py
|
MOOC-Learner-Project/edx-extension-code-similarity
|
71f59adcd799bb63f3e08ada8eae34ac736a6537
|
[
"MIT"
] | null | null | null |
from compare_trajectories import get_similarity
def test_samples():
    """Print similarity scores of hand-picked trajectories for three problems.

    Purely observational: results are printed, not asserted, exactly as in
    the original script.
    """
    print("1-1")
    for trajectory in (
        'for print if',
        'for print if if if for',
        'for print if if if if if',
        'for',
        'for print',
        'for print if if if',
    ):
        print(get_similarity(trajectory, '1-1', should_validate=False))
    # This one is real source text (validated), not a keyword trajectory.
    print(get_similarity('a\na\np\na\nfor i in range(10):\n    print(i)\nr=2', '1-1'))
    print("1-2")
    for trajectory in ('for print if', 'for print', 'for break continue if print'):
        print(get_similarity(trajectory, '1-2', should_validate=False))
    print("1-3")
    for trajectory in (
        'for print if if else',
        'for',
        'for print',
        'for print for if else if',
    ):
        print(get_similarity(trajectory, '1-3', should_validate=False))
if __name__ == "__main__":
    test_samples()
| 45.666667
| 86
| 0.690998
| 194
| 1,233
| 4.190722
| 0.170103
| 0.239852
| 0.309963
| 0.335793
| 0.808118
| 0.782288
| 0.782288
| 0.640836
| 0.640836
| 0.479705
| 0
| 0.035071
| 0.144363
| 1,233
| 26
| 87
| 47.423077
| 0.735545
| 0
| 0
| 0
| 0
| 0.047619
| 0.24412
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.047619
| true
| 0
| 0.047619
| 0
| 0.095238
| 0.809524
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
bbb29607113b1bf49819699274dd67cefc202b9f
| 11,884
|
py
|
Python
|
housebot/comm/rotate_useragent.py
|
jbkopecky/housebot
|
750b46f0ce61535b47fc23e39a4cc0baaad0f51c
|
[
"MIT"
] | 3
|
2016-09-22T12:25:18.000Z
|
2017-11-08T15:58:00.000Z
|
housebot/comm/rotate_useragent.py
|
jbkopecky/housebot
|
750b46f0ce61535b47fc23e39a4cc0baaad0f51c
|
[
"MIT"
] | null | null | null |
housebot/comm/rotate_useragent.py
|
jbkopecky/housebot
|
750b46f0ce61535b47fc23e39a4cc0baaad0f51c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/python
#-*-coding:utf-8-*-
import random
from scrapy.downloadermiddlewares.useragent import UserAgentMiddleware
class RotateUserAgentMiddleware(UserAgentMiddleware):
    def __init__(self, user_agent=''):
        """Store the given default user agent string (defaults to empty).

        NOTE(review): this override does not call
        UserAgentMiddleware.__init__, so any base-class setup is skipped —
        confirm that is intentional.
        """
        self.user_agent = user_agent
def process_request(self, request, spider):
ua = random.choice(self.user_agent_list)
if ua:
request.headers.setdefault('User-Agent', ua)
    # The default user_agent_list covers Chrome, IE, Firefox, Mozilla, Opera and Netscape.
    # More user-agent strings can be found at http://www.useragentstring.com/pages/useragentstring.php
user_agent_list = [\
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/22.0.1207.1 Safari/537.1",\
"Mozilla/5.0 (X11; CrOS i686 2268.111.0) AppleWebKit/536.11 (KHTML, like Gecko) Chrome/20.0.1132.57 Safari/536.11",\
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1092.0 Safari/536.6",\
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.6 (KHTML, like Gecko) Chrome/20.0.1090.0 Safari/536.6",\
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.1 (KHTML, like Gecko) Chrome/19.77.34.5 Safari/537.1",\
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.9 Safari/536.5",\
"Mozilla/5.0 (Windows NT 6.0) AppleWebKit/536.5 (KHTML, like Gecko) Chrome/19.0.1084.36 Safari/536.5",\
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",\
"Mozilla/5.0 (Windows NT 5.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_8_0) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1063.0 Safari/536.3",\
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",\
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1062.0 Safari/536.3",\
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",\
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",\
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.1 Safari/536.3",\
"Mozilla/5.0 (Windows NT 6.2) AppleWebKit/536.3 (KHTML, like Gecko) Chrome/19.0.1061.0 Safari/536.3",\
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24",\
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/535.24 (KHTML, like Gecko) Chrome/19.0.1055.1 Safari/535.24"
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",\
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",\
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/601.7.7 (KHTML, like Gecko) Version/9.1.2 Safari/601.7.7",\
"Mozilla/5.0 (Windows NT 10.0; WOW64; rv:48.0) Gecko/20100101 Firefox/48.0",\
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:48.0) Gecko/20100101 Firefox/48.0",\
"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",\
"Mozilla/5.0 (Windows NT 6.3; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",\
"Mozilla/5.0 (Windows NT 6.1; WOW64; Trident/7.0; rv:11.0) like Gecko",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",\
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",\
"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:48.0) Gecko/20100101 Firefox/48.0",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.11; rv:48.0) Gecko/20100101 Firefox/48.0",\
"Mozilla/5.0 (Windows NT 6.1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/601.7.8 (KHTML, like Gecko) Version/9.1.3 Safari/601.7.8",\
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:47.0) Gecko/20100101 Firefox/47.0",\
"Mozilla/5.0 (Windows NT 10.0; WOW64; rv:47.0) Gecko/20100101 Firefox/47.0",\
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/46.0.2486.0 Safari/537.36 Edge/13.10586",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/601.6.17 (KHTML, like Gecko) Version/9.1.1 Safari/601.6.17",\
"Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_4) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.11; rv:47.0) Gecko/20100101 Firefox/47.0",\
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Ubuntu Chromium/51.0.2704.79 Chrome/51.0.2704.79 Safari/537.36",\
"Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/601.7.7 (KHTML, like Gecko) Version/9.1.2 Safari/601.7.7",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_3) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",\
"Mozilla/5.0 (X11; Linux x86_64; rv:45.0) Gecko/20100101 Firefox/45.0",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",\
"Mozilla/5.0 (Windows NT 6.3; WOW64; rv:48.0) Gecko/20100101 Firefox/48.0",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12) AppleWebKit/602.1.50 (KHTML, like Gecko) Version/10.0 Safari/602.1.50",\
"Mozilla/5.0 (X11; Linux x86_64; rv:48.0) Gecko/20100101 Firefox/48.0",\
"Mozilla/5.0 (Windows NT 6.1; rv:48.0) Gecko/20100101 Firefox/48.0",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.10; rv:48.0) Gecko/20100101 Firefox/48.0",\
"Mozilla/5.0 (Windows NT 10.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",\
"Mozilla/5.0 (Windows NT 6.1; Trident/7.0; rv:11.0) like Gecko",\
"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:48.0) Gecko/20100101 Firefox/48.0",\
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.101 Safari/537.36",\
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.101 Safari/537.36",\
"Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:47.0) Gecko/20100101 Firefox/47.0",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.101 Safari/537.36",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.89 Safari/537.36",\
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36",\
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36",\
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.89 Safari/537.36",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_1) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",\
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.101 Safari/537.36",\
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.79 Safari/537.36 Edge/14.14393",\
"Mozilla/5.0 (Windows NT 6.3; WOW64; rv:47.0) Gecko/20100101 Firefox/47.0",\
"Mozilla/5.0 (iPad; CPU OS 9_3_4 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13G35 Safari/601.1",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_4) AppleWebKit/601.5.17 (KHTML, like Gecko) Version/9.1 Safari/601.5.17",\
"Mozilla/5.0 (Windows NT 6.1; rv:47.0) Gecko/20100101 Firefox/47.0",\
"Mozilla/5.0 (Windows NT 6.1; WOW64; rv:45.0) Gecko/20100101 Firefox/45.0",\
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.106 Safari/537.36",\
"Mozilla/5.0 (Windows NT 6.3; WOW64; Trident/7.0; rv:11.0) like Gecko",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_2) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36",\
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.92 Safari/537.36",\
"Mozilla/5.0 (compatible; MSIE 9.0; Windows NT 6.1; Trident/5.0; Trident/5.0)",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36",\
"Mozilla/5.0 (Windows NT 6.2; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",\
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.103 Safari/537.36",\
"Mozilla/5.0 (Windows NT 6.1; Win64; x64; rv:48.0) Gecko/20100101 Firefox/48.0",\
"Mozilla/5.0 (iPad; CPU OS 9_3_5 like Mac OS X) AppleWebKit/601.1.46 (KHTML, like Gecko) Version/9.0 Mobile/13G36 Safari/601.1",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/601.7.7 (KHTML, like Gecko) Version/9.1.2 Safari/537.86.7",\
"Mozilla/5.0 (Windows NT 5.1; rv:48.0) Gecko/20100101 Firefox/48.0",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",\
"Mozilla/5.0 (Windows NT 6.1; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36 OPR/39.0.2256.48",\
"Mozilla/5.0 (X11; Fedora; Linux x86_64; rv:48.0) Gecko/20100101 Firefox/48.0",\
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.89 Safari/537.36",\
"Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.82 Safari/537.36 OPR/39.0.2256.48",\
"Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/53.0.2785.101 Safari/537.36",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",\
"Mozilla/5.0 (X11; CrOS x86_64 8350.68.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Safari/537.36",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.11; rv:45.0) Gecko/20100101 Firefox/45.0",\
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10.9; rv:48.0) Gecko/20100101 Firefox/48.0",\
]
| 104.245614
| 149
| 0.647089
| 2,112
| 11,884
| 3.605114
| 0.073864
| 0.025479
| 0.112293
| 0.152351
| 0.900841
| 0.899527
| 0.888232
| 0.870239
| 0.847386
| 0.819018
| 0
| 0.233726
| 0.192107
| 11,884
| 113
| 150
| 105.168142
| 0.559317
| 0.018092
| 0
| 0
| 0
| 0.896226
| 0.825032
| 0.0018
| 0
| 0
| 0
| 0
| 0
| 1
| 0.018868
| false
| 0
| 0.018868
| 0
| 0.056604
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
bbb63db4f79cae56c7f0a66c97c10bbbe6820fc9
| 44
|
py
|
Python
|
web/frontend/hass_db/__init__.py
|
tcsvn/activity-assistant
|
eeb0ef72a046a8a781ff31b384edec8243dd22a7
|
[
"MIT"
] | 45
|
2020-11-06T20:31:13.000Z
|
2022-03-24T06:14:18.000Z
|
web/frontend/hass_db/__init__.py
|
tcsvn/activity-assistant
|
eeb0ef72a046a8a781ff31b384edec8243dd22a7
|
[
"MIT"
] | 10
|
2020-12-14T00:17:11.000Z
|
2022-02-06T19:39:01.000Z
|
web/frontend/hass_db/__init__.py
|
tcsvn/activity-assistant
|
eeb0ef72a046a8a781ff31b384edec8243dd22a7
|
[
"MIT"
] | 3
|
2020-12-15T22:50:09.000Z
|
2022-03-13T21:12:28.000Z
|
from .hass_db import url_from_hass_config
| 11
| 41
| 0.840909
| 8
| 44
| 4.125
| 0.75
| 0.484848
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.136364
| 44
| 3
| 42
| 14.666667
| 0.868421
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bbbd11689b57de4d9b9af5d907ff5e554c74b5f7
| 20,643
|
py
|
Python
|
pyaz/aks/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/aks/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/aks/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | 1
|
2022-02-03T09:12:01.000Z
|
2022-02-03T09:12:01.000Z
|
'''
Manage Azure Kubernetes Services.
'''
from .. pyaz_utils import _call_az
from . import command, nodepool
def browse(name, resource_group, disable_browser=None, listen_address=None, listen_port=None):
    '''
    Show the dashboard for a Kubernetes cluster in a web browser.
    Required Parameters:
    - name -- Name of the managed cluster.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Optional Parameters:
    - disable_browser -- None
    - listen_address -- None
    - listen_port -- None
    '''
    # locals() is forwarded verbatim as the CLI argument map; do not introduce
    # any local variables before this call.
    return _call_az("az aks browse", locals())
def create(name, resource_group, aad_admin_group_object_ids=None, aad_client_app_id=None, aad_server_app_id=None, aad_server_app_secret=None, aad_tenant_id=None, aci_subnet_name=None, admin_username=None, aks_custom_headers=None, api_server_authorized_ip_ranges=None, appgw_id=None, appgw_name=None, appgw_subnet_cidr=None, appgw_subnet_id=None, appgw_watch_namespace=None, assign_identity=None, assign_kubelet_identity=None, attach_acr=None, auto_upgrade_channel=None, client_secret=None, cluster_autoscaler_profile=None, disable_local_accounts=None, disable_public_fqdn=None, disable_rbac=None, dns_name_prefix=None, dns_service_ip=None, docker_bridge_address=None, edge_zone=None, enable_aad=None, enable_addons=None, enable_ahub=None, enable_azure_rbac=None, enable_cluster_autoscaler=None, enable_encryption_at_host=None, enable_managed_identity=None, enable_node_public_ip=None, enable_private_cluster=None, enable_rbac=None, enable_secret_rotation=None, enable_sgxquotehelper=None, enable_ultra_ssd=None, fqdn_subdomain=None, generate_ssh_keys=None, kubernetes_version=None, load_balancer_idle_timeout=None, load_balancer_managed_outbound_ip_count=None, load_balancer_outbound_ip_prefixes=None, load_balancer_outbound_ips=None, load_balancer_outbound_ports=None, load_balancer_sku=None, location=None, max_count=None, max_pods=None, min_count=None, network_plugin=None, network_policy=None, no_ssh_key=None, no_wait=None, node_count=None, node_osdisk_diskencryptionset_id=None, node_osdisk_size=None, node_osdisk_type=None, node_public_ip_prefix_id=None, node_vm_size=None, nodepool_labels=None, nodepool_name=None, nodepool_tags=None, os_sku=None, outbound_type=None, pod_cidr=None, ppg=None, private_dns_zone=None, rotation_poll_interval=None, service_cidr=None, service_principal=None, skip_subnet_role_assignment=None, ssh_key_value=None, tags=None, uptime_sla=None, vm_set_type=None, vnet_subnet_id=None, windows_admin_password=None, windows_admin_username=None, 
workspace_resource_id=None, yes=None, zones=None):
    '''
    Create a new managed Kubernetes cluster.
    Required Parameters:
    - name -- Name of the managed cluster.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Optional Parameters:
    - aad_admin_group_object_ids -- None
    - aad_client_app_id -- None
    - aad_server_app_id -- None
    - aad_server_app_secret -- None
    - aad_tenant_id -- None
    - aci_subnet_name -- None
    - admin_username -- None
    - aks_custom_headers -- None
    - api_server_authorized_ip_ranges -- None
    - appgw_id -- None
    - appgw_name -- None
    - appgw_subnet_cidr -- None
    - appgw_subnet_id -- None
    - appgw_watch_namespace -- None
    - assign_identity -- None
    - assign_kubelet_identity -- None
    - attach_acr -- None
    - auto_upgrade_channel -- None
    - client_secret -- None
    - cluster_autoscaler_profile -- Space-separated list of key=value pairs for configuring cluster autoscaler. Pass an empty string to clear the profile.
    - disable_local_accounts -- None
    - disable_public_fqdn -- None
    - disable_rbac -- None
    - dns_name_prefix -- None
    - dns_service_ip -- None
    - docker_bridge_address -- None
    - edge_zone -- The name of edge zone.
    - enable_aad -- None
    - enable_addons -- None
    - enable_ahub -- None
    - enable_azure_rbac -- None
    - enable_cluster_autoscaler -- None
    - enable_encryption_at_host -- None
    - enable_managed_identity -- None
    - enable_node_public_ip -- None
    - enable_private_cluster -- None
    - enable_rbac -- None
    - enable_secret_rotation -- None
    - enable_sgxquotehelper -- None
    - enable_ultra_ssd -- None
    - fqdn_subdomain -- None
    - generate_ssh_keys -- None
    - kubernetes_version -- None
    - load_balancer_idle_timeout -- None
    - load_balancer_managed_outbound_ip_count -- None
    - load_balancer_outbound_ip_prefixes -- None
    - load_balancer_outbound_ips -- None
    - load_balancer_outbound_ports -- None
    - load_balancer_sku -- None
    - location -- Location. Values from: `az account list-locations`. You can configure the default location using `az configure --defaults location=<location>`.
    - max_count -- None
    - max_pods -- None
    - min_count -- None
    - network_plugin -- None
    - network_policy -- None
    - no_ssh_key -- None
    - no_wait -- Do not wait for the long-running operation to finish.
    - node_count -- None
    - node_osdisk_diskencryptionset_id -- None
    - node_osdisk_size -- None
    - node_osdisk_type -- None
    - node_public_ip_prefix_id -- None
    - node_vm_size -- None
    - nodepool_labels -- space-separated labels: key[=value] [key[=value] ...]. See https://aka.ms/node-labels for syntax of labels.
    - nodepool_name -- Node pool name, up to 12 alphanumeric characters
    - nodepool_tags -- space-separated tags: key[=value] [key[=value] ...]. Use "" to clear existing tags.
    - os_sku -- None
    - outbound_type -- None
    - pod_cidr -- None
    - ppg -- None
    - private_dns_zone -- None
    - rotation_poll_interval -- None
    - service_cidr -- None
    - service_principal -- None
    - skip_subnet_role_assignment -- None
    - ssh_key_value -- None
    - tags -- space-separated tags: key[=value] [key[=value] ...]. Use '' to clear existing tags.
    - uptime_sla -- None
    - vm_set_type -- None
    - vnet_subnet_id -- None
    - windows_admin_password -- None
    - windows_admin_username -- None
    - workspace_resource_id -- None
    - yes -- Do not prompt for confirmation.
    - zones -- Space-separated list of availability zones where agent nodes will be placed.
    '''
    # Every parameter above becomes a CLI flag: locals() is forwarded verbatim
    # as the argument map, so do not introduce any local variables here.
    return _call_az("az aks create", locals())
def update(name, resource_group, aad_admin_group_object_ids=None, aad_tenant_id=None, aks_custom_headers=None, api_server_authorized_ip_ranges=None, assign_identity=None, attach_acr=None, auto_upgrade_channel=None, cluster_autoscaler_profile=None, detach_acr=None, disable_ahub=None, disable_azure_rbac=None, disable_cluster_autoscaler=None, disable_local_accounts=None, disable_public_fqdn=None, disable_secret_rotation=None, enable_aad=None, enable_ahub=None, enable_azure_rbac=None, enable_cluster_autoscaler=None, enable_local_accounts=None, enable_managed_identity=None, enable_public_fqdn=None, enable_secret_rotation=None, load_balancer_idle_timeout=None, load_balancer_managed_outbound_ip_count=None, load_balancer_outbound_ip_prefixes=None, load_balancer_outbound_ips=None, load_balancer_outbound_ports=None, max_count=None, min_count=None, no_uptime_sla=None, no_wait=None, nodepool_labels=None, rotation_poll_interval=None, tags=None, update_cluster_autoscaler=None, uptime_sla=None, windows_admin_password=None, yes=None):
    '''
    Update a managed Kubernetes cluster.
    Required Parameters:
    - name -- Name of the managed cluster.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Optional Parameters:
    - aad_admin_group_object_ids -- None
    - aad_tenant_id -- None
    - aks_custom_headers -- None
    - api_server_authorized_ip_ranges -- None
    - assign_identity -- None
    - attach_acr -- None
    - auto_upgrade_channel -- None
    - cluster_autoscaler_profile -- Space-separated list of key=value pairs for configuring cluster autoscaler. Pass an empty string to clear the profile.
    - detach_acr -- None
    - disable_ahub -- None
    - disable_azure_rbac -- None
    - disable_cluster_autoscaler -- None
    - disable_local_accounts -- None
    - disable_public_fqdn -- None
    - disable_secret_rotation -- None
    - enable_aad -- None
    - enable_ahub -- None
    - enable_azure_rbac -- None
    - enable_cluster_autoscaler -- None
    - enable_local_accounts -- None
    - enable_managed_identity -- None
    - enable_public_fqdn -- None
    - enable_secret_rotation -- None
    - load_balancer_idle_timeout -- None
    - load_balancer_managed_outbound_ip_count -- None
    - load_balancer_outbound_ip_prefixes -- None
    - load_balancer_outbound_ips -- None
    - load_balancer_outbound_ports -- None
    - max_count -- None
    - min_count -- None
    - no_uptime_sla -- None
    - no_wait -- Do not wait for the long-running operation to finish.
    - nodepool_labels -- space-separated labels: key[=value] [key[=value] ...]. See https://aka.ms/node-labels for syntax of labels.
    - rotation_poll_interval -- None
    - tags -- space-separated tags: key[=value] [key[=value] ...]. Use '' to clear existing tags.
    - update_cluster_autoscaler -- None
    - uptime_sla -- None
    - windows_admin_password -- None
    - yes -- Do not prompt for confirmation.
    '''
    # locals() is forwarded verbatim as the CLI argument map; no extra locals.
    return _call_az("az aks update", locals())
def delete(name, resource_group, no_wait=None, yes=None):
    '''
    Delete a managed Kubernetes cluster.
    Required Parameters:
    - name -- Name of the managed cluster.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Optional Parameters:
    - no_wait -- Do not wait for the long-running operation to finish.
    - yes -- Do not prompt for confirmation.
    '''
    # locals() is forwarded verbatim as the CLI argument map; no extra locals.
    return _call_az("az aks delete", locals())
def update_credentials(name, resource_group, aad_client_app_id=None, aad_server_app_id=None, aad_server_app_secret=None, aad_tenant_id=None, client_secret=None, no_wait=None, reset_aad=None, reset_service_principal=None, service_principal=None):
    '''
    Update credentials for a managed Kubernetes cluster, like service principal.
    Required Parameters:
    - name -- Name of the managed cluster.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Optional Parameters:
    - aad_client_app_id -- None
    - aad_server_app_id -- None
    - aad_server_app_secret -- None
    - aad_tenant_id -- None
    - client_secret -- None
    - no_wait -- Do not wait for the long-running operation to finish.
    - reset_aad -- None
    - reset_service_principal -- None
    - service_principal -- None
    '''
    # locals() is forwarded verbatim as the CLI argument map; no extra locals.
    return _call_az("az aks update-credentials", locals())
def disable_addons(addons, name, resource_group, no_wait=None):
    '''
    Disable Kubernetes addons.
    Required Parameters:
    - addons -- None
    - name -- Name of the managed cluster.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Optional Parameters:
    - no_wait -- Do not wait for the long-running operation to finish.
    '''
    # locals() is forwarded verbatim as the CLI argument map; no extra locals.
    return _call_az("az aks disable-addons", locals())
def enable_addons(addons, name, resource_group, appgw_id=None, appgw_name=None, appgw_subnet_cidr=None, appgw_subnet_id=None, appgw_watch_namespace=None, enable_secret_rotation=None, enable_sgxquotehelper=None, no_wait=None, rotation_poll_interval=None, subnet_name=None, workspace_resource_id=None):
    '''
    Enable Kubernetes addons.
    Required Parameters:
    - addons -- None
    - name -- Name of the managed cluster.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Optional Parameters:
    - appgw_id -- None
    - appgw_name -- None
    - appgw_subnet_cidr -- None
    - appgw_subnet_id -- None
    - appgw_watch_namespace -- None
    - enable_secret_rotation -- None
    - enable_sgxquotehelper -- None
    - no_wait -- Do not wait for the long-running operation to finish.
    - rotation_poll_interval -- None
    - subnet_name -- Name of an existing subnet to use with the virtual-node add-on.
    - workspace_resource_id -- None
    '''
    # locals() is forwarded verbatim as the CLI argument map; no extra locals.
    return _call_az("az aks enable-addons", locals())
def get_credentials(name, resource_group, admin=None, context=None, file=None, overwrite_existing=None, public_fqdn=None):
    '''
    Get access credentials for a managed Kubernetes cluster.
    Required Parameters:
    - name -- Name of the managed cluster.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Optional Parameters:
    - admin -- None
    - context -- If specified, overwrite the default context name.
    - file -- None
    - overwrite_existing -- None
    - public_fqdn -- None
    '''
    # NOTE: parameter `file` shadows no builtin but mirrors the CLI flag name;
    # it cannot be renamed without changing the emitted argument.
    # locals() is forwarded verbatim as the CLI argument map; no extra locals.
    return _call_az("az aks get-credentials", locals())
def check_acr(acr, name, resource_group):
    '''
    Validate an ACR is accessible from an AKS cluster.
    Required Parameters:
    - acr -- None
    - name -- Name of the managed cluster.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    # locals() is forwarded verbatim as the CLI argument map; no extra locals.
    return _call_az("az aks check-acr", locals())
def get_upgrades(name, resource_group):
    '''
    Get the upgrade versions available for a managed Kubernetes cluster.
    Required Parameters:
    - name -- Name of the managed cluster.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    # locals() is forwarded verbatim as the CLI argument map; no extra locals.
    return _call_az("az aks get-upgrades", locals())
def install_cli(base_src_url=None, client_version=None, install_location=None, kubelogin_base_src_url=None, kubelogin_install_location=None, kubelogin_version=None):
    '''
    Download and install kubectl, the Kubernetes command-line tool. Download and install kubelogin, a client-go credential (exec) plugin implementing azure authentication.
    Optional Parameters:
    - base_src_url -- Base download source URL for kubectl releases.
    - client_version -- Version of kubectl to install.
    - install_location -- Path at which to install kubectl.
    - kubelogin_base_src_url -- Base download source URL for kubelogin releases.
    - kubelogin_install_location -- Path at which to install kubelogin.
    - kubelogin_version -- Version of kubelogin to install.
    '''
    # locals() is forwarded verbatim as the CLI argument map; no extra locals.
    return _call_az("az aks install-cli", locals())
# NOTE: this function name shadows the builtin `list` inside this module; it
# mirrors the `az aks list` subcommand and cannot be renamed without breaking
# callers of pyaz.aks.list.
def list(resource_group=None):
    '''
    List managed Kubernetes clusters.
    Optional Parameters:
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    # locals() is forwarded verbatim as the CLI argument map; no extra locals.
    return _call_az("az aks list", locals())
def remove_dev_spaces(name, resource_group, yes=None):
    '''
    Remove Azure Dev Spaces from a managed Kubernetes cluster.
    Required Parameters:
    - name -- Name of the managed cluster.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Optional Parameters:
    - yes -- Do not prompt for confirmation
    '''
    # locals() is forwarded verbatim as the CLI argument map; no extra locals.
    return _call_az("az aks remove-dev-spaces", locals())
def scale(name, node_count, resource_group, no_wait=None, nodepool_name=None):
    '''
    Scale the node pool in a managed Kubernetes cluster.
    Required Parameters:
    - name -- Name of the managed cluster.
    - node_count -- None
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Optional Parameters:
    - no_wait -- Do not wait for the long-running operation to finish.
    - nodepool_name -- Node pool name, up to 12 alphanumeric characters
    '''
    # locals() is forwarded verbatim as the CLI argument map; no extra locals.
    return _call_az("az aks scale", locals())
def show(name, resource_group):
    '''
    Show the details for a managed Kubernetes cluster.
    Required Parameters:
    - name -- Name of the managed cluster.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    '''
    # locals() is forwarded verbatim as the CLI argument map; no extra locals.
    return _call_az("az aks show", locals())
def upgrade(name, resource_group, control_plane_only=None, kubernetes_version=None, no_wait=None, node_image_only=None, yes=None):
    '''
    Upgrade a managed Kubernetes cluster to a newer version.
    Required Parameters:
    - name -- Name of the managed cluster.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Optional Parameters:
    - control_plane_only -- None
    - kubernetes_version -- None
    - no_wait -- Do not wait for the long-running operation to finish.
    - node_image_only -- None
    - yes -- Do not prompt for confirmation.
    '''
    # locals() is forwarded verbatim as the CLI argument map; no extra locals.
    return _call_az("az aks upgrade", locals())
def use_dev_spaces(name, resource_group, endpoint=None, space=None, update=None, yes=None):
    '''
    Use Azure Dev Spaces with a managed Kubernetes cluster.
    Required Parameters:
    - name -- Name of the managed cluster.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Optional Parameters:
    - endpoint -- The endpoint type to be used for a Azure Dev Spaces controller. See https://aka.ms/azds-networking for more information.
    - space -- Name of the new or existing dev space to select. Defaults to an interactive selection experience.
    - update -- Update to the latest Azure Dev Spaces client components.
    - yes -- Do not prompt for confirmation. Requires --space.
    '''
    # NOTE: parameter `update` shadows this module's update() function inside
    # this body; harmless here since only locals() is used.
    # locals() is forwarded verbatim as the CLI argument map; no extra locals.
    return _call_az("az aks use-dev-spaces", locals())
def rotate_certs(name, resource_group, no_wait=None, yes=None):
    '''
    Rotate certificates and keys on a managed Kubernetes cluster
    Required Parameters:
    - name -- Name of the managed cluster.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Optional Parameters:
    - no_wait -- Do not wait for the long-running operation to finish.
    - yes -- Do not prompt for confirmation.
    '''
    # locals() is forwarded verbatim as the CLI argument map; no extra locals.
    return _call_az("az aks rotate-certs", locals())
def wait(name, resource_group, created=None, custom=None, deleted=None, exists=None, interval=None, timeout=None, updated=None):
    '''
    Wait for a managed Kubernetes cluster to reach a desired state.
    Required Parameters:
    - name -- Name of the managed cluster.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Optional Parameters:
    - created -- wait until created with 'provisioningState' at 'Succeeded'
    - custom -- Wait until the condition satisfies a custom JMESPath query. E.g. provisioningState!='InProgress', instanceView.statuses[?code=='PowerState/running']
    - deleted -- wait until deleted
    - exists -- wait until the resource exists
    - interval -- polling interval in seconds
    - timeout -- maximum wait in seconds
    - updated -- wait until updated with provisioningState at 'Succeeded'
    '''
    # locals() is forwarded verbatim as the CLI argument map; no extra locals.
    return _call_az("az aks wait", locals())
def stop(name, resource_group, no_wait=None):
    '''
    Stop a managed Kubernetes cluster (wraps `az aks stop`).
    Required Parameters:
    - name -- Name of the managed cluster.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Optional Parameters:
    - no_wait -- Do not wait for the long-running operation to finish.
    '''
    # locals() is forwarded verbatim as the CLI argument map; no extra locals.
    return _call_az("az aks stop", locals())
def start(name, resource_group, no_wait=None):
    '''
    Start a previously stopped managed Kubernetes cluster (wraps `az aks start`).
    Required Parameters:
    - name -- Name of the managed cluster.
    - resource_group -- Name of resource group. You can configure the default group using `az configure --defaults group=<name>`
    Optional Parameters:
    - no_wait -- Do not wait for the long-running operation to finish.
    '''
    # locals() is forwarded verbatim as the CLI argument map; no extra locals.
    return _call_az("az aks start", locals())
def get_versions(location):
    '''
    Get the versions available for creating a managed Kubernetes cluster.
    Required Parameters:
    - location -- Location. Values from: `az account list-locations`. You can configure the default location using `az configure --defaults location=<location>`.
    '''
    # locals() is forwarded verbatim as the CLI argument map; no extra locals.
    return _call_az("az aks get-versions", locals())
| 44.393548
| 2,025
| 0.722618
| 2,772
| 20,643
| 5.137446
| 0.106782
| 0.054771
| 0.023173
| 0.027807
| 0.807879
| 0.780774
| 0.757812
| 0.751071
| 0.735482
| 0.722702
| 0
| 0.000238
| 0.185099
| 20,643
| 464
| 2,026
| 44.489224
| 0.846332
| 0.628688
| 0
| 0
| 0
| 0
| 0.057207
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.478261
| false
| 0.043478
| 0.043478
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
bbf296c5b9546c1b68436db02252a70429c63391
| 212
|
py
|
Python
|
cleanflow/__init__.py
|
vutsalsinghal/CleanFlow
|
4cfbe345ca3e59a254baf5aa9f6eae329b918527
|
[
"MIT"
] | 1
|
2018-09-02T19:56:28.000Z
|
2018-09-02T19:56:28.000Z
|
cleanflow/__init__.py
|
vutsalsinghal/CleanFlow
|
4cfbe345ca3e59a254baf5aa9f6eae329b918527
|
[
"MIT"
] | null | null | null |
cleanflow/__init__.py
|
vutsalsinghal/CleanFlow
|
4cfbe345ca3e59a254baf5aa9f6eae329b918527
|
[
"MIT"
] | 1
|
2018-05-03T20:13:18.000Z
|
2018-05-03T20:13:18.000Z
|
from .assertions import assert_type_str,assert_cols_in_df, assert_type_str_or_list, assert_type_int_or_float
__all__ = ['assert_type_str_or_list','assert_type_int_or_float','assert_type_str','assert_cols_in_df']
| 70.666667
| 108
| 0.872642
| 38
| 212
| 4.078947
| 0.368421
| 0.387097
| 0.335484
| 0.245161
| 0.851613
| 0.851613
| 0.851613
| 0.503226
| 0.503226
| 0.503226
| 0
| 0
| 0.042453
| 212
| 3
| 109
| 70.666667
| 0.763547
| 0
| 0
| 0
| 0
| 0
| 0.370892
| 0.220657
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 9
|
a59041d2d410a8ed7f31ac54cc4446f8da617f82
| 28,416
|
py
|
Python
|
models/drocc.py
|
jbr-ai-labs/PU-OC
|
4030a67353594d864a2a9482dd3f5d206cbd28ae
|
[
"MIT"
] | null | null | null |
models/drocc.py
|
jbr-ai-labs/PU-OC
|
4030a67353594d864a2a9482dd3f5d206cbd28ae
|
[
"MIT"
] | null | null | null |
models/drocc.py
|
jbr-ai-labs/PU-OC
|
4030a67353594d864a2a9482dd3f5d206cbd28ae
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn.functional as F
import torch.utils.data
import torch.utils.data
from models.base_models import OCModel, PUModelRandomBatch
from models.classifiers import Net
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# code DROCC is borrowed from https://github.com/microsoft/EdgeML
class DROCC(OCModel):
    """One-class classifier trained with the DROCC objective.

    Each batch is scored with a plain BCE-with-logits loss; once
    ``warmup_epochs`` have elapsed, an adversarial term computed by
    gradient ascent around the positive (label == 1) samples is added,
    weighted by ``lam`` (see ``one_class_adv_loss``).
    Implementation borrowed from https://github.com/microsoft/EdgeML.
    """

    def __init__(self,
                 model=Net,
                 lam=0.5,
                 radius=8,
                 gamma=2,
                 warmup_epochs=6,
                 ascent_step_size=0.001,
                 ascent_num_steps=50,
                 half=True):
        # lam -- weight of the adversarial loss term in the total loss.
        # radius / gamma -- adversarial points are projected into the shell
        #   between spheres of radius `radius` and `gamma * radius`.
        # warmup_epochs -- epochs trained with CE only before AdvLoss is added.
        # ascent_step_size / ascent_num_steps -- gradient-ascent schedule.
        # half -- when True, ascent runs on the intermediate representation
        #   produced by model.forward_start (and only forward_end is applied);
        #   when False, ascent runs on the raw inputs.
        super().__init__(model, 0)
        self.lam = lam
        self.radius = radius
        self.gamma = gamma
        self.warmup_epochs = warmup_epochs
        self.ascent_step_size = ascent_step_size
        self.ascent_num_steps = ascent_num_steps
        self.half = half

    def batch_loss(self, batch):
        """Return the DROCC loss (a scalar tensor) for one batch.

        Batch layout: batch[0] holds the inputs, batch[2] the 0/1 labels
        (batch[1] is unused here).
        """
        data, target = batch[0], batch[2]
        data, target = data.to(device), target.to(device)
        # Data Processing
        data = data.to(torch.float)
        target = target.to(torch.float)
        target = torch.squeeze(target)
        # Extract the logits for cross entropy loss
        logits_start = self.model.forward_start(data)
        logits = self.model.forward_end(logits_start)
        logits = torch.squeeze(logits, dim=1)
        ce_loss = F.binary_cross_entropy_with_logits(logits, target)
        # Add to the epoch variable for printing average CE Loss
        '''
        Adversarial Loss is calculated only for the positive data points (label==1).
        '''
        # NOTE(review): self.epoch is presumably maintained by the OCModel
        # training loop -- not visible in this file; confirm before relying
        # on it.
        if self.epoch >= self.warmup_epochs:
            logits_start = logits_start[target == 1]
            # AdvLoss
            if not self.half:
                # Ascent on the raw inputs of the positive samples.
                adv_loss = self.one_class_adv_loss(data[target == 1].detach(), self.half)
            else:
                # Ascent in the intermediate feature space.
                adv_loss = self.one_class_adv_loss(logits_start.detach(), self.half)
            loss = ce_loss + adv_loss * self.lam
        else:
            # If only CE based training has to be done
            loss = ce_loss
        return loss

    def one_class_adv_loss(self, x_train_data, half=True):
        """Computes the adversarial loss:
        1) Sample points initially at random around the positive training
        data points
        2) Gradient ascent to find the most optimal point in set N_i(r)
        classified as +ve (label=0). This is done by maximizing
        the CE loss wrt label 0
        3) Project the points between spheres of radius R and gamma * R
        (set N_i(r))
        4) Pass the calculated adversarial points through the model,
        and calculate the CE loss wrt target class 0
        Parameters
        ----------
        x_train_data: Batch of data to compute loss on.
        half: when True, x_train_data is an intermediate representation and
            only model.forward_end is applied; otherwise the full model is.
        """
        batch_size = len(x_train_data)
        # Randomly sample points around the training data
        # We will perform SGD on these to find the adversarial points
        x_adv = torch.randn(x_train_data.shape).to(device).detach().requires_grad_()
        x_adv_sampled = x_adv + x_train_data
        for step in range(self.ascent_num_steps):
            with torch.enable_grad():
                # All-zero targets: ascend the BCE loss away from label 1.
                new_targets = torch.zeros(batch_size, 1).to(device)
                # new_targets = (1 - targets).to(self.device)
                new_targets = torch.squeeze(new_targets)
                new_targets = new_targets.to(torch.float)
                if half:
                    logits = self.model.forward_end(x_adv_sampled)
                else:
                    logits = self.model(x_adv_sampled)
                logits = torch.squeeze(logits, dim=1)
                new_loss = F.binary_cross_entropy_with_logits(logits, new_targets)
                grad = torch.autograd.grad(new_loss, [x_adv_sampled])[0]
                # Per-sample L2 normalization of the gradient (all non-batch dims).
                grad_norm = torch.norm(grad, p=2, dim=tuple(range(1, grad.dim())))
                grad_norm = grad_norm.view(-1, *[1] * (grad.dim() - 1))
                grad_normalized = grad / grad_norm
            with torch.no_grad():
                # In-place ascent step, outside the autograd tape.
                x_adv_sampled.add_(self.ascent_step_size * grad_normalized)
            if (step + 1) % 10 == 0:
                # Project the normal points to the set N_i(r)
                h = x_adv_sampled - x_train_data
                norm_h = torch.sqrt(torch.sum(h ** 2,
                                              dim=tuple(range(1, h.dim()))))
                alpha = torch.clamp(norm_h, self.radius,
                                    self.gamma * self.radius).to(device)
                # Make use of broadcast to project h
                proj = (alpha / norm_h).view(-1, *[1] * (h.dim() - 1))
                h = proj * h
                x_adv_sampled = x_train_data + h  # These adv_points are now on the surface of hyper-sphere
        if half:
            adv_pred = self.model.forward_end(x_adv_sampled)
        else:
            adv_pred = self.model(x_adv_sampled)
        adv_pred = torch.squeeze(adv_pred, dim=1)
        adv_loss = F.binary_cross_entropy_with_logits(adv_pred, new_targets)
        return adv_loss
# class DROCC(nn.Module):
# def __init__(self, ):
# super().__init__()
#
# self.model = CIFAR10_LeNet()
#
# def run_train(self,
# train_data,
# test_data,
# lamda=0.5,
# radius=8,
# gamma=2,
# verbose=False,
# learning_rate=1e-3,
# total_epochs=30,
# only_ce_epochs=6,
# ascent_step_size=0.001,
# ascent_num_steps=50,
# gamma_lr=1,
# batch_size=128,
# half=True):
#
# self.best_score = -np.inf
# best_model = None
# self.ascent_num_steps = ascent_num_steps
# self.ascent_step_size = ascent_step_size
# self.lamda = lamda
# self.radius = radius
# self.gamma = gamma
#
# self.optimizer = optim.Adam(self.model.parameters(), lr=learning_rate)
# lr_scheduler = optim.lr_scheduler.ExponentialLR(self.optimizer, gamma=gamma_lr)
#
# train_loader = torch.utils.data.DataLoader(train_data,
# batch_size=batch_size,
# shuffle=True)
#
# test_loader = torch.utils.data.DataLoader(test_data,
# batch_size=batch_size,
# shuffle=True)
#
# for epoch in range(total_epochs):
# # Make the weights trainable
# self.model.train()
#
# # Placeholder for the respective 2 loss values
# epoch_adv_loss = torch.tensor([0]).type(torch.float32).to(device) # AdvLoss
# epoch_ce_loss = 0 # Cross entropy Loss
#
# batch_idx = -1
# for data, target, _ in train_loader:
# batch_idx += 1
# data, target = data.to(device), target.to(device)
# # Data Processing
# data = data.to(torch.float)
# target = target.to(torch.float)
# target = torch.squeeze(target)
#
# self.optimizer.zero_grad()
#
# # Extract the logits for cross entropy loss
# logits_start = self.model.half_forward_start(data)
# logits = self.model.half_forward_end(logits_start)
#
# logits = torch.squeeze(logits, dim=1)
# ce_loss = F.binary_cross_entropy_with_logits(logits, target)
# # Add to the epoch variable for printing average CE Loss
# epoch_ce_loss += ce_loss
#
# '''
# Adversarial Loss is calculated only for the positive data points (label==1).
# '''
# if epoch >= only_ce_epochs:
# logits_start = logits_start[target == 1]
# # AdvLoss
# if not half:
# adv_loss = self.one_class_adv_loss(data[target == 1].detach(), target[target == 1], half)
# else:
# adv_loss = self.one_class_adv_loss(logits_start.detach(), target[target == 1], half)
# epoch_adv_loss += adv_loss
#
# loss = ce_loss + adv_loss * self.lamda
# else:
# # If only CE based training has to be done
# loss = ce_loss
#
# # Backprop
# loss.backward()
# self.optimizer.step()
#
# epoch_ce_loss = epoch_ce_loss / (batch_idx + 1) # Average CE Loss
# epoch_adv_loss = epoch_adv_loss / (batch_idx + 1) # Average AdvLoss
#
# if verbose:
# test_score = self.test(test_loader)
# if test_score > self.best_score:
# self.best_score = test_score
# best_model = copy.deepcopy(self.model)
#
# print('Epoch: {}, CE Loss: {}, AdvLoss: {}, {}: {}'.format(
# epoch, epoch_ce_loss.item(), epoch_adv_loss.item(),
# 'AUC', test_score))
# lr_scheduler.step()
# if verbose:
# self.model = copy.deepcopy(best_model)
# print('\nBest test {}: {}'.format(
# 'AUC', self.best_score
# ))
#
# def test(self, test_loader, metric='AUC'):
# """Evaluate the model on the given test dataset.
# Parameters
# ----------
# test_loader: Dataloader object for the test dataset.
# metric: Metric used for evaluation (AUC / F1).
# """
# self.model.eval()
# label_score = []
# batch_idx = -1
# for data, target, _ in test_loader:
# batch_idx += 1
# data, target = data.to(device), target.to(device)
# data = data.to(torch.float)
# target = target.to(torch.float)
# target = torch.squeeze(target)
#
# logits = self.model(data)
# logits = torch.squeeze(logits, dim=1)
# sigmoid_logits = torch.sigmoid(logits)
# scores = logits
# label_score += list(zip(target.cpu().data.numpy().tolist(),
# scores.cpu().data.numpy().tolist()))
# # Compute test score
# labels, scores = zip(*label_score)
# labels = np.array(labels)
# scores = np.array(scores)
# if metric == 'AUC':
# test_metric = roc_auc_score(labels, scores)
# if metric == 'alpha':
# test_metric = (scores > 0.5).mean()
# return test_metric
#
# def one_class_adv_loss(self, x_train_data, targets, half=True):
# """Computes the adversarial loss:
# 1) Sample points initially at random around the positive training
# data points
# 2) Gradient ascent to find the most optimal point in set N_i(r)
# classified as +ve (label=0). This is done by maximizing
# the CE loss wrt label 0
# 3) Project the points between spheres of radius R and gamma * R
# (set N_i(r))
# 4) Pass the calculated adversarial points through the model,
# and calculate the CE loss wrt target class 0
#
# Parameters
# ----------
# x_train_data: Batch of data to compute loss on.
# """
# batch_size = len(x_train_data)
# # Randomly sample points around the training data
# # We will perform SGD on these to find the adversarial points
# x_adv = torch.randn(x_train_data.shape).to(device).detach().requires_grad_()
# x_adv_sampled = x_adv + x_train_data
#
# for step in range(self.ascent_num_steps):
# with torch.enable_grad():
#
# new_targets = torch.zeros(batch_size, 1).to(device)
# # new_targets = (1 - targets).to(self.device)
# new_targets = torch.squeeze(new_targets)
# new_targets = new_targets.to(torch.float)
#
# if half:
# logits = self.model.half_forward_end(x_adv_sampled)
# else:
# logits = self.model(x_adv_sampled)
#
# logits = torch.squeeze(logits, dim=1)
# new_loss = F.binary_cross_entropy_with_logits(logits, new_targets)
#
# grad = torch.autograd.grad(new_loss, [x_adv_sampled])[0]
# grad_norm = torch.norm(grad, p=2, dim=tuple(range(1, grad.dim())))
# grad_norm = grad_norm.view(-1, *[1] * (grad.dim() - 1))
# grad_normalized = grad / grad_norm
# with torch.no_grad():
# x_adv_sampled.add_(self.ascent_step_size * grad_normalized)
#
# if (step + 1) % 10 == 0:
# # Project the normal points to the set N_i(r)
# h = x_adv_sampled - x_train_data
# norm_h = torch.sqrt(torch.sum(h ** 2,
# dim=tuple(range(1, h.dim()))))
# alpha = torch.clamp(norm_h, self.radius,
# self.gamma * self.radius).to(device)
# # Make use of broadcast to project h
# proj = (alpha / norm_h).view(-1, *[1] * (h.dim() - 1))
# h = proj * h
# x_adv_sampled = x_train_data + h # These adv_points are now on the surface of hyper-sphere
#
# if half:
# adv_pred = self.model.half_forward_end(x_adv_sampled)
# else:
# adv_pred = self.model(x_adv_sampled)
#
# adv_pred = torch.squeeze(adv_pred, dim=1)
# adv_loss = F.binary_cross_entropy_with_logits(adv_pred, (new_targets))
#
# return adv_loss
#
# def save(self, path):
# torch.save(self.model.state_dict(), os.path.join(path, 'model.pt'))
#
# def load(self, path):
# self.model.load_state_dict(torch.load(os.path.join(path, 'model.pt')))
class PU_DROCC(PUModelRandomBatch):
    """Positive-unlabeled variant of DROCC.

    The BCE term is computed on the labeled (target == 1) samples only,
    while the adversarial gradient ascent is run on the unlabeled
    (target == 0) samples instead of the positives.
    """

    def __init__(self,
                 model=Net,
                 lam=0.5,
                 radius=8,
                 gamma=2,
                 warmup_epochs=6,
                 ascent_step_size=0.001,
                 ascent_num_steps=50,
                 half=True):
        # lam -- weight of the adversarial loss term in the total loss.
        # radius / gamma -- adversarial points are projected into the shell
        #   between spheres of radius `radius` and `gamma * radius`.
        # warmup_epochs -- epochs trained with CE only before AdvLoss is added.
        # ascent_step_size / ascent_num_steps -- gradient-ascent schedule.
        # half -- when True, ascent runs on the intermediate representation
        #   produced by model.forward_start; when False, on the raw inputs.
        super().__init__(model, 0)
        self.lam = lam
        self.radius = radius
        self.gamma = gamma
        self.warmup_epochs = warmup_epochs
        self.ascent_step_size = ascent_step_size
        self.ascent_num_steps = ascent_num_steps
        self.half = half

    def batch_loss(self, batch):
        """Return the PU-DROCC loss (a scalar tensor) for one batch.

        Batch layout: batch[0] holds the inputs, batch[2] the 0/1 labels
        (1 = labeled positive, 0 = unlabeled).
        """
        data, target = batch[0], batch[2]
        data, target = data.to(device), target.to(device)
        # Mirror DROCC.batch_loss preprocessing: BCE-with-logits requires a
        # floating-point target, and a trailing singleton dim would break
        # the boolean masks below.
        data = data.to(torch.float)
        target = target.to(torch.float)
        target = torch.squeeze(target)
        lab_ind = target == 1
        unl_ind = target == 0
        unl_cnt = max(unl_ind.sum(), 1)
        # Extract the logits for cross entropy loss (labeled samples only).
        logits_start = self.model.forward_start(data)
        logits = self.model.forward_end(logits_start[lab_ind])
        logits = torch.squeeze(logits, dim=1)
        ce_loss = F.binary_cross_entropy_with_logits(logits, target[lab_ind])
        '''
        Adversarial Loss is calculated only for the unlabeled data points (label==0).
        '''
        # NOTE(review): self.epoch is presumably maintained by the
        # PUModelRandomBatch training loop -- confirm before relying on it.
        if self.epoch >= self.warmup_epochs and unl_cnt > 1:
            logits_start = logits_start[unl_ind]
            # AdvLoss
            if not self.half:
                adv_loss = self.one_class_adv_loss(data[unl_ind].detach(), self.half)
            else:
                # BUG FIX: logits_start was already restricted to the
                # unlabeled rows above; the original indexed it with
                # unl_ind a second time, which selects the wrong rows (or
                # raises, since the full-batch mask is longer than the
                # filtered tensor).
                adv_loss = self.one_class_adv_loss(logits_start.detach(), self.half)
            loss = ce_loss + adv_loss * self.lam
        else:
            # Warm-up (or no unlabeled samples): CE-only training.
            loss = ce_loss
        return loss

    def one_class_adv_loss(self, x_train_data, half=True):
        """Computes the adversarial loss:
        1) Sample points initially at random around the given training
        data points
        2) Gradient ascent to find the most optimal point in set N_i(r)
        classified as +ve (label=0). This is done by maximizing
        the CE loss wrt label 0
        3) Project the points between spheres of radius R and gamma * R
        (set N_i(r))
        4) Pass the calculated adversarial points through the model,
        and calculate the CE loss wrt target class 0
        Parameters
        ----------
        x_train_data: Batch of data to compute loss on.
        half: when True, x_train_data is an intermediate representation and
            only model.forward_end is applied; otherwise the full model is.
        """
        batch_size = len(x_train_data)
        # Randomly sample points around the training data; gradient ascent
        # on these finds the adversarial points.
        x_adv = torch.randn(x_train_data.shape).to(device).detach().requires_grad_()
        x_adv_sampled = x_adv + x_train_data
        for step in range(self.ascent_num_steps):
            with torch.enable_grad():
                # All-zero targets: ascend the BCE loss away from label 1.
                new_targets = torch.zeros(batch_size, 1).to(device)
                new_targets = torch.squeeze(new_targets)
                new_targets = new_targets.to(torch.float)
                if half:
                    logits = self.model.forward_end(x_adv_sampled)
                else:
                    logits = self.model(x_adv_sampled)
                logits = torch.squeeze(logits, dim=1)
                new_loss = F.binary_cross_entropy_with_logits(logits, new_targets)
                grad = torch.autograd.grad(new_loss, [x_adv_sampled])[0]
                # Per-sample L2 normalization of the gradient.
                grad_norm = torch.norm(grad, p=2, dim=tuple(range(1, grad.dim())))
                grad_norm = grad_norm.view(-1, *[1] * (grad.dim() - 1))
                grad_normalized = grad / grad_norm
            with torch.no_grad():
                # In-place ascent step, outside the autograd tape.
                x_adv_sampled.add_(self.ascent_step_size * grad_normalized)
            if (step + 1) % 10 == 0:
                # Project onto the shell between radius R and gamma * R.
                h = x_adv_sampled - x_train_data
                norm_h = torch.sqrt(torch.sum(h ** 2,
                                              dim=tuple(range(1, h.dim()))))
                alpha = torch.clamp(norm_h, self.radius,
                                    self.gamma * self.radius).to(device)
                # Broadcast alpha/norm_h over the non-batch dims to scale h.
                proj = (alpha / norm_h).view(-1, *[1] * (h.dim() - 1))
                h = proj * h
                x_adv_sampled = x_train_data + h  # These adv_points are now on the surface of hyper-sphere
        if half:
            adv_pred = self.model.forward_end(x_adv_sampled)
        else:
            adv_pred = self.model(x_adv_sampled)
        adv_pred = torch.squeeze(adv_pred, dim=1)
        adv_loss = F.binary_cross_entropy_with_logits(adv_pred, new_targets)
        return adv_loss
# class PU_DROCC(nn.Module):
# def __init__(self, ):
# super().__init__()
#
# self.model = CIFAR10_LeNet()
#
# def run_train(self,
# train_data,
# test_data,
# lamda=0.5,
# radius=1,
# gamma=2,
# verbose=False,
# learning_rate=5e-4,
# total_epochs=20,
# only_ce_epochs=2,
# ascent_step_size=5e-6,
# ascent_num_steps=10,
# gamma_lr=0.96,
# batch_size=512,
# half=True):
#
# self.best_score = -np.inf
# best_model = None
# self.ascent_num_steps = ascent_num_steps
# self.ascent_step_size = ascent_step_size
# self.lamda = lamda
# self.radius = radius
# self.gamma = gamma
#
# self.optimizer = optim.Adam(self.model.parameters(), lr=learning_rate)
# lr_scheduler = optim.lr_scheduler.ExponentialLR(self.optimizer, gamma=gamma_lr)
#
# train_loader = torch.utils.data.DataLoader(train_data,
# batch_size=batch_size,
# shuffle=True)
#
# test_loader = torch.utils.data.DataLoader(test_data,
# batch_size=batch_size,
# shuffle=True)
#
# for epoch in range(total_epochs):
# # Make the weights trainable
# self.model.train()
#
# # Placeholder for the respective 2 loss values
# epoch_adv_loss = torch.tensor([0]).type(torch.float32).to(device) # AdvLoss
# epoch_ce_loss = 0 # Cross entropy Loss
#
# batch_idx = -1
# for data, _, target in train_loader:
# batch_idx += 1
# data, target = data.to(device), target.to(device)
# # Data Processing
# data = data.to(torch.float)
# target = target.to(torch.float)
# target = torch.squeeze(target)
#
# self.optimizer.zero_grad()
#
# lab_ind = target == 1
# unl_ind = target == 0
#
# # lab_cnt = max(lab_ind.sum(), 1)
# unl_cnt = max(unl_ind.sum(), 1)
#
# # Extract the logits for cross entropy loss
# logits_start = self.model.half_forward_start(data)
# logits = self.model.half_forward_end(logits_start[lab_ind])
#
# logits = torch.squeeze(logits, dim=1)
# ce_loss = F.binary_cross_entropy_with_logits(logits, target[lab_ind])
# # Add to the epoch variable for printing average CE Loss
# epoch_ce_loss += ce_loss
#
# '''
# Adversarial Loss is calculated only for the positive data points (label==1).
# '''
# if epoch >= only_ce_epochs and unl_cnt > 1:
# logits_start = logits_start[unl_ind]
# # AdvLoss
# if not half:
# adv_loss = self.one_class_adv_loss(data[unl_ind].detach(), target[unl_ind], half)
# else:
# adv_loss = self.one_class_adv_loss(logits_start.detach(), target[unl_ind], half)
# epoch_adv_loss += adv_loss
#
# loss = ce_loss + adv_loss * self.lamda
# else:
# # If only CE based training has to be done
# loss = ce_loss
#
# # Backprop
# loss.backward()
# self.optimizer.step()
#
# epoch_ce_loss = epoch_ce_loss / (batch_idx + 1) # Average CE Loss
# epoch_adv_loss = epoch_adv_loss / (batch_idx + 1) # Average AdvLoss
#
# if verbose:
# test_score = self.test(test_loader)
# if test_score > self.best_score:
# self.best_score = test_score
# best_model = copy.deepcopy(self.model)
#
# print('Epoch: {}, CE Loss: {}, AdvLoss: {}, {}: {}'.format(
# epoch, epoch_ce_loss.item(), epoch_adv_loss.item(),
# 'AUC', test_score))
# lr_scheduler.step()
# if verbose:
# self.model = copy.deepcopy(best_model)
# print('\nBest test {}: {}'.format(
# 'AUC', self.best_score
# ))
#
# def test(self, test_loader, metric='AUC'):
# """Evaluate the model on the given test dataset.
# Parameters
# ----------
# test_loader: Dataloader object for the test dataset.
# metric: Metric used for evaluation (AUC / F1).
# """
# self.model.eval()
# label_score = []
# batch_idx = -1
# for data, target, _ in test_loader:
# batch_idx += 1
# data, target = data.to(device), target.to(device)
# data = data.to(torch.float)
# target = target.to(torch.float)
# target = torch.squeeze(target)
#
# logits = self.model(data)
# logits = torch.squeeze(logits, dim=1)
# sigmoid_logits = torch.sigmoid(logits)
# scores = logits
# label_score += list(zip(target.cpu().data.numpy().tolist(),
# scores.cpu().data.numpy().tolist()))
# # Compute test score
# labels, scores = zip(*label_score)
# labels = np.array(labels)
# scores = np.array(scores)
# if metric == 'AUC':
# test_metric = roc_auc_score(labels, scores)
# if metric == 'alpha':
# test_metric = (scores > 0.5).mean()
# return test_metric
#
# def one_class_adv_loss(self, x_train_data, targets, half=True):
# """Computes the adversarial loss:
# 1) Sample points initially at random around the positive training
# data points
# 2) Gradient ascent to find the most optimal point in set N_i(r)
# classified as +ve (label=0). This is done by maximizing
# the CE loss wrt label 0
# 3) Project the points between spheres of radius R and gamma * R
# (set N_i(r))
# 4) Pass the calculated adversarial points through the model,
# and calculate the CE loss wrt target class 0
#
# Parameters
# ----------
# x_train_data: Batch of data to compute loss on.
# """
# batch_size = len(x_train_data)
# # Randomly sample points around the training data
# # We will perform SGD on these to find the adversarial points
# x_adv = torch.randn(x_train_data.shape).to(device).detach().requires_grad_()
# x_adv_sampled = x_adv + x_train_data
#
# for step in range(self.ascent_num_steps):
# with torch.enable_grad():
#
# new_targets = torch.zeros(batch_size, 1).to(device)
# # new_targets = (1 - targets).to(self.device)
# new_targets = torch.squeeze(new_targets)
# new_targets = new_targets.to(torch.float)
#
# if half:
# logits = self.model.half_forward_end(x_adv_sampled)
# else:
# logits = self.model(x_adv_sampled)
#
# logits = torch.squeeze(logits, dim=1)
# new_loss = F.binary_cross_entropy_with_logits(logits, new_targets)
#
# grad = torch.autograd.grad(new_loss, [x_adv_sampled])[0]
# grad_norm = torch.norm(grad, p=2, dim=tuple(range(1, grad.dim())))
# grad_norm = grad_norm.view(-1, *[1] * (grad.dim() - 1))
# grad_normalized = grad / grad_norm
# with torch.no_grad():
# x_adv_sampled.add_(self.ascent_step_size * grad_normalized)
#
# if (step + 1) % 10 == 0:
# # Project the normal points to the set N_i(r)
# h = x_adv_sampled - x_train_data
# norm_h = torch.sqrt(torch.sum(h ** 2,
# dim=tuple(range(1, h.dim()))))
# alpha = torch.clamp(norm_h, self.radius,
# self.gamma * self.radius).to(device)
# # Make use of broadcast to project h
# proj = (alpha / norm_h).view(-1, *[1] * (h.dim() - 1))
# h = proj * h
# x_adv_sampled = x_train_data + h # These adv_points are now on the surface of hyper-sphere
#
# if half:
# adv_pred = self.model.half_forward_end(x_adv_sampled)
# else:
# adv_pred = self.model(x_adv_sampled)
#
# adv_pred = torch.squeeze(adv_pred, dim=1)
# adv_loss = F.binary_cross_entropy_with_logits(adv_pred, (new_targets))
#
# return adv_loss
#
# def save(self, path):
# torch.save(self.model.state_dict(), os.path.join(path, 'model.pt'))
#
# def load(self, path):
# self.model.load_state_dict(torch.load(os.path.join(path, 'model.pt')))
| 41.004329
| 115
| 0.531637
| 3,394
| 28,416
| 4.232764
| 0.076606
| 0.027565
| 0.027565
| 0.013365
| 0.969651
| 0.966588
| 0.962342
| 0.962342
| 0.962342
| 0.962342
| 0
| 0.012642
| 0.365322
| 28,416
| 692
| 116
| 41.063584
| 0.78392
| 0.687289
| 0
| 0.840764
| 0
| 0
| 0.000878
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.038217
| false
| 0
| 0.038217
| 0
| 0.11465
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a599a53d2dbe8f55d0ae2285bfa16d2cd2d65c31
| 2,322
|
py
|
Python
|
src/bgapi/gatt_server/rsp.py
|
GetAmbush/python-bgapi
|
985e5849275eb5e7cf794c30ef87e16ffa91fa63
|
[
"MIT"
] | 5
|
2018-05-11T14:59:50.000Z
|
2021-04-29T07:51:43.000Z
|
src/bgapi/gatt_server/rsp.py
|
GetAmbush/python-bgapi
|
985e5849275eb5e7cf794c30ef87e16ffa91fa63
|
[
"MIT"
] | null | null | null |
src/bgapi/gatt_server/rsp.py
|
GetAmbush/python-bgapi
|
985e5849275eb5e7cf794c30ef87e16ffa91fa63
|
[
"MIT"
] | 2
|
2018-10-05T16:51:08.000Z
|
2020-08-10T18:24:16.000Z
|
from struct import (unpack_from, calcsize, error)
def find_attribute(data: bytes, offset: int = 0):
    """Parse a find_attribute response: 16-bit result + 16-bit sent_len.

    Returns (payload dict, offset just past the consumed bytes).
    """
    fmt = '<HH'
    fields = unpack_from(fmt, data, offset=offset)
    payload = {'result': fields[0], 'sent_len': fields[1]}
    return payload, offset + calcsize(fmt)
def read_attribute_type(data: bytes, offset: int = 0):
    """Parse a read_attribute_type response: result, length byte, then
    `length` bytes of attribute type.

    Raises struct.error when the buffer is shorter than the declared length.
    Returns (payload dict, offset just past the consumed bytes).
    """
    header = '<HB'
    result, length = unpack_from(header, data, offset=offset)
    start = offset + calcsize(header)
    end = start + length
    value = data[start:end]
    if len(value) < length:
        # Truncated payload -- surface the same error type struct uses.
        raise error
    return {'result': result, 'type': value}, end
def read_attribute_value(data: bytes, offset: int = 0):
    """Parse a read_attribute_value response: result, length byte, then
    `length` bytes of attribute value.

    Raises struct.error when the buffer is shorter than the declared length.
    Returns (payload dict, offset just past the consumed bytes).
    """
    header = '<HB'
    result, length = unpack_from(header, data, offset=offset)
    start = offset + calcsize(header)
    end = start + length
    chunk = data[start:end]
    if len(chunk) < length:
        # Truncated payload -- surface the same error type struct uses.
        raise error
    return {'result': result, 'value': chunk}, end
def send_characteristic_notification(data: bytes, offset: int = 0):
    """Parse a send_characteristic_notification response:
    16-bit result + 16-bit sent_len.

    Returns (payload dict, offset just past the consumed bytes).
    """
    fmt = '<HH'
    fields = unpack_from(fmt, data, offset=offset)
    return {'result': fields[0], 'sent_len': fields[1]}, offset + calcsize(fmt)
def send_user_read_response(data: bytes, offset: int = 0):
    """Parse a send_user_read_response response:
    16-bit result + 16-bit sent_len.

    Returns (payload dict, offset just past the consumed bytes).
    """
    fmt = '<HH'
    fields = unpack_from(fmt, data, offset=offset)
    return {'result': fields[0], 'sent_len': fields[1]}, offset + calcsize(fmt)
def send_user_write_response(data: bytes, offset: int = 0):
    """Parse a send_user_write_response response: a single 16-bit result.

    Returns (payload dict, offset just past the consumed bytes).
    """
    fmt = '<H'
    (result,) = unpack_from(fmt, data, offset=offset)
    return {'result': result}, offset + calcsize(fmt)
def set_capabilities(data: bytes, offset: int = 0):
    """Parse a set_capabilities response: a single 16-bit result.

    Returns (payload dict, offset just past the consumed bytes).
    """
    fmt = '<H'
    (result,) = unpack_from(fmt, data, offset=offset)
    return {'result': result}, offset + calcsize(fmt)
def write_attribute_value(data: bytes, offset: int = 0):
    """Parse a write_attribute_value response: a single 16-bit result.

    Returns (payload dict, offset just past the consumed bytes).
    """
    fmt = '<H'
    (result,) = unpack_from(fmt, data, offset=offset)
    return {'result': result}, offset + calcsize(fmt)
| 23.454545
| 67
| 0.609388
| 273
| 2,322
| 5.047619
| 0.135531
| 0.156749
| 0.11611
| 0.104499
| 0.891147
| 0.872279
| 0.800435
| 0.788824
| 0.726415
| 0.726415
| 0
| 0.004703
| 0.267442
| 2,322
| 98
| 68
| 23.693878
| 0.805409
| 0
| 0
| 0.705128
| 0
| 0
| 0.043928
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.102564
| false
| 0
| 0.012821
| 0
| 0.217949
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3c055540f4bac239471bd40673cc11cba8364b6a
| 1,042
|
py
|
Python
|
python/testData/inspections/PyDictDuplicateKeysInspection/test.py
|
teddywest32/intellij-community
|
e0268d7a1da1d318b441001448cdd3e8929b2f29
|
[
"Apache-2.0"
] | 2
|
2018-12-29T09:53:39.000Z
|
2018-12-29T09:53:42.000Z
|
python/testData/inspections/PyDictDuplicateKeysInspection/test.py
|
teddywest32/intellij-community
|
e0268d7a1da1d318b441001448cdd3e8929b2f29
|
[
"Apache-2.0"
] | 11
|
2017-02-27T22:35:32.000Z
|
2021-12-24T08:07:40.000Z
|
python/testData/inspections/PyDictDuplicateKeysInspection/test.py
|
teddywest32/intellij-community
|
e0268d7a1da1d318b441001448cdd3e8929b2f29
|
[
"Apache-2.0"
] | 1
|
2020-11-27T10:36:50.000Z
|
2020-11-27T10:36:50.000Z
|
dict = {<warning descr="Dictionary contains duplicate keys key_1">key_1</warning> : 1, key_2: 2, <warning descr="Dictionary contains duplicate keys key_1">key_1</warning> : 3}
dict = {'key_1' : 1, <warning descr="Dictionary contains duplicate keys 'key_2'">'key_2'</warning>: 2, <warning descr="Dictionary contains duplicate keys 'key_2'">'key_2'</warning> : 3}
a = {}
{'key_1' : 1, 'key_2': 2}
import random
def foo():
return random.random()
{foo(): 1, foo():2}
# PY-2511
dict = dict([(<warning descr="Dictionary contains duplicate keys key">'key'</warning>, 666), (<warning descr="Dictionary contains duplicate keys key">'key'</warning>, 123)])
dict = dict(((<warning descr="Dictionary contains duplicate keys key">'key'</warning>, 666), (<warning descr="Dictionary contains duplicate keys key">'key'</warning>, 123)))
dict = dict(((<warning descr="Dictionary contains duplicate keys key">'key'</warning>, 666), ('k', 123)), <warning descr="Dictionary contains duplicate keys key">key</warning>=4)
dict([('key', 666), ('ky', 123)])
| 57.888889
| 185
| 0.692898
| 149
| 1,042
| 4.765101
| 0.161074
| 0.169014
| 0.309859
| 0.422535
| 0.86338
| 0.86338
| 0.86338
| 0.86338
| 0.85493
| 0.776056
| 0
| 0.055315
| 0.115163
| 1,042
| 17
| 186
| 61.294118
| 0.714751
| 0.006718
| 0
| 0
| 0
| 0
| 0.424008
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.083333
| null | null | 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
3c1b6c9e6b47fb95aa16ef2acb4b7d713d883c56
| 565
|
py
|
Python
|
eval_covid20cases_timm-regnetx_002_CoarseDropout.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_covid20cases_timm-regnetx_002_CoarseDropout.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
eval_covid20cases_timm-regnetx_002_CoarseDropout.py
|
BrunoKrinski/segtool
|
cb604b5f38104c43a76450136e37c3d1c4b6d275
|
[
"MIT"
] | null | null | null |
import os

# Number of cross-validation folds to evaluate; the original hard-coded
# five near-identical command strings differing only in the fold index.
NUM_FOLDS = 5

# Run the evaluation config for each fold sequentially.
ls = [
    "python main.py --configs "
    "configs/eval_covid20cases_unetplusplus_timm-regnetx_002_"
    "{}_CoarseDropout.yml".format(fold)
    for fold in range(NUM_FOLDS)
]

for l in ls:
    # NOTE(review): exit status is ignored, as in the original; a failed
    # fold does not stop the remaining runs.
    os.system(l)
| 51.363636
| 107
| 0.853097
| 80
| 565
| 5.65
| 0.3
| 0.110619
| 0.132743
| 0.210177
| 0.893805
| 0.893805
| 0.893805
| 0.893805
| 0.893805
| 0.893805
| 0
| 0.05618
| 0.054867
| 565
| 11
| 108
| 51.363636
| 0.790262
| 0
| 0
| 0
| 0
| 0
| 0.883392
| 0.662544
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.111111
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
3c56748dc2e31bf612f4bf75fa49cb321ba58ffd
| 24,804
|
py
|
Python
|
training.py
|
zhangsiyu1103/ESNAC
|
ecec9860d1fb9f82a61c61dc2bcd1b17d58352dd
|
[
"MIT"
] | null | null | null |
training.py
|
zhangsiyu1103/ESNAC
|
ecec9860d1fb9f82a61c61dc2bcd1b17d58352dd
|
[
"MIT"
] | null | null | null |
training.py
|
zhangsiyu1103/ESNAC
|
ecec9860d1fb9f82a61c61dc2bcd1b17d58352dd
|
[
"MIT"
] | null | null | null |
import torch
import torch.nn as nn
import torch.optim as optim
import torch.nn.functional as F
import torch.backends.cudnn as cudnn
import options as opt
import os
import time
import sys
import numpy as np
def init_model(model):
    """Initialize model weights in place and return the model.

    Conv2d: He (Kaiming) normal init suited to ReLU; BatchNorm2d: weight=1,
    bias=0; Linear: N(0, 0.01) weight, zero bias. Modules created without
    affine/bias parameters are left untouched (the original crashed on
    Linear(bias=False) and BatchNorm2d(affine=False) because it called
    constant_ on a None bias/weight).
    """
    for module in model.modules():
        if isinstance(module, nn.Conv2d):
            nn.init.kaiming_normal_(module.weight, mode='fan_out',
                                    nonlinearity='relu')
            if module.bias is not None:
                nn.init.constant_(module.bias, 0)
        elif isinstance(module, nn.BatchNorm2d):
            # affine=False leaves weight/bias as None.
            if module.weight is not None:
                nn.init.constant_(module.weight, 1)
            if module.bias is not None:
                nn.init.constant_(module.bias, 0)
        elif isinstance(module, nn.Linear):
            nn.init.normal_(module.weight, 0, 0.01)
            if module.bias is not None:
                nn.init.constant_(module.bias, 0)
    return model
def test_model(model, dataset):
    """Evaluate classification accuracy (in percent) on the held-out split.

    Uses dataset.test_loader when present, otherwise dataset.val_loader.
    Raises NotImplementedError when the dataset exposes neither.
    """
    model.eval()
    if hasattr(dataset, 'test_loader'):
        loader = dataset.test_loader
    elif hasattr(dataset, 'val_loader'):
        loader = dataset.val_loader
    else:
        raise NotImplementedError('Unknown dataset!')
    # NOTE(review): the original also ran a full pass over
    # dataset.train_loader to compute a train accuracy that was never used
    # (its print statements were commented out) -- removed here to avoid a
    # wasted epoch of forward passes on every evaluation.
    correct = 0
    total = 0
    with torch.no_grad():
        for inputs, targets in loader:
            inputs = inputs.to(opt.device)
            targets = targets.to(opt.device)
            outputs = model(inputs)
            _, predicted = outputs.max(1)
            total += targets.size(0)
            correct += predicted.eq(targets).sum().item()
    acc = 100.0 * correct / total
    return acc
def test_model_regression(model, dataset):
    """Return the mean MSE over the test/val split.

    Also prints the mean training-set loss as a side effect (behavior kept
    from the original). Uses dataset.test_loader when present, otherwise
    dataset.val_loader; raises NotImplementedError when neither exists.
    """
    model.eval()
    criterion = nn.MSELoss()
    if hasattr(dataset, 'test_loader'):
        eval_loader = dataset.test_loader
    elif hasattr(dataset, 'val_loader'):
        eval_loader = dataset.val_loader
    else:
        raise NotImplementedError('Unknown dataset!')

    def _mean_loss(loader):
        # Average the per-batch MSE over all batches of `loader`.
        running = 0
        batches = 0
        with torch.no_grad():
            for inputs, targets in loader:
                inputs = inputs.to(opt.device)
                targets = targets.to(opt.device)
                running += criterion(model(inputs), targets).item()
                batches += 1
        return running / batches

    print("train loss:", _mean_loss(dataset.train_loader))
    return _mean_loss(eval_loader)
def test_model_image(model, dataset):
    """Evaluate a classifier and collect the inputs it got wrong.

    Returns:
        tuple: ``(acc, miss)`` where ``acc`` is top-1 accuracy (percent) on
        the evaluation split and ``miss`` is a set of the misclassified
        input tensors.

    Raises:
        NotImplementedError: if the dataset has neither test nor val loader.
    """
    model.eval()
    if hasattr(dataset, 'test_loader'):
        eval_loader = dataset.test_loader
    elif hasattr(dataset, 'val_loader'):
        eval_loader = dataset.val_loader
    else:
        raise NotImplementedError('Unknown dataset!')
    n_correct = 0
    n_seen = 0
    miss = set()
    with torch.no_grad():
        for inputs, targets in eval_loader:
            inputs = inputs.to(opt.device)
            targets = targets.to(opt.device)
            predictions = model(inputs).max(1)[1]
            hits = predictions.eq(targets)
            n_seen += targets.size(0)
            n_correct += hits.sum().item()
            # Remember each input whose prediction missed the label.
            for pos, hit in enumerate(hits):
                if not hit:
                    miss.add(inputs[pos])
    return 100.0 * n_correct / n_seen, miss
def test_model_latency(model, dataset):
    """Measure mean per-batch forward latency of ``model`` in milliseconds.

    Iterates the evaluation loader (``test_loader`` or ``val_loader``) and
    times each forward pass.

    Returns:
        float: mean forward latency per batch, in ms.

    Raises:
        NotImplementedError: if the dataset has neither test nor val loader.
    """
    model.eval()
    if hasattr(dataset, 'test_loader'):
        loader = dataset.test_loader
    elif hasattr(dataset, 'val_loader'):
        loader = dataset.val_loader
    else:
        raise NotImplementedError('Unknown dataset!')
    # Guard the CUDA syncs so this also works on CPU-only hosts, where the
    # old unconditional torch.cuda.synchronize() raised.
    use_cuda = torch.cuda.is_available()
    latency = []
    with torch.no_grad():
        for inputs, targets in loader:
            inputs = inputs.to(opt.device)
            targets = targets.to(opt.device)
            # Drain pending async work (e.g. the host->device copy above)
            # BEFORE starting the clock so it is not billed to the model.
            if use_cuda:
                torch.cuda.synchronize()
            start_time = time.time()
            outputs = model(inputs)
            # CUDA kernels launch asynchronously; wait for completion before
            # reading the clock, otherwise only launch overhead is measured.
            if use_cuda:
                torch.cuda.synchronize()
            latency.append((time.time() - start_time) * 1000)
    return np.mean(latency)
def train_model_teacher(model_, dataset, save_path, epochs=60, lr=0.005,
                        momentum=0.9, weight_decay=5e-4):
    """Train the teacher network, checkpointing the best test accuracy.

    The model is wrapped in ``DataParallel`` and trained with Adam plus a
    StepLR schedule.  Whenever test accuracy improves, the unwrapped module
    is saved to ``save_path`` with its accuracy stashed on ``.acc``.

    Note: ``momentum`` is accepted for signature compatibility but unused,
    since the optimizer here is Adam.

    Returns:
        tuple: ``(best_model, best_acc)`` — the best unwrapped module and
        its test accuracy.
    """
    best_acc = 0
    best_model = None
    model = torch.nn.DataParallel(model_.to(opt.device))
    criterion = nn.CrossEntropyLoss()
    optimizer = optim.Adam(model.parameters(), lr=lr,
                           weight_decay=weight_decay)
    scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=100, gamma=0.1)
    for epoch in range(1, epochs + 1):
        print('epochs ', epoch)
        model.train()
        running_loss = 0
        n_batches = 0
        for inputs, targets in dataset.train_loader:
            inputs = inputs.to(opt.device)
            targets = targets.to(opt.device)
            optimizer.zero_grad()
            loss = criterion(model(inputs), targets)
            loss.backward()
            optimizer.step()
            running_loss += loss.item()
            n_batches += 1
        scheduler.step()
        mean_loss = running_loss / n_batches
        opt.writer.add_scalar('training/loss', mean_loss, epoch)
        acc = test_model(model, dataset)
        opt.writer.add_scalar('training/acc', acc, epoch)
        print("loss: ", mean_loss)
        print("test acc: ", acc)
        if acc > best_acc:
            best_acc = acc
            model.module.acc = acc
            best_model = model.module
            torch.save(best_model, save_path)
    return best_model, best_acc
def train_model_student(model_, dataset, save_path, idx,
                        optimization=opt.tr_fu_optimization,
                        epochs=opt.tr_fu_epochs, lr=opt.tr_fu_lr,
                        momentum=opt.tr_fu_momentum,
                        weight_decay=opt.tr_fu_weight_decay,
                        lr_schedule=opt.tr_fu_lr_schedule,
                        from_scratch=opt.tr_fu_from_scratch):
    """Train a student classifier, checkpointing the best test accuracy.

    Args:
        model_: the student network (moved to ``opt.device`` and wrapped in
            ``DataParallel``).
        dataset: provides ``train_loader`` plus a test/val loader.
        save_path: path where the best unwrapped model is ``torch.save``d.
        idx: index used to namespace the tensorboard scalar tags.
        optimization: ``'SGD'`` or ``'Adam'``.
        lr_schedule: ``'step'`` (per-epoch StepLR) or ``'linear'``
            (per-batch decay toward zero).
        from_scratch: when True, re-initialize weights via ``init_model``.

    Returns:
        tuple: ``(best_model, best_acc)``.

    Raises:
        ValueError: for an unknown ``optimization`` or ``lr_schedule``
        (previously these fell through silently and died later with a
        NameError on ``optimizer``/``scheduler``).
    """
    acc_best = 0
    model_best = None
    model = torch.nn.DataParallel(model_.to(opt.device))
    criterion = nn.CrossEntropyLoss()
    if optimization == 'SGD':
        optimizer = optim.SGD(model.parameters(), lr=lr, momentum=momentum,
                              weight_decay=weight_decay)
    elif optimization == 'Adam':
        optimizer = optim.Adam(model.parameters(), lr=lr,
                               weight_decay=weight_decay)
    else:
        raise ValueError('Unknown optimization: %s' % optimization)
    if lr_schedule == 'step':
        scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=20,
                                              gamma=0.2)
    elif lr_schedule == 'linear':
        n_total_exp = epochs * len(dataset.train_loader)
        lr_lambda = lambda n_exp_seen: 1 - n_exp_seen / n_total_exp
        scheduler = optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=lr_lambda)
    else:
        raise ValueError('Unknown lr_schedule: %s' % lr_schedule)
    if from_scratch:
        init_model(model)
    for i in range(1, epochs + 1):
        print('epoch', i)
        model.train()
        loss_total = 0
        batch_cnt = 0
        for inputs, targets in dataset.train_loader:
            inputs = inputs.to(opt.device)
            targets = targets.to(opt.device)
            optimizer.zero_grad()
            outputs = model(inputs)
            loss = criterion(outputs, targets)
            loss.backward()
            optimizer.step()
            # The linear lambda is normalized by epochs * len(train_loader),
            # so it must advance once per BATCH; stepping it per epoch (as
            # the old code did) left the lr essentially undecayed.
            if lr_schedule == 'linear':
                scheduler.step()
            loss_total += loss.item()
            batch_cnt += 1
        if lr_schedule == 'step':
            scheduler.step()  # StepLR decays per epoch.
        opt.writer.add_scalar('training_%d/loss' % (idx), loss_total / batch_cnt, i)
        acc = test_model(model, dataset)
        opt.writer.add_scalar('training_%d/acc' % (idx), acc, i)
        print('loss: ', loss_total / batch_cnt)
        print('acc: ', acc)
        if acc > acc_best:
            acc_best = acc
            model.module.acc = acc  # stash the score on the saved model
            model_best = model.module
            torch.save(model_best, save_path)
    return model_best, acc_best
def train_model_student_regression(model_, dataset, save_path, idx,
                                   optimization=opt.tr_fu_optimization,
                                   epochs=opt.tr_fu_epochs, lr=opt.tr_fu_lr,
                                   momentum=opt.tr_fu_momentum,
                                   weight_decay=opt.tr_fu_weight_decay,
                                   lr_schedule=opt.tr_fu_lr_schedule,
                                   from_scratch=opt.tr_fu_from_scratch):
    """Train a student regressor (MSE), checkpointing the lowest test loss.

    Mirrors ``train_model_student`` but optimizes MSE and selects on test
    loss instead of accuracy.  Whenever test loss improves, the unwrapped
    model is saved to ``save_path`` with the loss stashed on ``.loss``.

    Returns:
        tuple: ``(best_model, best_loss)``.

    Raises:
        ValueError: for an unknown ``optimization`` or ``lr_schedule``
        (previously these fell through and died later with a NameError).
    """
    loss_best = sys.maxsize
    model_best = None
    model = torch.nn.DataParallel(model_.to(opt.device))
    criterion = nn.MSELoss()
    if optimization == 'SGD':
        optimizer = optim.SGD(model.parameters(), lr=lr, momentum=momentum,
                              weight_decay=weight_decay)
    elif optimization == 'Adam':
        optimizer = optim.Adam(model.parameters(), lr=lr,
                               weight_decay=weight_decay)
    else:
        raise ValueError('Unknown optimization: %s' % optimization)
    if lr_schedule == 'step':
        scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=10,
                                              gamma=0.4)
    elif lr_schedule == 'linear':
        n_total_exp = epochs * len(dataset.train_loader)
        lr_lambda = lambda n_exp_seen: 1 - n_exp_seen / n_total_exp
        scheduler = optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=lr_lambda)
    else:
        raise ValueError('Unknown lr_schedule: %s' % lr_schedule)
    if from_scratch:
        init_model(model)
    for i in range(1, epochs + 1):
        print('epoch', i)
        model.train()
        loss_total = 0
        batch_cnt = 0
        for inputs, targets in dataset.train_loader:
            inputs = inputs.to(opt.device)
            targets = targets.to(opt.device)
            optimizer.zero_grad()
            outputs = model(inputs)
            loss = criterion(outputs, targets)
            loss.backward()
            optimizer.step()
            # Linear decay is per-batch (lambda normalized by total batches);
            # the old per-epoch step left the lr essentially undecayed.
            if lr_schedule == 'linear':
                scheduler.step()
            loss_total += loss.item()
            batch_cnt += 1
        if lr_schedule == 'step':
            scheduler.step()  # StepLR decays per epoch.
        opt.writer.add_scalar('training_%d/loss' % (idx), loss_total / batch_cnt, i)
        test_loss = test_model_regression(model, dataset)
        print('train loss: ', loss_total / batch_cnt)
        print('test loss: ', test_loss)
        if test_loss < loss_best:
            loss_best = test_loss
            model.module.loss = test_loss  # stash the score on the saved model
            model_best = model.module
            torch.save(model_best, save_path)
    return model_best, loss_best
def train_model_student_kd(teacher_, model_, dataset, save_path, idx,
                           optimization=opt.tr_fu_optimization,
                           epochs=opt.tr_fu_epochs, lr=opt.tr_fu_lr,
                           momentum=opt.tr_fu_momentum,
                           weight_decay=opt.tr_fu_weight_decay,
                           lr_schedule=opt.tr_fu_lr_schedule,
                           from_scratch=opt.tr_fu_from_scratch):
    """Train a student with knowledge distillation.

    The loss is cross-entropy against the labels plus MSE against the
    teacher's outputs.  The best model (by test accuracy) is checkpointed
    to ``save_path``.

    Returns:
        tuple: ``(best_model, best_acc)``.

    Raises:
        ValueError: for an unknown ``optimization`` or ``lr_schedule``.
    """
    acc_best = 0
    model_best = None
    model = torch.nn.DataParallel(model_.to(opt.device))
    teacher = torch.nn.DataParallel(teacher_.to(opt.device))
    # The teacher is a frozen distillation target: put it in eval mode so
    # BN/dropout behave deterministically (consistent with train_model_search).
    teacher.eval()
    criterion1 = nn.CrossEntropyLoss()
    criterion2 = nn.MSELoss()
    if optimization == 'SGD':
        optimizer = optim.SGD(model.parameters(), lr=lr, momentum=momentum,
                              weight_decay=weight_decay)
    elif optimization == 'Adam':
        optimizer = optim.Adam(model.parameters(), lr=lr,
                               weight_decay=weight_decay)
    else:
        raise ValueError('Unknown optimization: %s' % optimization)
    if lr_schedule == 'step':
        scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=100,
                                              gamma=0.1)
    elif lr_schedule == 'linear':
        n_total_exp = epochs * len(dataset.train_loader)
        lr_lambda = lambda n_exp_seen: 1 - n_exp_seen / n_total_exp
        scheduler = optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=lr_lambda)
    else:
        raise ValueError('Unknown lr_schedule: %s' % lr_schedule)
    if from_scratch:
        init_model(model)
    for i in range(1, epochs + 1):
        model.train()
        loss_total = 0
        batch_cnt = 0
        for inputs, targets in dataset.train_loader:
            # Move the batch to the device BEFORE either forward pass (the
            # old code ran the teacher on the un-moved inputs).
            inputs = inputs.to(opt.device)
            targets = targets.to(opt.device)
            with torch.no_grad():
                teacher_outputs = teacher(inputs)
            optimizer.zero_grad()
            outputs = model(inputs)
            loss = criterion1(outputs, targets) + criterion2(outputs, teacher_outputs)
            loss.backward()
            optimizer.step()
            # Linear decay is per-batch; StepLR is per-epoch.  The old code
            # stepped unconditionally per batch, making StepLR decay every
            # `step_size` *batches* instead of epochs.
            if lr_schedule == 'linear':
                scheduler.step()
            loss_total += loss.item()
            batch_cnt += 1
        if lr_schedule == 'step':
            scheduler.step()
        opt.writer.add_scalar('training_%d/loss' % (idx), loss_total / batch_cnt, i)
        acc = test_model(model, dataset)
        opt.writer.add_scalar('training_%d/acc' % (idx), acc, i)
        if acc > acc_best:
            acc_best = acc
            model.module.acc = acc  # stash the score on the saved model
            model_best = model.module
            torch.save(model_best, save_path)
    return model_best, acc_best
def train_model_student_kd_reg(teacher_, model_, dataset, save_path, idx,
                               optimization=opt.tr_fu_optimization,
                               epochs=opt.tr_fu_epochs, lr=opt.tr_fu_lr,
                               momentum=opt.tr_fu_momentum,
                               weight_decay=opt.tr_fu_weight_decay,
                               lr_schedule=opt.tr_fu_lr_schedule,
                               from_scratch=opt.tr_fu_from_scratch):
    """Train a student regressor with knowledge distillation.

    The loss is MSE against the labels plus MSE against the teacher's
    outputs.  The best model (by LOWEST test loss) is checkpointed.

    Fixes vs. the previous version:
      * the checkpoint condition was ``test_loss > loss_best`` with
        ``loss_best = sys.maxsize`` — it could never fire, so no model was
        ever saved; it must be ``<`` for a loss.
      * StepLR was stepped per batch instead of per epoch.
      * the logged training loss was a raw sum; it is now a per-batch mean,
        consistent with the other training functions.

    Returns:
        tuple: ``(best_model, best_loss)``.

    Raises:
        ValueError: for an unknown ``optimization`` or ``lr_schedule``.
    """
    loss_best = sys.maxsize
    model_best = None
    model = torch.nn.DataParallel(model_.to(opt.device))
    teacher = torch.nn.DataParallel(teacher_.to(opt.device))
    # Frozen distillation target — deterministic BN/dropout behavior.
    teacher.eval()
    criterion = nn.MSELoss()
    if optimization == 'SGD':
        optimizer = optim.SGD(model.parameters(), lr=lr, momentum=momentum,
                              weight_decay=weight_decay)
    elif optimization == 'Adam':
        optimizer = optim.Adam(model.parameters(), lr=lr,
                               weight_decay=weight_decay)
    else:
        raise ValueError('Unknown optimization: %s' % optimization)
    if lr_schedule == 'step':
        scheduler = optim.lr_scheduler.StepLR(optimizer, step_size=100,
                                              gamma=0.1)
    elif lr_schedule == 'linear':
        n_total_exp = epochs * len(dataset.train_loader)
        lr_lambda = lambda n_exp_seen: 1 - n_exp_seen / n_total_exp
        scheduler = optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=lr_lambda)
    else:
        raise ValueError('Unknown lr_schedule: %s' % lr_schedule)
    if from_scratch:
        init_model(model)
    for i in range(1, epochs + 1):
        model.train()
        loss_total = 0
        batch_cnt = 0
        for inputs, targets in dataset.train_loader:
            # Move the batch to the device before either forward pass.
            inputs = inputs.to(opt.device)
            targets = targets.to(opt.device)
            with torch.no_grad():
                teacher_outputs = teacher(inputs)
            optimizer.zero_grad()
            outputs = model(inputs)
            loss = criterion(outputs, targets) + criterion(outputs, teacher_outputs)
            loss.backward()
            optimizer.step()
            # Linear decay is per-batch; StepLR is per-epoch.
            if lr_schedule == 'linear':
                scheduler.step()
            loss_total += loss.item()
            batch_cnt += 1
        if lr_schedule == 'step':
            scheduler.step()
        opt.writer.add_scalar('training_%d/loss' % (idx), loss_total / batch_cnt, i)
        test_loss = test_model_regression(model, dataset)
        if test_loss < loss_best:
            loss_best = test_loss
            model.module.loss = test_loss  # stash the score on the saved model
            model_best = model.module
            torch.save(model_best, save_path)
    return model_best, loss_best
def train_model_search(teacher_, students_, dataset,
                       optimization=opt.tr_se_optimization,
                       epochs=opt.tr_se_epochs, lr=opt.tr_se_lr,
                       momentum=opt.tr_se_momentum,
                       weight_decay=opt.tr_se_weight_decay,
                       lr_schedule=opt.tr_se_lr_schedule,
                       loss_criterion=opt.tr_se_loss_criterion):
    """Jointly train a population of candidate students for one search step.

    Each student trains on the same batches, either distilling from the
    teacher (``loss_criterion='KD'``: MSE to teacher outputs) or directly on
    labels (``'CE'``).  Per-student bests (by test accuracy) are tracked.

    Returns:
        tuple: ``(students_best, accs_best)`` — the best unwrapped module
        and best accuracy per student.

    Raises:
        ValueError: for an unknown ``loss_criterion`` or ``optimization``
        (previously these fell through and died later with a NameError).
    """
    n = len(students_)
    accs_best = [0.0] * n
    students_best = [None] * n
    teacher = torch.nn.DataParallel(teacher_.to(opt.device))
    students = [torch.nn.DataParallel(s.to(opt.device)) for s in students_]
    if loss_criterion == 'KD':
        criterion = nn.MSELoss()
    elif loss_criterion == 'CE':
        criterion = nn.CrossEntropyLoss()
    else:
        raise ValueError('Unknown loss_criterion: %s' % loss_criterion)
    if optimization == 'SGD':
        optimizers = [optim.SGD(students[j].parameters(), lr=lr,
                                momentum=momentum, weight_decay=weight_decay)
                      for j in range(n)]
    elif optimization == 'Adam':
        optimizers = [optim.Adam(students[j].parameters(), lr=lr,
                                 weight_decay=weight_decay) for j in range(n)]
    else:
        raise ValueError('Unknown optimization: %s' % optimization)
    if lr_schedule == 'linear':
        n_total_exp = epochs * len(dataset.train_loader)
        lr_lambda = lambda n_exp_seen: 1 - n_exp_seen / n_total_exp
        schedulers = [optim.lr_scheduler.LambdaLR(optimizers[j],
                                                  lr_lambda=lr_lambda)
                      for j in range(n)]
    for i in range(1, epochs + 1):
        print("epochs:", i)
        teacher.eval()
        for j in range(n):
            students[j].train()
        loss_total = [0.0] * n
        batch_cnt = 0
        for inputs, targets in dataset.train_loader:
            inputs = inputs.to(opt.device)
            if loss_criterion == 'KD':
                # One frozen teacher forward shared by all students.
                with torch.no_grad():
                    teacher_outputs = teacher(inputs)
            else:  # 'CE'
                targets = targets.to(opt.device)
            for j in range(n):
                optimizers[j].zero_grad()
                student_outputs = students[j](inputs)
                if loss_criterion == 'KD':
                    loss = criterion(student_outputs, teacher_outputs)
                else:
                    loss = criterion(student_outputs, targets)
                loss.backward()
                optimizers[j].step()
                # PyTorch >= 1.1 requires scheduler.step() AFTER
                # optimizer.step(); the old order skipped the first lr value.
                if lr_schedule == 'linear':
                    schedulers[j].step()
                loss_total[j] += loss.item()
            batch_cnt += 1
        for j in range(n):
            opt.writer.add_scalar('step_%d/sample_%d_loss' % (opt.i, j),
                                  loss_total[j] / batch_cnt, i)
            acc = test_model(students[j], dataset)
            opt.writer.add_scalar('step_%d/sample_%d_acc' % (opt.i, j), acc, i)
            if acc > accs_best[j]:
                accs_best[j] = acc
                students_best[j] = students[j].module
    return students_best, accs_best
def train_model_search_reg(teacher_, students_, dataset,
                           optimization=opt.tr_se_optimization,
                           epochs=opt.tr_se_epochs, lr=opt.tr_se_lr,
                           momentum=opt.tr_se_momentum,
                           weight_decay=opt.tr_se_weight_decay,
                           lr_schedule=opt.tr_se_lr_schedule,
                           loss_criterion=opt.tr_se_loss_criterion):
    """Jointly train candidate student regressors for one search step.

    Loss options:
      * ``'KD'``: MSE to the teacher's outputs plus MSE to the labels;
      * ``'l2'``: plain MSE to the labels (previously accepted at setup but
        unhandled in the training loop, which raised a NameError on `loss`);
      * ``'CE'``: cross-entropy to the labels.

    Per-student bests are selected by LOWEST test loss.

    Returns:
        tuple: ``(students_best, loss_best)``.

    Raises:
        ValueError: for an unknown ``loss_criterion`` or ``optimization``.
    """
    n = len(students_)
    loss_best = [sys.maxsize] * n
    students_best = [None] * n
    teacher = torch.nn.DataParallel(teacher_.to(opt.device))
    students = [torch.nn.DataParallel(s.to(opt.device)) for s in students_]
    if loss_criterion == 'KD' or loss_criterion == 'l2':
        criterion = nn.MSELoss()
    elif loss_criterion == 'CE':
        criterion = nn.CrossEntropyLoss()
    else:
        raise ValueError('Unknown loss_criterion: %s' % loss_criterion)
    if optimization == 'SGD':
        optimizers = [optim.SGD(students[j].parameters(), lr=lr,
                                momentum=momentum, weight_decay=weight_decay)
                      for j in range(n)]
    elif optimization == 'Adam':
        optimizers = [optim.Adam(students[j].parameters(), lr=lr,
                                 weight_decay=weight_decay) for j in range(n)]
    else:
        raise ValueError('Unknown optimization: %s' % optimization)
    if lr_schedule == 'linear':
        n_total_exp = epochs * len(dataset.train_loader)
        lr_lambda = lambda n_exp_seen: 1 - n_exp_seen / n_total_exp
        schedulers = [optim.lr_scheduler.LambdaLR(optimizers[j],
                                                  lr_lambda=lr_lambda)
                      for j in range(n)]
    for i in range(1, epochs + 1):
        print("epochs:", i)
        teacher.eval()
        for j in range(n):
            students[j].train()
        loss_total = [0.0] * n
        batch_cnt = 0
        for inputs, targets in dataset.train_loader:
            # Targets are moved once here (the old code moved them a second,
            # redundant time inside the CE branch).
            inputs = inputs.to(opt.device)
            targets = targets.to(opt.device)
            if loss_criterion == 'KD':
                # One frozen teacher forward shared by all students.
                with torch.no_grad():
                    teacher_outputs = teacher(inputs)
            for j in range(n):
                optimizers[j].zero_grad()
                student_outputs = students[j](inputs)
                if loss_criterion == 'KD':
                    loss = (criterion(student_outputs, teacher_outputs) +
                            criterion(student_outputs, targets))
                else:  # 'l2' and 'CE' both fit against the labels directly
                    loss = criterion(student_outputs, targets)
                loss.backward()
                optimizers[j].step()
                # Scheduler must step AFTER the optimizer (PyTorch >= 1.1).
                if lr_schedule == 'linear':
                    schedulers[j].step()
                loss_total[j] += loss.item()
            batch_cnt += 1
        for j in range(n):
            # Log the per-batch mean, consistent with train_model_search
            # (previously the raw sum was logged).
            opt.writer.add_scalar('step_%d/sample_%d_loss' % (opt.i, j),
                                  loss_total[j] / batch_cnt, i)
            test_loss = test_model_regression(students[j], dataset)
            if test_loss < loss_best[j]:
                loss_best[j] = test_loss
                students_best[j] = students[j].module
    return students_best, loss_best
| 38.218798
| 84
| 0.566763
| 2,922
| 24,804
| 4.5859
| 0.058522
| 0.025075
| 0.031194
| 0.019851
| 0.884179
| 0.864776
| 0.845672
| 0.839179
| 0.832985
| 0.820448
| 0
| 0.009974
| 0.328979
| 24,804
| 648
| 85
| 38.277778
| 0.795121
| 0.053177
| 0
| 0.810662
| 0
| 0
| 0.024191
| 0.002773
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022059
| false
| 0
| 0.018382
| 0
| 0.0625
| 0.022059
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.