hexsha
string | size
int64 | ext
string | lang
string | max_stars_repo_path
string | max_stars_repo_name
string | max_stars_repo_head_hexsha
string | max_stars_repo_licenses
list | max_stars_count
int64 | max_stars_repo_stars_event_min_datetime
string | max_stars_repo_stars_event_max_datetime
string | max_issues_repo_path
string | max_issues_repo_name
string | max_issues_repo_head_hexsha
string | max_issues_repo_licenses
list | max_issues_count
int64 | max_issues_repo_issues_event_min_datetime
string | max_issues_repo_issues_event_max_datetime
string | max_forks_repo_path
string | max_forks_repo_name
string | max_forks_repo_head_hexsha
string | max_forks_repo_licenses
list | max_forks_count
int64 | max_forks_repo_forks_event_min_datetime
string | max_forks_repo_forks_event_max_datetime
string | content
string | avg_line_length
float64 | max_line_length
int64 | alphanum_fraction
float64 | qsc_code_num_words_quality_signal
int64 | qsc_code_num_chars_quality_signal
float64 | qsc_code_mean_word_length_quality_signal
float64 | qsc_code_frac_words_unique_quality_signal
float64 | qsc_code_frac_chars_top_2grams_quality_signal
float64 | qsc_code_frac_chars_top_3grams_quality_signal
float64 | qsc_code_frac_chars_top_4grams_quality_signal
float64 | qsc_code_frac_chars_dupe_5grams_quality_signal
float64 | qsc_code_frac_chars_dupe_6grams_quality_signal
float64 | qsc_code_frac_chars_dupe_7grams_quality_signal
float64 | qsc_code_frac_chars_dupe_8grams_quality_signal
float64 | qsc_code_frac_chars_dupe_9grams_quality_signal
float64 | qsc_code_frac_chars_dupe_10grams_quality_signal
float64 | qsc_code_frac_chars_replacement_symbols_quality_signal
float64 | qsc_code_frac_chars_digital_quality_signal
float64 | qsc_code_frac_chars_whitespace_quality_signal
float64 | qsc_code_size_file_byte_quality_signal
float64 | qsc_code_num_lines_quality_signal
float64 | qsc_code_num_chars_line_max_quality_signal
float64 | qsc_code_num_chars_line_mean_quality_signal
float64 | qsc_code_frac_chars_alphabet_quality_signal
float64 | qsc_code_frac_chars_comments_quality_signal
float64 | qsc_code_cate_xml_start_quality_signal
float64 | qsc_code_frac_lines_dupe_lines_quality_signal
float64 | qsc_code_cate_autogen_quality_signal
float64 | qsc_code_frac_lines_long_string_quality_signal
float64 | qsc_code_frac_chars_string_length_quality_signal
float64 | qsc_code_frac_chars_long_word_length_quality_signal
float64 | qsc_code_frac_lines_string_concat_quality_signal
float64 | qsc_code_cate_encoded_data_quality_signal
float64 | qsc_code_frac_chars_hex_words_quality_signal
float64 | qsc_code_frac_lines_prompt_comments_quality_signal
float64 | qsc_code_frac_lines_assert_quality_signal
float64 | qsc_codepython_cate_ast_quality_signal
float64 | qsc_codepython_frac_lines_func_ratio_quality_signal
float64 | qsc_codepython_cate_var_zero_quality_signal
bool | qsc_codepython_frac_lines_pass_quality_signal
float64 | qsc_codepython_frac_lines_import_quality_signal
float64 | qsc_codepython_frac_lines_simplefunc_quality_signal
float64 | qsc_codepython_score_lines_no_logic_quality_signal
float64 | qsc_codepython_frac_lines_print_quality_signal
float64 | qsc_code_num_words
int64 | qsc_code_num_chars
int64 | qsc_code_mean_word_length
int64 | qsc_code_frac_words_unique
null | qsc_code_frac_chars_top_2grams
int64 | qsc_code_frac_chars_top_3grams
int64 | qsc_code_frac_chars_top_4grams
int64 | qsc_code_frac_chars_dupe_5grams
int64 | qsc_code_frac_chars_dupe_6grams
int64 | qsc_code_frac_chars_dupe_7grams
int64 | qsc_code_frac_chars_dupe_8grams
int64 | qsc_code_frac_chars_dupe_9grams
int64 | qsc_code_frac_chars_dupe_10grams
int64 | qsc_code_frac_chars_replacement_symbols
int64 | qsc_code_frac_chars_digital
int64 | qsc_code_frac_chars_whitespace
int64 | qsc_code_size_file_byte
int64 | qsc_code_num_lines
int64 | qsc_code_num_chars_line_max
int64 | qsc_code_num_chars_line_mean
int64 | qsc_code_frac_chars_alphabet
int64 | qsc_code_frac_chars_comments
int64 | qsc_code_cate_xml_start
int64 | qsc_code_frac_lines_dupe_lines
int64 | qsc_code_cate_autogen
int64 | qsc_code_frac_lines_long_string
int64 | qsc_code_frac_chars_string_length
int64 | qsc_code_frac_chars_long_word_length
int64 | qsc_code_frac_lines_string_concat
null | qsc_code_cate_encoded_data
int64 | qsc_code_frac_chars_hex_words
int64 | qsc_code_frac_lines_prompt_comments
int64 | qsc_code_frac_lines_assert
int64 | qsc_codepython_cate_ast
int64 | qsc_codepython_frac_lines_func_ratio
int64 | qsc_codepython_cate_var_zero
int64 | qsc_codepython_frac_lines_pass
int64 | qsc_codepython_frac_lines_import
int64 | qsc_codepython_frac_lines_simplefunc
int64 | qsc_codepython_score_lines_no_logic
int64 | qsc_codepython_frac_lines_print
int64 | effective
string | hits
int64 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
6583de6de1913baff3c64de57ef3c14941675bd2
| 135
|
py
|
Python
|
jsonfriendly_redshift/utils.py
|
mishraamrish/jsonfriendly-redshift
|
5ba63b1159b9ac56e3173262fb30f9dde0595cd6
|
[
"MIT"
] | 1
|
2020-09-05T16:12:04.000Z
|
2020-09-05T16:12:04.000Z
|
jsonfriendly_redshift/utils.py
|
mishraamrish/jsonfriendly-redshift
|
5ba63b1159b9ac56e3173262fb30f9dde0595cd6
|
[
"MIT"
] | null | null | null |
jsonfriendly_redshift/utils.py
|
mishraamrish/jsonfriendly-redshift
|
5ba63b1159b9ac56e3173262fb30f9dde0595cd6
|
[
"MIT"
] | null | null | null |
import json
def generate_json_for_copy_query(list_data: list) -> str:
return "\n".join([json.dumps(data) for data in list_data])
| 22.5
| 62
| 0.733333
| 23
| 135
| 4.043478
| 0.652174
| 0.172043
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140741
| 135
| 5
| 63
| 27
| 0.801724
| 0
| 0
| 0
| 1
| 0
| 0.014815
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
658ef23097d0ef094ea1e1a47f7a20e0fed49a05
| 3,191
|
py
|
Python
|
unittests/test_day19p1.py
|
Nixxen/advent_of_code_2021
|
5dec9eec22272322b75961256d471876437b3597
|
[
"CC0-1.0"
] | null | null | null |
unittests/test_day19p1.py
|
Nixxen/advent_of_code_2021
|
5dec9eec22272322b75961256d471876437b3597
|
[
"CC0-1.0"
] | null | null | null |
unittests/test_day19p1.py
|
Nixxen/advent_of_code_2021
|
5dec9eec22272322b75961256d471876437b3597
|
[
"CC0-1.0"
] | null | null | null |
from day19 import day19_part1 as test_code
# Rotate point tests
# rotate_point(point: tuple, rotation: int, axis: int) -> tuple
print(f"test_code: {test_code}")
def test_rotate_zero():
assert test_code.rotate_point((1, 2, 3), 0, 0) == (1, 2, 3)
def test_rotate_90_origin():
# x, y, z = (0,0,0)
assert test_code.rotate_point((0, 0, 0), 1, 0) == (0, 0, 0)
assert test_code.rotate_point((0, 0, 0), 1, 1) == (0, 0, 0)
assert test_code.rotate_point((0, 0, 0), 1, 2) == (0, 0, 0)
def test_rotate_90_x():
# x, y, z = (1,0,0)
assert test_code.rotate_point((1, 0, 0), 1, 0) == (1, 0, 0)
assert test_code.rotate_point((1, 0, 0), 1, 1) == (0, 0, -1)
assert test_code.rotate_point((1, 0, 0), 1, 2) == (0, 1, 0)
def test_rotate_90_y():
# x, y, z = (0,1,0)
assert test_code.rotate_point((0, 1, 0), 1, 0) == (0, 0, 1)
assert test_code.rotate_point((0, 1, 0), 1, 1) == (0, 1, 0)
assert test_code.rotate_point((0, 1, 0), 1, 2) == (-1, 0, 0)
def test_rotate_90_z():
# x, y, z = (0,0,1)
assert test_code.rotate_point((0, 0, 1), 1, 0) == (0, -1, 0)
assert test_code.rotate_point((0, 0, 1), 1, 1) == (1, 0, 0)
assert test_code.rotate_point((0, 0, 1), 1, 2) == (0, 0, 1)
def test_rotate_180_origin():
# x, y, z = (0,0,0)
assert test_code.rotate_point((0, 0, 0), 2, 0) == (0, 0, 0)
assert test_code.rotate_point((0, 0, 0), 2, 1) == (0, 0, 0)
assert test_code.rotate_point((0, 0, 0), 2, 2) == (0, 0, 0)
def test_rotate_180_x():
# x, y, z = (1,0,0)
assert test_code.rotate_point((1, 0, 0), 2, 0) == (1, 0, 0)
assert test_code.rotate_point((1, 0, 0), 2, 1) == (-1, 0, 0)
assert test_code.rotate_point((1, 0, 0), 2, 2) == (-1, 0, 0)
def test_rotate_180_y():
# x, y, z = (0,1,0)
assert test_code.rotate_point((0, 1, 0), 2, 0) == (0, -1, 0)
assert test_code.rotate_point((0, 1, 0), 2, 1) == (0, 1, 0)
assert test_code.rotate_point((0, 1, 0), 2, 2) == (0, -1, 0)
def test_rotate_180_z():
# x, y, z = (0,0,1)
assert test_code.rotate_point((0, 0, 1), 2, 0) == (0, 0, -1)
assert test_code.rotate_point((0, 0, 1), 2, 1) == (0, 0, -1)
assert test_code.rotate_point((0, 0, 1), 2, 2) == (0, 0, 1)
def test_rotate_270_origin():
# x, y, z = (0,0,0)
assert test_code.rotate_point((0, 0, 0), 3, 0) == (0, 0, 0)
assert test_code.rotate_point((0, 0, 0), 3, 1) == (0, 0, 0)
assert test_code.rotate_point((0, 0, 0), 3, 2) == (0, 0, 0)
def test_rotate_270_x():
# x, y, z = (1,0,0)
assert test_code.rotate_point((1, 0, 0), 3, 0) == (1, 0, 0)
assert test_code.rotate_point((1, 0, 0), 3, 1) == (0, 0, 1)
assert test_code.rotate_point((1, 0, 0), 3, 2) == (0, -1, 0)
def test_rotate_270_y():
# x, y, z = (0,1,0)
assert test_code.rotate_point((0, 1, 0), 3, 0) == (0, 0, -1)
assert test_code.rotate_point((0, 1, 0), 3, 1) == (0, 1, 0)
assert test_code.rotate_point((0, 1, 0), 3, 2) == (1, 0, 0)
def test_rotate_270_z():
# x, y, z = (0,0,1)
assert test_code.rotate_point((0, 0, 1), 3, 0) == (0, 1, 0)
assert test_code.rotate_point((0, 0, 1), 3, 1) == (-1, 0, 0)
assert test_code.rotate_point((0, 0, 1), 3, 2) == (0, 0, 1)
| 33.946809
| 64
| 0.551238
| 643
| 3,191
| 2.553655
| 0.046656
| 0.114495
| 0.323995
| 0.439708
| 0.898904
| 0.894032
| 0.868453
| 0.754568
| 0.754568
| 0.749695
| 0
| 0.14885
| 0.223128
| 3,191
| 93
| 65
| 34.311828
| 0.513514
| 0.092761
| 0
| 0
| 0
| 0
| 0.007639
| 0
| 0
| 0
| 0
| 0
| 0.711538
| 1
| 0.25
| true
| 0
| 0.019231
| 0
| 0.269231
| 0.019231
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
65ce0ae0f1c9fda567a8c2e735d06b731f9c3d49
| 186
|
py
|
Python
|
chargebee_v2/models/download.py
|
armisael/chargebee-python
|
df01e80c8ed7ac8ed37ad985ce5a5682cba64c30
|
[
"MIT"
] | null | null | null |
chargebee_v2/models/download.py
|
armisael/chargebee-python
|
df01e80c8ed7ac8ed37ad985ce5a5682cba64c30
|
[
"MIT"
] | null | null | null |
chargebee_v2/models/download.py
|
armisael/chargebee-python
|
df01e80c8ed7ac8ed37ad985ce5a5682cba64c30
|
[
"MIT"
] | null | null | null |
import json
from chargebee_v2.model import Model
from chargebee_v2 import request
from chargebee_v2 import APIError
class Download(Model):
fields = ["download_url", "valid_till"]
| 18.6
| 43
| 0.790323
| 26
| 186
| 5.461538
| 0.538462
| 0.274648
| 0.316901
| 0.295775
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.018868
| 0.145161
| 186
| 9
| 44
| 20.666667
| 0.874214
| 0
| 0
| 0
| 0
| 0
| 0.118919
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.666667
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
029f4bb6c1704593fcef70d440faf53a780efc2b
| 13,815
|
py
|
Python
|
tests/previs/test_track.py
|
MehmetErer/anima
|
f92ae599b5a4c181fc8e131a9ccdde537e635303
|
[
"MIT"
] | 101
|
2015-02-08T22:20:11.000Z
|
2022-03-21T18:56:42.000Z
|
tests/previs/test_track.py
|
Khosiyat/anima
|
f631c08400547f49ac5f1feeb730f22c255eb771
|
[
"MIT"
] | 23
|
2016-11-30T08:33:21.000Z
|
2021-01-26T12:11:12.000Z
|
tests/previs/test_track.py
|
Khosiyat/anima
|
f631c08400547f49ac5f1feeb730f22c255eb771
|
[
"MIT"
] | 27
|
2015-01-03T06:49:45.000Z
|
2021-12-28T03:30:54.000Z
|
# -*- coding: utf-8 -*-
import unittest
from anima.edit import Track, Clip, File
class TrackTestCase(unittest.TestCase):
"""tests the anima.previs.Track class
"""
def test_to_xml_method_is_working_properly(self):
"""testing if the to xml method is working properly
"""
t = Track()
t.enabled = True
t.locked = False
# clip 1
f = File()
f.duration = 34
f.name = 'shot2'
f.pathurl = 'file://localhost/home/eoyilmaz/maya/projects/default/data/shot2.mov'
c = Clip()
c.id = 'shot2'
c.start = 1
c.end = 35
c.name = 'shot2'
c.enabled = True
c.duration = 34
c.in_ = 0
c.out = 34
c.file = f
t.clips.append(c)
# clip 2
f = File()
f.duration = 30
f.name = 'shot'
f.pathurl = 'file://localhost/home/eoyilmaz/maya/projects/default/data/shot.mov'
c = Clip()
c.id = 'shot'
c.start = 35
c.end = 65
c.name = 'shot'
c.enabled = True
c.duration = 30
c.in_ = 0
c.out = 30
c.file = f
t.clips.append(c)
# clip 3
f = File()
f.duration = 45
f.name = 'shot1'
f.pathurl = 'file://localhost/home/eoyilmaz/maya/projects/default/data/shot1.mov'
c = Clip()
c.id = 'shot1'
c.start = 65
c.end = 110
c.name = 'shot1'
c.enabled = True
c.duration = 45
c.in_ = 0
c.out = 45
c.file = f
t.clips.append(c)
expected_xml = \
"""<track>
<locked>FALSE</locked>
<enabled>TRUE</enabled>
<clipitem id="shot2">
<end>35</end>
<name>shot2</name>
<enabled>True</enabled>
<start>1</start>
<in>0</in>
<duration>34</duration>
<out>34</out>
<file id="shot2.mov">
<duration>34</duration>
<name>shot2</name>
<pathurl>file://localhost/home/eoyilmaz/maya/projects/default/data/shot2.mov</pathurl>
</file>
</clipitem>
<clipitem id="shot">
<end>65</end>
<name>shot</name>
<enabled>True</enabled>
<start>35</start>
<in>0</in>
<duration>30</duration>
<out>30</out>
<file id="shot.mov">
<duration>30</duration>
<name>shot</name>
<pathurl>file://localhost/home/eoyilmaz/maya/projects/default/data/shot.mov</pathurl>
</file>
</clipitem>
<clipitem id="shot1">
<end>110</end>
<name>shot1</name>
<enabled>True</enabled>
<start>65</start>
<in>0</in>
<duration>45</duration>
<out>45</out>
<file id="shot1.mov">
<duration>45</duration>
<name>shot1</name>
<pathurl>file://localhost/home/eoyilmaz/maya/projects/default/data/shot1.mov</pathurl>
</file>
</clipitem>
</track>"""
self.assertEqual(
expected_xml,
t.to_xml()
)
def test_from_xml_method_is_working_properly(self):
"""testing if the from_xml method will fill object attributes from the
given xml node
"""
from xml.etree import ElementTree
track_node = ElementTree.Element('track')
locked_node = ElementTree.SubElement(track_node, 'locked')
locked_node.text = 'FALSE'
enabled_node = ElementTree.SubElement(track_node, 'enabled')
enabled_node.text = 'TRUE'
# clip1
clip_node = ElementTree.SubElement(track_node, 'clipitem',
attrib={'id': 'shot2'})
end_node = ElementTree.SubElement(clip_node, 'end')
end_node.text = '35'
name_node = ElementTree.SubElement(clip_node, 'name')
name_node.text = 'shot2'
enabled_node = ElementTree.SubElement(clip_node, 'enabled')
enabled_node.text = 'True'
start_node = ElementTree.SubElement(clip_node, 'start')
start_node.text = '1'
in_node = ElementTree.SubElement(clip_node, 'in')
in_node.text = '0'
duration_node = ElementTree.SubElement(clip_node, 'duration')
duration_node.text = '34'
out_node = ElementTree.SubElement(clip_node, 'out')
out_node.text = '34'
file_node = ElementTree.SubElement(clip_node, 'file')
duration_node = ElementTree.SubElement(file_node, 'duration')
duration_node.text = '34'
name_node = ElementTree.SubElement(file_node, 'name')
name_node.text = 'shot2'
pathurl_node = ElementTree.SubElement(file_node, 'pathurl')
pathurl = 'file://localhost/home/eoyilmaz/maya/projects/default/data/shot2.mov'
pathurl_node.text = pathurl
# clip2
clip_node = ElementTree.SubElement(track_node, 'clipitem',
attrib={'id': 'shot'})
end_node = ElementTree.SubElement(clip_node, 'end')
end_node.text = '65'
name_node = ElementTree.SubElement(clip_node, 'name')
name_node.text = 'shot'
enabled_node = ElementTree.SubElement(clip_node, 'enabled')
enabled_node.text = 'True'
start_node = ElementTree.SubElement(clip_node, 'start')
start_node.text = '35'
in_node = ElementTree.SubElement(clip_node, 'in')
in_node.text = '0'
duration_node = ElementTree.SubElement(clip_node, 'duration')
duration_node.text = '30'
out_node = ElementTree.SubElement(clip_node, 'out')
out_node.text = '30'
file_node = ElementTree.SubElement(clip_node, 'file')
duration_node = ElementTree.SubElement(file_node, 'duration')
duration_node.text = '30'
name_node = ElementTree.SubElement(file_node, 'name')
name_node.text = 'shot'
pathurl_node = ElementTree.SubElement(file_node, 'pathurl')
pathurl = 'file://localhost/home/eoyilmaz/maya/projects/default/data/shot.mov'
pathurl_node.text = pathurl
# clip3
clip_node = ElementTree.SubElement(track_node, 'clipitem',
attrib={'id': 'shot1'})
end_node = ElementTree.SubElement(clip_node, 'end')
end_node.text = '110'
name_node = ElementTree.SubElement(clip_node, 'name')
name_node.text = 'shot1'
enabled_node = ElementTree.SubElement(clip_node, 'enabled')
enabled_node.text = 'True'
start_node = ElementTree.SubElement(clip_node, 'start')
start_node.text = '65'
in_node = ElementTree.SubElement(clip_node, 'in')
in_node.text = '0'
duration_node = ElementTree.SubElement(clip_node, 'duration')
duration_node.text = '45'
out_node = ElementTree.SubElement(clip_node, 'out')
out_node.text = '45'
file_node = ElementTree.SubElement(clip_node, 'file')
duration_node = ElementTree.SubElement(file_node, 'duration')
duration_node.text = '45'
name_node = ElementTree.SubElement(file_node, 'name')
name_node.text = 'shot1'
pathurl_node = ElementTree.SubElement(file_node, 'pathurl')
pathurl = 'file://localhost/home/eoyilmaz/maya/projects/default/data/shot1.mov'
pathurl_node.text = pathurl
t = Track()
t.from_xml(track_node)
self.assertEqual(False, t.locked)
self.assertEqual(True, t.enabled)
# clip1
c = t.clips[0]
self.assertEqual(35, c.end)
self.assertEqual('shot2', c.name)
self.assertEqual(True, c.enabled)
self.assertEqual(1, c.start)
self.assertEqual(0, c.in_)
self.assertEqual(34, c.duration)
self.assertEqual(34, c.out)
f = c.file
self.assertEqual(34, f.duration)
self.assertEqual('shot2', f.name)
self.assertEqual(
'file://localhost/home/eoyilmaz/maya/projects/default/data/shot2.mov',
f.pathurl
)
# clip2
c = t.clips[1]
self.assertEqual(65, c.end)
self.assertEqual('shot', c.name)
self.assertEqual(True, c.enabled)
self.assertEqual(35, c.start)
self.assertEqual(0, c.in_)
self.assertEqual(30, c.duration)
self.assertEqual(30, c.out)
f = c.file
self.assertEqual(30, f.duration)
self.assertEqual('shot', f.name)
self.assertEqual(
'file://localhost/home/eoyilmaz/maya/projects/default/data/shot.mov',
f.pathurl
)
# clip3
c = t.clips[2]
self.assertEqual(110, c.end)
self.assertEqual('shot1', c.name)
self.assertEqual(True, c.enabled)
self.assertEqual(65, c.start)
self.assertEqual(0, c.in_)
self.assertEqual(45, c.duration)
self.assertEqual(45, c.out)
f = c.file
self.assertEqual(45, f.duration)
self.assertEqual('shot1', f.name)
self.assertEqual(
'file://localhost/home/eoyilmaz/maya/projects/default/data/shot1.mov',
f.pathurl
)
def test_optimize_clips_is_working_properly(self):
"""testing if the optimize_clips method will optimize the clips to use
the same file node if the file pathurls are same
"""
t = Track()
t.enabled = True
t.locked = False
# clip 1
f = File()
f.duration = 34
f.name = 'shot2'
f.pathurl = 'file://localhost/home/eoyilmaz/maya/projects/default/data/shot2.mov'
c = Clip()
c.id = 'shot2'
c.start = 1
c.end = 35
c.name = 'shot2'
c.enabled = True
c.duration = 34
c.in_ = 0
c.out = 34
c.file = f
t.clips.append(c)
# clip 2
f = File()
f.duration = 30
f.name = 'shot'
f.pathurl = 'file://localhost/home/eoyilmaz/maya/projects/default/data/shot2.mov'
c = Clip()
c.id = 'shot'
c.start = 35
c.end = 65
c.name = 'shot'
c.enabled = True
c.duration = 30
c.in_ = 0
c.out = 30
c.file = f
t.clips.append(c)
# clip 3
f = File()
f.duration = 45
f.name = 'shot1'
f.pathurl = 'file://localhost/home/eoyilmaz/maya/projects/default/data/shot1.mov'
c = Clip()
c.id = 'shot1'
c.start = 65
c.end = 110
c.name = 'shot1'
c.enabled = True
c.duration = 45
c.in_ = 0
c.out = 45
c.file = f
t.clips.append(c)
# check if the file objects are different
self.assertNotEqual(t.clips[0].file, t.clips[1].file)
self.assertNotEqual(t.clips[0].file, t.clips[2].file)
self.assertNotEqual(t.clips[1].file, t.clips[2].file)
# now optimize the clips
t.optimize_clips()
# check if the file[0] and file[1] is the same file node
# and the file[2] is different than the others
self.assertEqual(t.clips[0].file, t.clips[1].file)
self.assertNotEqual(t.clips[0].file, t.clips[2].file)
self.assertNotEqual(t.clips[1].file, t.clips[2].file)
def test_to_xml_method_with_optimized_clips_is_working_properly(self):
"""testing if the to xml method is working properly with the clips are
optimized
"""
t = Track()
t.enabled = True
t.locked = False
# clip 1
f = File()
f.duration = 34
f.name = 'shot2'
f.pathurl = 'file://localhost/home/eoyilmaz/maya/projects/default/data/shot2.mov'
c = Clip()
c.id = 'shot2'
c.start = 1
c.end = 35
c.name = 'shot2'
c.enabled = True
c.duration = 34
c.in_ = 0
c.out = 34
c.file = f
t.clips.append(c)
# clip 2
f = File()
f.duration = 30
f.name = 'shot'
f.pathurl = 'file://localhost/home/eoyilmaz/maya/projects/default/data/shot2.mov'
c = Clip()
c.id = 'shot2'
c.start = 35
c.end = 65
c.name = 'shot2'
c.enabled = True
c.duration = 30
c.in_ = 0
c.out = 30
c.file = f
t.clips.append(c)
# clip 3
f = File()
f.duration = 45
f.name = 'shot1'
f.pathurl = 'file://localhost/home/eoyilmaz/maya/projects/default/data/shot1.mov'
c = Clip()
c.id = 'shot1'
c.start = 65
c.end = 110
c.name = 'shot1'
c.enabled = True
c.duration = 45
c.in_ = 0
c.out = 45
c.file = f
t.clips.append(c)
expected_xml = \
"""<track>
<locked>FALSE</locked>
<enabled>TRUE</enabled>
<clipitem id="shot2">
<end>35</end>
<name>shot2</name>
<enabled>True</enabled>
<start>1</start>
<in>0</in>
<duration>34</duration>
<out>34</out>
<file id="shot2.mov">
<duration>34</duration>
<name>shot2</name>
<pathurl>file://localhost/home/eoyilmaz/maya/projects/default/data/shot2.mov</pathurl>
</file>
</clipitem>
<clipitem id="shot2 2">
<end>65</end>
<name>shot2</name>
<enabled>True</enabled>
<start>35</start>
<in>0</in>
<duration>30</duration>
<out>30</out>
<file id="shot2.mov"/>
</clipitem>
<clipitem id="shot1">
<end>110</end>
<name>shot1</name>
<enabled>True</enabled>
<start>65</start>
<in>0</in>
<duration>45</duration>
<out>45</out>
<file id="shot1.mov">
<duration>45</duration>
<name>shot1</name>
<pathurl>file://localhost/home/eoyilmaz/maya/projects/default/data/shot1.mov</pathurl>
</file>
</clipitem>
</track>"""
t.optimize_clips()
self.assertEqual(
expected_xml,
t.to_xml()
)
| 28.962264
| 92
| 0.560912
| 1,709
| 13,815
| 4.441194
| 0.057929
| 0.077075
| 0.125165
| 0.0917
| 0.849144
| 0.830435
| 0.820422
| 0.802108
| 0.790119
| 0.727668
| 0
| 0.034239
| 0.304452
| 13,815
| 476
| 93
| 29.023109
| 0.755646
| 0.047774
| 0
| 0.777027
| 0
| 0
| 0.135595
| 0.092615
| 0
| 0
| 0
| 0
| 0.135135
| 1
| 0.013514
| false
| 0
| 0.010135
| 0
| 0.027027
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
02af378000025ca85934a4a30e92ef15673d39ab
| 25
|
py
|
Python
|
t/t1.py
|
voidlizard/beepc
|
692f06fc90f06d753a8db786b27fafcf726f991c
|
[
"MIT"
] | 2
|
2017-01-25T17:15:58.000Z
|
2017-09-05T20:57:46.000Z
|
t/t1.py
|
voidlizard/beepc
|
692f06fc90f06d753a8db786b27fafcf726f991c
|
[
"MIT"
] | null | null | null |
t/t1.py
|
voidlizard/beepc
|
692f06fc90f06d753a8db786b27fafcf726f991c
|
[
"MIT"
] | null | null | null |
b = a
a = 1
print b
| 3.125
| 7
| 0.4
| 6
| 25
| 1.666667
| 0.666667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.083333
| 0.52
| 25
| 7
| 8
| 3.571429
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.333333
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
02ba8144eba30f7cb451736e210a449775b83b13
| 322
|
py
|
Python
|
extensions/.stubs/clrclasses/System/Net/Cache/__init__.py
|
vicwjb/Pycad
|
7391cd694b7a91ad9f9964ec95833c1081bc1f84
|
[
"MIT"
] | 1
|
2020-03-25T03:27:24.000Z
|
2020-03-25T03:27:24.000Z
|
extensions/.stubs/clrclasses/System/Net/Cache/__init__.py
|
vicwjb/Pycad
|
7391cd694b7a91ad9f9964ec95833c1081bc1f84
|
[
"MIT"
] | null | null | null |
extensions/.stubs/clrclasses/System/Net/Cache/__init__.py
|
vicwjb/Pycad
|
7391cd694b7a91ad9f9964ec95833c1081bc1f84
|
[
"MIT"
] | null | null | null |
from __clrclasses__.System.Net.Cache import HttpCacheAgeControl
from __clrclasses__.System.Net.Cache import HttpRequestCacheLevel
from __clrclasses__.System.Net.Cache import HttpRequestCachePolicy
from __clrclasses__.System.Net.Cache import RequestCacheLevel
from __clrclasses__.System.Net.Cache import RequestCachePolicy
| 53.666667
| 66
| 0.891304
| 35
| 322
| 7.628571
| 0.314286
| 0.262172
| 0.374532
| 0.430712
| 0.636704
| 0.636704
| 0
| 0
| 0
| 0
| 0
| 0
| 0.062112
| 322
| 5
| 67
| 64.4
| 0.884106
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
02f17a68722bbedae002c6462af8c749e96d5c25
| 19,106
|
py
|
Python
|
plugins/rapid7_insightvm/komand_rapid7_insightvm/actions/asset_vulnerability_solution/schema.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 46
|
2019-06-05T20:47:58.000Z
|
2022-03-29T10:18:01.000Z
|
plugins/rapid7_insightvm/komand_rapid7_insightvm/actions/asset_vulnerability_solution/schema.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 386
|
2019-06-07T20:20:39.000Z
|
2022-03-30T17:35:01.000Z
|
plugins/rapid7_insightvm/komand_rapid7_insightvm/actions/asset_vulnerability_solution/schema.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 43
|
2019-07-09T14:13:58.000Z
|
2022-03-28T12:04:46.000Z
|
# GENERATED BY KOMAND SDK - DO NOT EDIT
import komand
import json
class Component:
DESCRIPTION = "Returns the highest-superceding rollup solutions for a list of vulnerabilities on an asset"
class Input:
ASSET_ID = "asset_id"
VULNERABILITY_IDS = "vulnerability_ids"
class Output:
VULNERABILITIES_SOLUTION = "vulnerabilities_solution"
class AssetVulnerabilitySolutionInput(komand.Input):
schema = json.loads("""
{
"type": "object",
"title": "Variables",
"properties": {
"asset_id": {
"type": "string",
"title": "Asset ID",
"description": "The identifier of the asset",
"order": 1
},
"vulnerability_ids": {
"type": "array",
"title": "Vulnerability IDs",
"description": "A list of identifiers of the vulnerabilities",
"items": {
"type": "string"
},
"order": 2
}
},
"required": [
"asset_id",
"vulnerability_ids"
]
}
""")
def __init__(self):
super(self.__class__, self).__init__(self.schema)
class AssetVulnerabilitySolutionOutput(komand.Output):
schema = json.loads("""
{
"type": "object",
"title": "Variables",
"properties": {
"vulnerabilities_solution": {
"type": "array",
"title": "Solutions for Vulnerabilities",
"description": "Highest-superceding rollup solutions for a vulnerabilities on an asset",
"items": {
"$ref": "#/definitions/vulnerability_solution"
},
"order": 1
}
},
"required": [
"vulnerabilities_solution"
],
"definitions": {
"fingerprint": {
"type": "object",
"title": "fingerprint",
"properties": {
"description": {
"type": "string",
"title": "Description",
"order": 1
},
"family": {
"type": "string",
"title": "Family",
"order": 2
},
"product": {
"type": "string",
"title": "Product",
"order": 3
},
"vendor": {
"type": "string",
"title": "Vendor",
"order": 4
},
"version": {
"type": "string",
"title": "Version",
"order": 5
}
}
},
"link": {
"type": "object",
"title": "link",
"properties": {
"href": {
"type": "string",
"title": "URL",
"description": "A hypertext reference, which is either a URI (see RFC 3986) or URI template (see RFC 6570)",
"order": 1
},
"rel": {
"type": "string",
"title": "Rel",
"description": "Link relation type following RFC 5988",
"order": 2
}
}
},
"match": {
"type": "object",
"title": "match",
"properties": {
"confidence": {
"type": "string",
"title": "Confidence",
"order": 1
},
"fingerprint": {
"$ref": "#/definitions/fingerprint",
"title": "Fingerprint",
"order": 2
}
},
"definitions": {
"fingerprint": {
"type": "object",
"title": "fingerprint",
"properties": {
"description": {
"type": "string",
"title": "Description",
"order": 1
},
"family": {
"type": "string",
"title": "Family",
"order": 2
},
"product": {
"type": "string",
"title": "Product",
"order": 3
},
"vendor": {
"type": "string",
"title": "Vendor",
"order": 4
},
"version": {
"type": "string",
"title": "Version",
"order": 5
}
}
}
}
},
"resources": {
"type": "object",
"title": "resources",
"properties": {
"appliesTo": {
"type": "string",
"title": "Applies To",
"order": 1
},
"confidence": {
"type": "string",
"title": "Confidence",
"order": 2
},
"estimate": {
"type": "string",
"title": "Estimate",
"order": 3
},
"id": {
"type": "string",
"title": "ID",
"order": 4
},
"links": {
"type": "array",
"title": "links",
"items": {
"$ref": "#/definitions/link"
},
"order": 5
},
"matches": {
"type": "array",
"title": "Matches",
"items": {
"$ref": "#/definitions/match"
},
"order": 7
},
"steps": {
"$ref": "#/definitions/step",
"title": "Steps",
"order": 8
},
"summary": {
"$ref": "#/definitions/summary",
"title": "Summary",
"order": 9
},
"type": {
"type": "string",
"title": "Type",
"order": 6
}
},
"definitions": {
"fingerprint": {
"type": "object",
"title": "fingerprint",
"properties": {
"description": {
"type": "string",
"title": "Description",
"order": 1
},
"family": {
"type": "string",
"title": "Family",
"order": 2
},
"product": {
"type": "string",
"title": "Product",
"order": 3
},
"vendor": {
"type": "string",
"title": "Vendor",
"order": 4
},
"version": {
"type": "string",
"title": "Version",
"order": 5
}
}
},
"link": {
"type": "object",
"title": "link",
"properties": {
"href": {
"type": "string",
"title": "URL",
"description": "A hypertext reference, which is either a URI (see RFC 3986) or URI template (see RFC 6570)",
"order": 1
},
"rel": {
"type": "string",
"title": "Rel",
"description": "Link relation type following RFC 5988",
"order": 2
}
}
},
"match": {
"type": "object",
"title": "match",
"properties": {
"confidence": {
"type": "string",
"title": "Confidence",
"order": 1
},
"fingerprint": {
"$ref": "#/definitions/fingerprint",
"title": "Fingerprint",
"order": 2
}
},
"definitions": {
"fingerprint": {
"type": "object",
"title": "fingerprint",
"properties": {
"description": {
"type": "string",
"title": "Description",
"order": 1
},
"family": {
"type": "string",
"title": "Family",
"order": 2
},
"product": {
"type": "string",
"title": "Product",
"order": 3
},
"vendor": {
"type": "string",
"title": "Vendor",
"order": 4
},
"version": {
"type": "string",
"title": "Version",
"order": 5
}
}
}
}
},
"step": {
"type": "object",
"title": "step",
"properties": {
"html": {
"type": "string",
"title": "HTML",
"order": 1
},
"text": {
"type": "string",
"title": "text",
"order": 2
}
}
},
"summary": {
"type": "object",
"title": "summary",
"properties": {
"html": {
"type": "string",
"title": "HTML",
"order": 1
},
"text": {
"type": "string",
"title": "text",
"order": 2
}
}
}
}
},
"step": {
"type": "object",
"title": "step",
"properties": {
"html": {
"type": "string",
"title": "HTML",
"order": 1
},
"text": {
"type": "string",
"title": "text",
"order": 2
}
}
},
"summary": {
"type": "object",
"title": "summary",
"properties": {
"html": {
"type": "string",
"title": "HTML",
"order": 1
},
"text": {
"type": "string",
"title": "text",
"order": 2
}
}
},
"vulnerability_solution": {
"type": "object",
"title": "vulnerability_solution",
"properties": {
"links": {
"type": "array",
"title": "Links",
"description": "Hypermedia links to corresponding or related resources",
"items": {
"$ref": "#/definitions/link"
},
"order": 1
},
"resources": {
"type": "array",
"title": "Solutions",
"description": "Solutions to vulnerabilities",
"items": {
"$ref": "#/definitions/resources"
},
"order": 2
}
},
"definitions": {
"fingerprint": {
"type": "object",
"title": "fingerprint",
"properties": {
"description": {
"type": "string",
"title": "Description",
"order": 1
},
"family": {
"type": "string",
"title": "Family",
"order": 2
},
"product": {
"type": "string",
"title": "Product",
"order": 3
},
"vendor": {
"type": "string",
"title": "Vendor",
"order": 4
},
"version": {
"type": "string",
"title": "Version",
"order": 5
}
}
},
"link": {
"type": "object",
"title": "link",
"properties": {
"href": {
"type": "string",
"title": "URL",
"description": "A hypertext reference, which is either a URI (see RFC 3986) or URI template (see RFC 6570)",
"order": 1
},
"rel": {
"type": "string",
"title": "Rel",
"description": "Link relation type following RFC 5988",
"order": 2
}
}
},
"match": {
"type": "object",
"title": "match",
"properties": {
"confidence": {
"type": "string",
"title": "Confidence",
"order": 1
},
"fingerprint": {
"$ref": "#/definitions/fingerprint",
"title": "Fingerprint",
"order": 2
}
},
"definitions": {
"fingerprint": {
"type": "object",
"title": "fingerprint",
"properties": {
"description": {
"type": "string",
"title": "Description",
"order": 1
},
"family": {
"type": "string",
"title": "Family",
"order": 2
},
"product": {
"type": "string",
"title": "Product",
"order": 3
},
"vendor": {
"type": "string",
"title": "Vendor",
"order": 4
},
"version": {
"type": "string",
"title": "Version",
"order": 5
}
}
}
}
},
"resources": {
"type": "object",
"title": "resources",
"properties": {
"appliesTo": {
"type": "string",
"title": "Applies To",
"order": 1
},
"confidence": {
"type": "string",
"title": "Confidence",
"order": 2
},
"estimate": {
"type": "string",
"title": "Estimate",
"order": 3
},
"id": {
"type": "string",
"title": "ID",
"order": 4
},
"links": {
"type": "array",
"title": "links",
"items": {
"$ref": "#/definitions/link"
},
"order": 5
},
"matches": {
"type": "array",
"title": "Matches",
"items": {
"$ref": "#/definitions/match"
},
"order": 7
},
"steps": {
"$ref": "#/definitions/step",
"title": "Steps",
"order": 8
},
"summary": {
"$ref": "#/definitions/summary",
"title": "Summary",
"order": 9
},
"type": {
"type": "string",
"title": "Type",
"order": 6
}
},
"definitions": {
"fingerprint": {
"type": "object",
"title": "fingerprint",
"properties": {
"description": {
"type": "string",
"title": "Description",
"order": 1
},
"family": {
"type": "string",
"title": "Family",
"order": 2
},
"product": {
"type": "string",
"title": "Product",
"order": 3
},
"vendor": {
"type": "string",
"title": "Vendor",
"order": 4
},
"version": {
"type": "string",
"title": "Version",
"order": 5
}
}
},
"link": {
"type": "object",
"title": "link",
"properties": {
"href": {
"type": "string",
"title": "URL",
"description": "A hypertext reference, which is either a URI (see RFC 3986) or URI template (see RFC 6570)",
"order": 1
},
"rel": {
"type": "string",
"title": "Rel",
"description": "Link relation type following RFC 5988",
"order": 2
}
}
},
"match": {
"type": "object",
"title": "match",
"properties": {
"confidence": {
"type": "string",
"title": "Confidence",
"order": 1
},
"fingerprint": {
"$ref": "#/definitions/fingerprint",
"title": "Fingerprint",
"order": 2
}
},
"definitions": {
"fingerprint": {
"type": "object",
"title": "fingerprint",
"properties": {
"description": {
"type": "string",
"title": "Description",
"order": 1
},
"family": {
"type": "string",
"title": "Family",
"order": 2
},
"product": {
"type": "string",
"title": "Product",
"order": 3
},
"vendor": {
"type": "string",
"title": "Vendor",
"order": 4
},
"version": {
"type": "string",
"title": "Version",
"order": 5
}
}
}
}
},
"step": {
"type": "object",
"title": "step",
"properties": {
"html": {
"type": "string",
"title": "HTML",
"order": 1
},
"text": {
"type": "string",
"title": "text",
"order": 2
}
}
},
"summary": {
"type": "object",
"title": "summary",
"properties": {
"html": {
"type": "string",
"title": "HTML",
"order": 1
},
"text": {
"type": "string",
"title": "text",
"order": 2
}
}
}
}
},
"step": {
"type": "object",
"title": "step",
"properties": {
"html": {
"type": "string",
"title": "HTML",
"order": 1
},
"text": {
"type": "string",
"title": "text",
"order": 2
}
}
},
"summary": {
"type": "object",
"title": "summary",
"properties": {
"html": {
"type": "string",
"title": "HTML",
"order": 1
},
"text": {
"type": "string",
"title": "text",
"order": 2
}
}
}
}
}
}
}
""")
def __init__(self):
    """Initialize the component by registering its declared JSON schema.

    Passes the class-level ``schema`` string to the base-class
    constructor, which parses and installs it.
    """
    # Bug fix: the original called super(self.__class__, self).__init__(...).
    # self.__class__ resolves to the *runtime* (sub)class, so any subclass
    # that inherits this __init__ recurses infinitely. Zero-argument super()
    # binds to the class this method is defined on and is the safe form.
    super().__init__(self.schema)
| 25.749326
| 126
| 0.321574
| 1,119
| 19,106
| 5.455764
| 0.09294
| 0.13104
| 0.194103
| 0.041933
| 0.845864
| 0.837346
| 0.825225
| 0.825225
| 0.809173
| 0.809173
| 0
| 0.01564
| 0.521459
| 19,106
| 741
| 127
| 25.784076
| 0.652084
| 0.001937
| 0
| 0.718407
| 1
| 0.005495
| 0.972361
| 0.022762
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002747
| false
| 0
| 0.002747
| 0
| 0.020604
| 0.038462
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
f30508b5c76e411e1f1845ffc98bc3b66db8659f
| 4,296
|
py
|
Python
|
api/v1/tests/test_asset_views.py
|
blockomat2100/vulnman
|
835ff3aae1168d8e2fa5556279bc86efd2e46472
|
[
"MIT"
] | null | null | null |
api/v1/tests/test_asset_views.py
|
blockomat2100/vulnman
|
835ff3aae1168d8e2fa5556279bc86efd2e46472
|
[
"MIT"
] | 23
|
2021-12-01T10:00:38.000Z
|
2021-12-11T11:43:13.000Z
|
api/v1/tests/test_asset_views.py
|
blockomat2100/vulnman
|
835ff3aae1168d8e2fa5556279bc86efd2e46472
|
[
"MIT"
] | null | null | null |
from rest_framework.test import APITestCase
from api.v1.mixins.testcase import VulnmanAPITestCaseMixin
from apps.assets import models
class AgentHostViewSetTestCase(APITestCase, VulnmanAPITestCaseMixin):
    """API tests for the agent host endpoints (create/detail/list)."""

    def setUp(self):
        self.init_mixin()

    def test_createview(self):
        """A host can be created; re-posting the same IP must not duplicate it."""
        auth_token = self.create_project_token(self.project1, self.pentester1)
        endpoint = self.get_url("api:v1:agents:host-list")
        payload = {"ip": "1.2.3.4"}
        auth_header = "Token %s" % auth_token.key
        resp = self.client.post(endpoint, payload, HTTP_AUTHORIZATION=auth_header)
        self.assertEqual(resp.status_code, 201)
        self.assertEqual(resp.json()["project"], str(self.project1.pk))
        self.assertEqual(models.Host.objects.count(), 1)
        # Posting the identical host again is accepted but stays idempotent.
        resp = self.client.post(endpoint, payload, HTTP_AUTHORIZATION=auth_header)
        self.assertEqual(resp.status_code, 201)
        self.assertEqual(models.Host.objects.count(), 1)

    def test_detailview(self):
        """The detail endpoint returns the host scoped to its project."""
        auth_token = self.create_project_token(self.project1, self.pentester1)
        host_obj = self.create_instance(models.Host, project=self.project1)
        endpoint = self.get_url("api:v1:agents:host-detail", pk=str(host_obj.pk))
        resp = self.client.get(
            endpoint, HTTP_AUTHORIZATION="Token %s" % auth_token.key
        )
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.json()["project"], str(self.project1.pk))

    def test_listview(self):
        """The list endpoint only exposes hosts of the token's project."""
        auth_token = self.create_project_token(self.project1, self.pentester1)
        host_obj = self.create_instance(models.Host, project=self.project1)
        # A host in another project must be invisible to this token.
        self.create_instance(models.Host, project=self.project2)
        endpoint = self.get_url("api:v1:agents:host-list")
        resp = self.client.get(
            endpoint, HTTP_AUTHORIZATION="Token %s" % auth_token.key)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.json().get("count"), 1)
        self.assertEqual(resp.json()["results"][0]["uuid"], str(host_obj.pk))

    def test_createview_forbidden(self):
        # TODO: implement
        pass

    def test_updateview(self):
        # TODO: implement
        pass

    def test_updateview_forbidden(self):
        # TODO: implement
        # TODO: check if host can be changed after creation
        pass
class AgentServiceViewSetTestCase(APITestCase, VulnmanAPITestCaseMixin):
    """API tests for the agent service endpoints (create/detail)."""

    def setUp(self):
        self.init_mixin()

    def test_detailview(self):
        """A service in the token's project is retrievable by pk."""
        auth_token = self.create_project_token(self.project1, self.pentester1)
        svc = self.create_instance(models.Service, project=self.project1)
        endpoint = self.get_url("api:v1:agents:service-detail", pk=svc.pk)
        resp = self.client.get(
            endpoint, HTTP_AUTHORIZATION="Token %s" % auth_token.key)
        self.assertEqual(resp.status_code, 200)
        self.assertEqual(resp.json()["uuid"], str(svc.pk))

    def test_detailview_forbidden(self):
        """A service belonging to a different project yields 404."""
        auth_token = self.create_project_token(self.project1, self.pentester1)
        svc = self.create_instance(models.Service, project=self.project2)
        endpoint = self.get_url("api:v1:agents:service-detail", pk=svc.pk)
        resp = self.client.get(
            endpoint, HTTP_AUTHORIZATION="Token %s" % auth_token.key)
        self.assertEqual(resp.status_code, 404)

    def test_createview(self):
        """A service can be created on a host of the token's project."""
        auth_token = self.create_project_token(self.project1, self.pentester1)
        host_obj = self.create_instance(models.Host, project=self.project1)
        payload = {
            "name": "Test Service", "host": str(host_obj.pk), "port": 443,
            "protocol": "tcp", "banner": "Test", "state": "open"}
        endpoint = self.get_url("api:v1:agents:service-list")
        resp = self.client.post(
            endpoint, payload, HTTP_AUTHORIZATION="Token %s" % auth_token.key)
        self.assertEqual(resp.status_code, 201)
        self.assertEqual(resp.json()["project"], str(self.project1.pk))

    def test_createview_forbidden(self):
        # TODO: implement
        pass

    def test_updateview(self):
        # TODO: implement
        pass

    def test_updateview_forbidden(self):
        # TODO: implement
        # TODO: check if host can be changed after creation
        pass
| 39.777778
| 77
| 0.654795
| 513
| 4,296
| 5.37037
| 0.17154
| 0.08167
| 0.10853
| 0.058439
| 0.842468
| 0.842468
| 0.842468
| 0.810163
| 0.797459
| 0.797459
| 0
| 0.017991
| 0.223696
| 4,296
| 107
| 78
| 40.149533
| 0.808096
| 0.054469
| 0
| 0.691358
| 0
| 0
| 0.077303
| 0.037787
| 0
| 0
| 0
| 0.018692
| 0.185185
| 1
| 0.17284
| false
| 0.074074
| 0.037037
| 0
| 0.234568
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
b8752b0c73bd5222046079a4569c1a5917f0d174
| 53,320
|
py
|
Python
|
infoblox_netmri/api/broker/v3_6_0/cli_credential_broker.py
|
IngmarVG-IB/infoblox-netmri
|
b0c725fd64aee1890d83917d911b89236207e564
|
[
"Apache-2.0"
] | null | null | null |
infoblox_netmri/api/broker/v3_6_0/cli_credential_broker.py
|
IngmarVG-IB/infoblox-netmri
|
b0c725fd64aee1890d83917d911b89236207e564
|
[
"Apache-2.0"
] | null | null | null |
infoblox_netmri/api/broker/v3_6_0/cli_credential_broker.py
|
IngmarVG-IB/infoblox-netmri
|
b0c725fd64aee1890d83917d911b89236207e564
|
[
"Apache-2.0"
] | null | null | null |
from ..broker import Broker
class CLICredentialBroker(Broker):
controller = "cli_credentials"
def index(self, **kwargs):
    """List the available CLI credentials.

    Any of the filter inputs below may be used to narrow the list; other
    inputs are ignored. Of the various query mechanisms, this method is
    the most efficient.

    Filter inputs (scalar on API 2.3-2.4, array of the same type on 2.5+):

    :param UnitID: The internal NetMRI identifier for the NetMRI collector
        on which the credential is configured.
    :param id: The internal NetMRI identifier for this credential.

    Paging and output shaping (all optional):

    :param start: First record number of the selected page (default 0).
    :param limit: Page size, i.e. maximum records returned (default 1000,
        maximum 10000).
    :param sort: Field(s) to sort by (default ``id``). Valid values are
        UnitID, Protocol, Origination, UPWUse, HitCount, Vendor, id,
        Priority, UsernameSecure, PasswordSecure, SecureVersion.
    :param dir: Sort direction(s), 'asc' (default) or 'desc'.
    :param select: Attributes to return for each CLICredential; all if
        empty or omitted.
    :param goto_field: Field name for NIOS GOTO row positioning (API 2.8+).
    :param goto_value: Value of goto_field for NIOS GOTO (API 2.8+).

    :return cli_credentials: An array of the CLICredential objects that
        match the specified input criteria.
    :rtype cli_credentials: Array of CLICredential
    """
    method_name = self._get_method_fullname("index")
    return self.api_list_request(method_name, kwargs)
def show(self, **kwargs):
    """Show the details for the specified cli credential.

    :param id: The internal NetMRI identifier for this credential
        (required).
    :type id: Integer

    :return cli_credential: The cli credential identified by the
        specified id.
    :rtype cli_credential: CLICredential
    """
    method_name = self._get_method_fullname("show")
    return self.api_request(method_name, kwargs)
def search(self, **kwargs):
    """List the available cli credentials matching the input criteria.

    Provides a more flexible search interface than :meth:`index`
    (free-text ``query`` and an XML filter), but is more demanding on
    the system and will not perform to the same level as ``index``.
    The filter inputs are applied as in ``index``, together with the
    optional query string and XML filter described below.

    Filter inputs (scalar on API 2.3-2.4, array of the same type on 2.5+):
    HitCount, Origination, PasswordSecure, Priority, Protocol,
    SecureVersion, UPWUse, UnitID, UsernameSecure, Vendor, id.

    Paging and output shaping (all optional): ``start`` (default 0),
    ``limit`` (default 1000, max 10000), ``sort`` (default ``id``),
    ``dir`` ('asc'/'desc', default 'asc'), ``select``, and (API 2.8+)
    ``goto_field`` / ``goto_value`` for NIOS GOTO row positioning.

    :param query: Value matched against cli credentials, checking
        whether one or more of the listed attributes contain it.
        Surround the value with '/' and '/' to perform a regular
        expression search instead of a containment operation.
    :type query: String
    :param xml_filter: A SetFilter XML structure to further refine the
        search; applied after any search query or field values, but
        before any limit options (API 2.3+). May be costly if not
        associated with database filtering.
    :type xml_filter: String

    :return cli_credentials: An array of the CLICredential objects that
        match the specified input criteria.
    :rtype cli_credentials: Array of CLICredential
    """
    method_name = self._get_method_fullname("search")
    return self.api_list_request(method_name, kwargs)
def find(self, **kwargs):
    """Lists the available cli credentials matching the input specification.

    This provides the most flexible search specification of all the query
    mechanisms, enabling searching using comparison operations other than
    equality. However, it is more complex to use and will not perform as
    efficiently as the index or search methods.

    Searchable field names: HitCount, Origination, PasswordSecure, Priority,
    Protocol, SecureVersion, UPWUse, UnitID, UsernameSecure, Vendor, id.

    **Inputs**

    For each field F above, three optional inputs are accepted:

    :param op_F: The operator to apply to the field F. Valid values are:
        =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null,
        is not null, between. For the between operator the value will be
        treated as an Array if a comma delimited string is passed, and it
        must contain an even number of values.
    :type op_F: String
    :param val_f_F: If op_F is specified, the field named in this input will
        be compared to the value in F using the specified operator; the value
        is treated as another field name rather than a constant. Either this
        field or val_c_F must be specified if op_F is specified.
    :type val_f_F: String
    :param val_c_F: If op_F is specified, this value will be compared to the
        value in F using the specified operator; the value is treated as an
        explicit constant. Either this field or val_f_F must be specified if
        op_F is specified.
    :type val_c_F: String

    Pagination and output shaping:

    :param start: The record number to return in the selected page of data.
        It will always appear, although it may not be the first record.
        (default: 0)
    :type start: Integer
    :param limit: The size of the page of data, that is, the maximum number
        of records returned. The maximum limit is 10000. (default: 1000)
    :type limit: Integer
    :param sort: The data field(s) to use for sorting the output.
        (default: id)
    :type sort: Array of String
    :param dir: The direction(s) in which to sort the data; valid values are
        'asc' and 'desc'. (default: asc)
    :type dir: Array of String
    :param select: The list of attributes to return for each CLICredential.
        If empty or omitted, all attributes will be returned.
    :type select: Array
    :param goto_field: The field name for NIOS GOTO that is used for locating
        a row position of records. (api version min: 2.8)
    :type goto_field: String
    :param goto_value: The value of goto_field for NIOS GOTO that is used for
        locating a row position of records. (api version min: 2.8)
    :type goto_value: String
    :param xml_filter: A SetFilter XML structure to further refine the
        search. The SetFilter will be applied AFTER any search query or field
        values, but before any limit options; the limit and pagination will
        be enforced after the filter. This kind of filter may be costly and
        inefficient if not associated with a database filtering.
    :type xml_filter: String

    **Outputs**

    :return cli_credentials: An array of the CLICredential objects that match
        the specified input criteria.
    :rtype cli_credentials: Array of CLICredential
    """
    method_name = self._get_method_fullname("find")
    return self.api_list_request(method_name, kwargs)
def create(self, **kwargs):
    """Creates a new cli credential.

    **Inputs**

    :param Origination: Identifies the source of the credential. 'NETC'
        indicates an internal credential that may be modified or removed
        during upgrade processes; 'USER' indicates a user-entered
        credential. (default: USER)
    :type Origination: String
    :param PasswordSecure: The password portion of the credential. (required)
    :type PasswordSecure: String
    :param Priority: The priority order in which to attempt this credential.
        (required)
    :type Priority: String
    :param Protocol: The protocol for which to use this credential.
        (default: ANY)
    :type Protocol: String
    :param UPWUse: Determines the function of the credential. 'GUESS'
        indicates that this will only be used if vendor default credential
        collection is enabled, whereas 'LOCAL' means that this credential
        will be used in all guessing. (default: LOCAL)
    :type UPWUse: String
    :param UnitID: The internal NetMRI identifier for the NetMRI collector
        on which the credential is configured. (default: 0)
    :type UnitID: Integer
    :param UsernameSecure: The username portion of the credential.
    :type UsernameSecure: String
    :param Vendor: The vendor devices against which to try this credential.
        (default: ANY)
    :type Vendor: String

    **Outputs**

    :return id: The id of the newly created cli credential.
    :rtype id: Integer
    :return model: The class name of the newly created cli credential.
    :rtype model: String
    :return uri: A URI that may be used to retrieve the newly created cli
        credential.
    :rtype uri: String
    :return cli_credential: The newly created cli credential.
    :rtype cli_credential: CLICredential
    """
    method_name = self._get_method_fullname("create")
    return self.api_request(method_name, kwargs)
def update(self, **kwargs):
    """Updates an existing cli credential.

    Any field omitted from the call is left unchanged on the record.

    **Inputs**

    :param id: The internal NetMRI identifier for this credential. (required)
    :type id: Integer
    :param Origination: Identifies the source of the credential. 'NETC'
        indicates an internal credential that may be modified or removed
        during upgrade processes; 'USER' indicates a user-entered credential.
    :type Origination: String
    :param PasswordSecure: The password portion of the credential.
    :type PasswordSecure: String
    :param Priority: The priority order in which to attempt this credential.
    :type Priority: String
    :param Protocol: The protocol for which to use this credential.
    :type Protocol: String
    :param UnitID: The internal NetMRI identifier for the NetMRI collector
        on which the credential is configured.
    :type UnitID: Integer
    :param UsernameSecure: The username portion of the credential.
    :type UsernameSecure: String
    :param Vendor: The vendor devices against which to try this credential.
    :type Vendor: String

    **Outputs**

    :return id: The id of the updated cli credential.
    :rtype id: Integer
    :return model: The class name of the updated cli credential.
    :rtype model: String
    :return uri: A URI that may be used to retrieve the updated cli
        credential.
    :rtype uri: String
    :return cli_credential: The updated cli credential.
    :rtype cli_credential: CLICredential
    """
    method_name = self._get_method_fullname("update")
    return self.api_request(method_name, kwargs)
def destroy(self, **kwargs):
    """Deletes the specified cli credential from NetMRI.

    **Inputs**

    :param id: The internal NetMRI identifier for this credential. (required)
    :type id: Integer

    **Outputs**
    """
    method_name = self._get_method_fullname("destroy")
    return self.api_request(method_name, kwargs)
def test_ssh_telnet(self, **kwargs):
"""Executes cli credential test and returns results or status id based on async_ind
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param DeviceID: Device ID to specify what device to test cli credentials on (takes precedence over IP address)
:type DeviceID: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param UnitID: ID of the collector to send the request to, OC only
:type UnitID: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ip_address: IP Address to test id DeviceID is not given
:type ip_address: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param VirtualNetworkID: The ID for Virtual Network, must be unique, only needed if DeviceID not set
:type VirtualNetworkID: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ssh_username: SSH username
:type ssh_username: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param ssh_password: SSH password
:type ssh_password: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param telnet_username: Telnet username
:type telnet_username: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param telnet_password: Telnet password
:type telnet_password: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:param enable_password: Enable mode password
:type enable_password: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` False
:param async_ind: When false, the credential test will be run synchronously, and the API call will block until it is complete. When true, credential test id will be returned to use for subsequent calls
:type async_ind: Boolean
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return text: When async_ind is false, credential test text will be returned upon completion.
:rtype text: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return id: The internal #{Brand.lookup(:PRODUCT_NAME_ONLY)} identifier for previously initiated credential test.
:rtype id: String
"""
return self.api_request(self._get_method_fullname("test_ssh_telnet"), kwargs)
def test_ssh_telnet_status(self, **kwargs):
"""CLI credential test status
**Inputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param id: Credential test id needed to retrieve status
:type id: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` True
| ``default:`` None
:param start: The starting index(inclusive) of the returned text of the credential test.
:type start: Integer
**Outputs**
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return text: credential test result
:rtype text: String
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return start: The starting index(inclusive) of the returned text of the credential test.
:rtype start: Integer
| ``api version min:`` None
| ``api version max:`` None
| ``required:`` False
| ``default:`` None
:return end: The ending index (exclusive) of the returned text of the credential test.
:rtype end: Integer
"""
return self.api_request(self._get_method_fullname("test_ssh_telnet_status"), kwargs)
| 45.533732
| 545
| 0.577457
| 6,325
| 53,320
| 4.821028
| 0.05502
| 0.085265
| 0.055423
| 0.065228
| 0.914571
| 0.911947
| 0.895222
| 0.88135
| 0.878956
| 0.872266
| 0
| 0.004443
| 0.333102
| 53,320
| 1,171
| 546
| 45.533732
| 0.853089
| 0.788691
| 0
| 0
| 0
| 0
| 0.059211
| 0.014474
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0
| 0.047619
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
b8764fa8377ae899fae2ae3c918464740ed86c66
| 13,518
|
py
|
Python
|
mmvae_hub/networks/iwVaes.py
|
Jimmy2027/MMVAE_mnist_svhn_text
|
e6e74059bd5feefc0af088f7b1abc31b0e9f2ab7
|
[
"MIT"
] | null | null | null |
mmvae_hub/networks/iwVaes.py
|
Jimmy2027/MMVAE_mnist_svhn_text
|
e6e74059bd5feefc0af088f7b1abc31b0e9f2ab7
|
[
"MIT"
] | 24
|
2021-11-15T14:22:08.000Z
|
2022-02-28T14:30:23.000Z
|
mmvae_hub/networks/iwVaes.py
|
Jimmy2027/MMVAE_mnist_svhn_text
|
e6e74059bd5feefc0af088f7b1abc31b0e9f2ab7
|
[
"MIT"
] | null | null | null |
import math
import torch.distributions as distr
import torch.nn.functional as F
from mmvae_hub.networks.FlowVaes import MoFoPoE
# from mmvae_hub.networks.GfMVaes import MopGfM
#
from mmvae_hub.networks.MixtureVaes import MOEMMVae, MoPoEMMVae
from mmvae_hub.networks.PoEMMVAE import POEMMVae
from mmvae_hub.networks.utils.utils import get_distr
from mmvae_hub.utils.Dataclasses.iwdataclasses import *
from mmvae_hub.utils.metrics.likelihood import log_mean_exp
def log_mean_exp(value, dim=0, keepdim=False):
    """Numerically stable ``log(exp(value).mean(dim))``.

    Computed via ``torch.logsumexp`` minus ``log(N)`` (N = size of *dim*)
    to avoid overflow. NOTE(review): this definition shadows the
    ``log_mean_exp`` imported from ``mmvae_hub.utils.metrics.likelihood``
    just above — confirm the shadowing is intentional.
    """
    lse = torch.logsumexp(value, dim, keepdim=keepdim)
    return lse - math.log(value.size(dim))
class iwMMVAE():
    """Mixin adding importance-weighted (IWAE-style) sampling helpers."""

    def __init__(self, flags):
        # K: number of importance samples drawn per datapoint.
        self.K = flags.K

    def conditioned_generation(self, input_samples: dict, subset_key: str, style=None):
        """
        Generate samples conditioned with input samples for a given subset.

        subset_key str: The key indicating which subset is used for the generation.
        """
        # Infer latents from the batch, then decode from the posterior mean
        # of the requested subset.
        _, joint_latents = self.inference(input_samples)
        content = joint_latents.subsets[subset_key].qz_x_tilde.mean
        latent_input = ReparamLatent(content=content, style=style)
        return self.generate_from_latents(latent_input)

    def decode(self, enc_mods: "Mapping[str, BaseEncMod]", joint_latents: "iwJointLatents") -> dict:
        """Decoder outputs each reconstructed modality as a dict."""
        # Flatten the K importance samples into the batch dimension for the
        # decoders, which unflatten back to (K, batch_size).
        flat_shape = (self.K * self.flags.batch_size, self.flags.class_dim)
        reconstructions = {}
        for subset_name, subset in joint_latents.subsets.items():
            flat_samples = subset.zs.reshape(flat_shape)
            reconstructions[subset_name] = {
                mod_name: decoder.calc_likelihood(
                    class_embeddings=flat_samples,
                    unflatten=(self.K, self.flags.batch_size),
                )
                for mod_name, decoder in self.modalities.items()
            }
        return reconstructions
class iwPoE(iwMMVAE, POEMMVae):
def __init__(self, exp, flags, modalities, subsets):
    """Initialize the PoE base model and the importance-sampling mixin."""
    POEMMVae.__init__(self, exp, flags, modalities, subsets)
    iwMMVAE.__init__(self, flags)
    # Prior over the class latent on the model device; the distribution
    # class is chosen by flags.prior.
    prior_loc = torch.zeros(1, self.flags.class_dim, device=self.flags.device)
    prior_scale = torch.ones(1, self.flags.class_dim, device=self.flags.device)
    self.prior = get_distr(flags.prior)(loc=prior_loc, scale=prior_scale)
def forward(self, input_batch: dict) -> iwForwardResults:
enc_mods, joint_latents = self.inference(input_batch)
# reconstruct modalities
rec_mods = self.decode(enc_mods, joint_latents)
return iwForwardResults(enc_mods=enc_mods, joint_latents=joint_latents, rec_mods=rec_mods)
def inference(self, input_batch) -> tuple[Mapping[str, BaseEncMod], iwJointLatents]:
enc_mods, joint_latents = super().inference(input_batch)
subsets = {}
zss = {}
for subset_str, subset in joint_latents.subsets.items():
qz_x_tilde = distr.Normal(loc=subset.mu, scale=subset.logvar)
subsets[subset_str] = iwSubset(qz_x_tilde=qz_x_tilde, zs=qz_x_tilde.rsample(torch.Size([self.K])))
# find the subset will all modalities to get the joint distr
max_subset_size = max(len(subset_str.split('_')) for subset_str in joint_latents.fusion_subsets_keys)
joint_distr = subsets[[subset_str for subset_str in joint_latents.fusion_subsets_keys if
len(subset_str.split('_')) == max_subset_size][0]]
joint_latents = iwJointLatents(fusion_subsets_keys=joint_latents.fusion_subsets_keys, subsets=subsets, zss=zss,
joint_distr=joint_distr)
return enc_mods, joint_latents
def encode(self, input_batch: Mapping[str, Tensor]) -> Mapping[str, BaseEncMod]:
enc_mods = {}
for mod_str, mod in self.modalities.items():
if mod_str in input_batch:
enc_mods[mod_str] = {}
_, _, class_mu, class_logvar = mod.encoder(input_batch[mod_str])
latents_class = Distr(mu=class_mu,
logvar=F.softmax(class_logvar, dim=-1) * class_logvar.size(-1) + 1e-6)
enc_mods[mod_str] = BaseEncMod(latents_class=latents_class)
return enc_mods
def calculate_loss(self, forward_results: iwForwardResults, batch_d: dict) -> tuple[
float, float, dict, Mapping[str, float]]:
subsets = forward_results.joint_latents.subsets
losses = []
klds = {}
log_probs = {}
for mod_str, enc_mod in forward_results.enc_mods.items():
subset = subsets[mod_str]
# sum(-1) is the sum over the class dim
lpz = self.prior.log_prob(
subset.zs).sum(-1)
# take the log mean exp over the modalities
lqz_x = log_mean_exp(
torch.stack(
[subsets[mod].qz_x_tilde.log_prob(subset.zs).sum(-1) for mod in forward_results.enc_mods]))
lpx_z = [px_z.log_prob(batch_d[out_mod_str]).view(*px_z.batch_shape[:2], -1).sum(-1)
for out_mod_str, px_z in forward_results.rec_mods[mod_str].items()]
# sum over modalities
lpx_z = torch.stack(lpx_z).sum(0)
kl_div = lpz - lqz_x
loss = lpx_z + kl_div
losses.append(loss)
log_probs[mod_str] = lpx_z.mean()
klds[mod_str] = log_mean_exp(kl_div).sum()
total_loss = -log_mean_exp(torch.cat(losses, 1)).sum()
# joint_div average of all subset divs
joint_div = torch.cat(tuple(div.unsqueeze(dim=0) for _, div in klds.items()))
# normalize with the number of samples
joint_div = joint_div.mean()
return total_loss, joint_div, log_probs, klds
class iwMoE(iwMMVAE, MOEMMVae):
    """Importance-weighted Mixture-of-Experts multimodal VAE.

    NOTE(review): byte-identical to ``iwPoE`` above apart from the fusion
    base class (``MOEMMVae``) — consider extracting the shared body.
    """

    def __init__(self, exp, flags, modalities, subsets):
        MOEMMVae.__init__(self, exp, flags, modalities, subsets)
        iwMMVAE.__init__(self, flags)
        # Standard prior p(z) over the class latent, broadcast over the batch.
        self.prior = get_distr(flags.prior)(loc=torch.zeros(1, self.flags.class_dim, device=self.flags.device),
                                            scale=torch.ones(1, self.flags.class_dim, device=self.flags.device))

    def forward(self, input_batch: dict) -> iwForwardResults:
        """Encode the batch, then reconstruct every modality from the latents."""
        enc_mods, joint_latents = self.inference(input_batch)
        # reconstruct modalities
        rec_mods = self.decode(enc_mods, joint_latents)
        return iwForwardResults(enc_mods=enc_mods, joint_latents=joint_latents, rec_mods=rec_mods)

    def inference(self, input_batch) -> tuple[Mapping[str, BaseEncMod], iwJointLatents]:
        """Run base inference, then draw K importance samples per subset."""
        enc_mods, joint_latents = super().inference(input_batch)
        subsets = {}
        zss = {}
        for subset_str, subset in joint_latents.subsets.items():
            # NOTE(review): ``logvar`` is used directly as the Normal scale
            # (no ``.exp()``), unlike iwMoPoE — encode() keeps it positive via
            # softmax, so this may be intentional; confirm.
            qz_x_tilde = distr.Normal(loc=subset.mu, scale=subset.logvar)
            subsets[subset_str] = iwSubset(qz_x_tilde=qz_x_tilde, zs=qz_x_tilde.rsample(torch.Size([self.K])))
        # find the subset with all modalities to get the joint distr
        max_subset_size = max(len(subset_str.split('_')) for subset_str in joint_latents.fusion_subsets_keys)
        joint_distr = subsets[[subset_str for subset_str in joint_latents.fusion_subsets_keys if
                               len(subset_str.split('_')) == max_subset_size][0]]
        joint_latents = iwJointLatents(fusion_subsets_keys=joint_latents.fusion_subsets_keys, subsets=subsets, zss=zss,
                                       joint_distr=joint_distr)
        return enc_mods, joint_latents

    def encode(self, input_batch: Mapping[str, Tensor]) -> Mapping[str, BaseEncMod]:
        """Encode each modality present in the batch into a class-latent Distr."""
        enc_mods = {}
        for mod_str, mod in self.modalities.items():
            if mod_str in input_batch:
                enc_mods[mod_str] = {}
                _, _, class_mu, class_logvar = mod.encoder(input_batch[mod_str])
                # Softmax rescaling keeps the "logvar" slot strictly positive.
                latents_class = Distr(mu=class_mu,
                                      logvar=F.softmax(class_logvar, dim=-1) * class_logvar.size(-1) + 1e-6)
                enc_mods[mod_str] = BaseEncMod(latents_class=latents_class)
        return enc_mods

    def calculate_loss(self, forward_results: iwForwardResults, batch_d: dict) -> tuple[
        float, float, dict, Mapping[str, float]]:
        """Importance-weighted ELBO, mixed over single-modality subsets.

        Returns ``(total_loss, joint_div, log_probs, klds)``.
        """
        subsets = forward_results.joint_latents.subsets
        losses = []
        klds = {}
        log_probs = {}
        for mod_str, enc_mod in forward_results.enc_mods.items():
            subset = subsets[mod_str]
            # sum(-1) is the sum over the class dim
            lpz = self.prior.log_prob(
                subset.zs).sum(-1)
            # take the log mean exp over the modalities
            lqz_x = log_mean_exp(
                torch.stack(
                    [subsets[mod].qz_x_tilde.log_prob(subset.zs).sum(-1) for mod in forward_results.enc_mods]))
            lpx_z = [px_z.log_prob(batch_d[out_mod_str]).view(*px_z.batch_shape[:2], -1).sum(-1)
                     for out_mod_str, px_z in forward_results.rec_mods[mod_str].items()]
            # sum over modalities
            lpx_z = torch.stack(lpx_z).sum(0)
            kl_div = lpz - lqz_x
            loss = lpx_z + kl_div
            losses.append(loss)
            log_probs[mod_str] = lpx_z.mean()
            klds[mod_str] = log_mean_exp(kl_div).sum()
        total_loss = -log_mean_exp(torch.cat(losses, 1)).sum()
        # joint_div: average of all subset divs
        joint_div = torch.cat(tuple(div.unsqueeze(dim=0) for _, div in klds.items()))
        # normalize with the number of samples
        joint_div = joint_div.mean()
        return total_loss, joint_div, log_probs, klds
class iwMoPoE(iwMMVAE, MoPoEMMVae):
    """Importance-weighted Mixture-of-Products-of-Experts multimodal VAE."""

    def __init__(self, exp, flags, modalities, subsets):
        MoPoEMMVae.__init__(self, exp, flags, modalities, subsets)
        iwMMVAE.__init__(self, flags)
        # Standard prior p(z) over the class latent, broadcast over the batch.
        self.prior = get_distr(flags.prior)(loc=torch.zeros(1, self.flags.class_dim, device=self.flags.device),
                                            scale=torch.ones(1, self.flags.class_dim, device=self.flags.device))
        # Redundant: iwMMVAE.__init__ already sets self.K from flags.K.
        self.K = flags.K

    def forward(self, input_batch: dict) -> iwForwardResults:
        """Encode the batch, then reconstruct every modality from the latents."""
        enc_mods, joint_latents = self.inference(input_batch)
        # reconstruct modalities
        rec_mods = self.decode(enc_mods, joint_latents)
        return iwForwardResults(enc_mods=enc_mods, joint_latents=joint_latents, rec_mods=rec_mods)

    def inference(self, input_batch) -> tuple[Mapping[str, BaseEncMod], iwJointLatents]:
        """Run base inference, then draw K importance samples per subset."""
        enc_mods, joint_latents = super().inference(input_batch)
        subsets = {}
        zss = {}
        for subset_str, subset in joint_latents.subsets.items():
            # Here ``logvar`` IS exponentiated before use as the Normal scale,
            # unlike iwPoE/iwMoE above — NOTE(review): confirm the intended
            # parameterization across the three classes.
            qz_x_tilde = distr.Normal(loc=subset.mu, scale=subset.logvar.exp())
            subsets[subset_str] = iwSubset(qz_x_tilde=qz_x_tilde, zs=qz_x_tilde.rsample(torch.Size([self.K])))
        # find the subset with all modalities to get the joint distr
        max_subset_size = max(len(subset_str.split('_')) for subset_str in joint_latents.fusion_subsets_keys)
        joint_distr = subsets[[subset_str for subset_str in joint_latents.fusion_subsets_keys if
                               len(subset_str.split('_')) == max_subset_size][0]]
        joint_latents = iwJointLatents(fusion_subsets_keys=joint_latents.fusion_subsets_keys, subsets=subsets, zss=zss,
                                       joint_distr=joint_distr)
        return enc_mods, joint_latents

    def encode(self, input_batch: Mapping[str, Tensor]) -> Mapping[str, BaseEncMod]:
        """Encode each modality present in the batch into a class-latent Distr."""
        enc_mods = {}
        for mod_str, mod in self.modalities.items():
            if mod_str in input_batch:
                enc_mods[mod_str] = {}
                _, _, class_mu, class_logvar = mod.encoder(input_batch[mod_str])
                # Softmax rescaling keeps the "logvar" slot strictly positive.
                latents_class = Distr(mu=class_mu,
                                      logvar=F.softmax(class_logvar, dim=-1) * class_logvar.size(-1) + 1e-6)
                enc_mods[mod_str] = BaseEncMod(latents_class=latents_class)
        return enc_mods

    def calculate_loss(self, forward_results: iwForwardResults, batch_d: dict) -> tuple[
        float, float, dict, Mapping[str, float]]:
        """Importance-weighted ELBO over ALL subsets (not only single modalities).

        Unlike iwPoE/iwMoE, the density term uses only the subset's own
        posterior (no mixture over modalities). Returns
        ``(total_loss, joint_div, log_probs, klds)``.
        """
        subsets = forward_results.joint_latents.subsets
        losses = []
        klds = {}
        log_probs = {}
        for mod_str, subset in subsets.items():
            # sum over last dim
            lpz = self.prior.log_prob(subset.zs).sum(-1)
            # Earlier mixture variant, kept for reference:
            # lqz_x = log_mean_exp(
            #     torch.stack(
            #         [subset_.qz_x_tilde.log_prob(subset_.zs).sum(-1) for _, subset_ in subsets.items()]))
            lqz_x = subset.qz_x_tilde.log_prob(subset.zs).sum(-1)
            lpx_z = [px_z.log_prob(batch_d[out_mod_str]).view(*px_z.batch_shape[:2], -1).sum(-1)
                     for out_mod_str, px_z in forward_results.rec_mods[mod_str].items()]
            lpx_z = torch.stack(lpx_z).sum(0)
            kl_div = lpz - lqz_x
            loss = lpx_z + kl_div
            losses.append(loss)
            log_probs[mod_str] = lpx_z.mean()
            klds[mod_str] = log_mean_exp(kl_div).sum()
        total_loss = -log_mean_exp(torch.cat(losses, 1)).sum()
        # joint_div: average of all subset divs
        joint_div = torch.cat(tuple(div.unsqueeze(dim=0) for _, div in klds.items()))
        # normalize with the number of samples
        joint_div = joint_div.mean()
        return total_loss, joint_div, log_probs, klds
| 43.747573
| 119
| 0.631824
| 1,787
| 13,518
| 4.493565
| 0.095691
| 0.059776
| 0.016936
| 0.037858
| 0.842217
| 0.832752
| 0.827024
| 0.805978
| 0.805978
| 0.800498
| 0
| 0.004942
| 0.26646
| 13,518
| 308
| 120
| 43.88961
| 0.804861
| 0.080633
| 0
| 0.810945
| 0
| 0
| 0.000485
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.094527
| false
| 0
| 0.044776
| 0.004975
| 0.233831
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b87fe4331ead7e6f053ae1e3ad99121a1d3fac11
| 10,004
|
py
|
Python
|
venv/Include/Matrix.py
|
matthijsvanvliet/raytracing-python
|
73d692b47330ab94eedde579a51063e3a907e92b
|
[
"MIT"
] | 1
|
2021-06-03T11:34:15.000Z
|
2021-06-03T11:34:15.000Z
|
venv/Include/Matrix.py
|
matthijsvanvliet/raytracing-python
|
73d692b47330ab94eedde579a51063e3a907e92b
|
[
"MIT"
] | null | null | null |
venv/Include/Matrix.py
|
matthijsvanvliet/raytracing-python
|
73d692b47330ab94eedde579a51063e3a907e92b
|
[
"MIT"
] | null | null | null |
from Include.Tuple import *
class Matrix4:
    """A 4x4 matrix for homogeneous 3D transforms (ray-tracer support class).

    Storage is row-major: ``m[row][col]``. Relies on ``math``, ``EPSILON``
    and ``Tuple`` coming from ``from Include.Tuple import *`` at file top.
    The transform factories (translate/scale/rotate/shear) return ``M * self``,
    i.e. they pre-multiply the new transform onto this matrix.
    """

    length = 4

    def __init__(self):
        # Zero-initialised 4x4 storage.
        self.m = [[0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0], [0, 0, 0, 0]]

    @staticmethod
    def identity_matrix():
        """Return a fresh 4x4 identity matrix."""
        M = Matrix4()
        M.m[0][0] = 1.0
        M.m[1][1] = 1.0
        M.m[2][2] = 1.0
        M.m[3][3] = 1.0
        return M

    def transpose(self):
        """Return a new matrix with rows and columns swapped."""
        M = Matrix4()
        for row in range(M.length):
            for col in range(M.length):
                M.m[row][col] = self.m[col][row]
        return M

    def determinant(self):
        """Determinant via Laplace (cofactor) expansion along the first row."""
        det = 0
        for x in range(self.length):
            det += self.m[0][x] * self.cofactor(0, x)
        return det

    def submatrix(self, row, column):
        """Return the 3x3 matrix obtained by deleting ``row`` and ``column``."""
        M = Matrix3()
        i = 0
        for r in range(self.length):
            j = 0
            if r == row:
                continue
            for c in range(self.length):
                if c == column:
                    continue
                M.m[i][j] = self.m[r][c]
                j += 1
            i += 1
        return M

    def minor(self, row, column):
        """Determinant of the submatrix at (row, column)."""
        return self.submatrix(row, column).determinant()

    def cofactor(self, row, column):
        """Signed minor: negated when row+column is odd."""
        return self.minor(row, column) if (row + column) % 2 == 0 else -self.minor(row, column)

    def invertible(self):
        """A matrix is invertible iff its determinant is non-zero."""
        return self.determinant() != 0

    def invert(self):
        """Return the inverse via the adjugate, or None when singular."""
        if not self.invertible():
            return
        # Fix: compute the (expensive, recursive) determinant once instead of
        # once per element — the original recomputed it 16 times.
        det = self.determinant()
        M = Matrix4()
        for row in range(self.length):
            for col in range(self.length):
                c = self.cofactor(row, col)
                # Adjugate: note the transposed [col][row] assignment.
                M.m[col][row] = c / det
        return M

    def translate(self, x, y, z):
        """Pre-multiply a translation by (x, y, z)."""
        M = Matrix4.identity_matrix()
        M.m[0][3] = x
        M.m[1][3] = y
        M.m[2][3] = z
        return M * self

    def scale(self, x, y, z):
        """Pre-multiply an axis-aligned scale by (x, y, z)."""
        M = Matrix4.identity_matrix()
        M.m[0][0] = x
        M.m[1][1] = y
        M.m[2][2] = z
        return M * self

    def rotate_x(self, radians):
        """Pre-multiply a rotation about the X axis."""
        M = Matrix4.identity_matrix()
        M.m[1][1] = math.cos(radians)
        M.m[1][2] = -math.sin(radians)
        M.m[2][1] = math.sin(radians)
        M.m[2][2] = math.cos(radians)
        return M * self

    def rotate_y(self, radians):
        """Pre-multiply a rotation about the Y axis."""
        M = Matrix4.identity_matrix()
        M.m[0][0] = math.cos(radians)
        M.m[0][2] = math.sin(radians)
        M.m[2][0] = -math.sin(radians)
        M.m[2][2] = math.cos(radians)
        return M * self

    def rotate_z(self, radians):
        """Pre-multiply a rotation about the Z axis."""
        M = Matrix4.identity_matrix()
        M.m[0][0] = math.cos(radians)
        M.m[0][1] = -math.sin(radians)
        M.m[1][0] = math.sin(radians)
        M.m[1][1] = math.cos(radians)
        return M * self

    def shear(self, xy, xz, yx, yz, zx, zy):
        """Pre-multiply a shear; each arg moves one axis in proportion to another."""
        M = Matrix4.identity_matrix()
        M.m[0][1] = xy
        M.m[0][2] = xz
        M.m[1][0] = yx
        M.m[1][2] = yz
        M.m[2][0] = zx
        M.m[2][1] = zy
        return M * self

    def __eq__(self, other):
        """Element-wise approximate equality within EPSILON."""
        for x in range(self.length):
            for y in range(self.length):
                if abs(self.m[x][y] - other.m[x][y]) > EPSILON:
                    return False
        return True

    def __mul__(self, other):
        """Matrix product with another Matrix4, or application to a Tuple.

        NOTE(review): falls through and returns None for any other operand
        type (should arguably return NotImplemented) — kept as-is.
        """
        if type(other) is Matrix4:
            M = Matrix4()
            for row in range(M.length):
                for col in range(M.length):
                    for index in range(M.length):
                        M.m[row][col] += (self.m[row][index] * other.m[index][col])
            return M
        elif type(other) is Tuple:
            return Tuple(self.m[0][0] * other.x + self.m[0][1] * other.y + self.m[0][2] * other.z + self.m[0][3] * other.w,
                         self.m[1][0] * other.x + self.m[1][1] * other.y + self.m[1][2] * other.z + self.m[1][3] * other.w,
                         self.m[2][0] * other.x + self.m[2][1] * other.y + self.m[2][2] * other.z + self.m[2][3] * other.w,
                         self.m[3][0] * other.x + self.m[3][1] * other.y + self.m[3][2] * other.z + self.m[3][3] * other.w)
class Matrix3:
    """A 3x3 matrix, used as the cofactor submatrix of Matrix4.

    Storage is row-major ``m[row][col]``; ``EPSILON`` and ``Matrix2`` come
    from elsewhere in the file.
    """

    length = 3

    def __init__(self):
        # 3x3 zero storage.
        self.m = [[0, 0, 0], [0, 0, 0], [0, 0, 0]]

    def determinant(self):
        """Laplace expansion along the first row."""
        total = 0
        for col in range(self.length):
            total += self.m[0][col] * self.cofactor(0, col)
        return total

    def submatrix(self, row, column):
        """Return the 2x2 matrix with the given row and column removed."""
        result = Matrix2()
        i = 0
        for r in range(self.length):
            if r == row:
                continue
            j = 0
            for c in range(self.length):
                if c == column:
                    continue
                result.m[i][j] = self.m[r][c]
                j += 1
            i += 1
        return result

    def minor(self, row, column):
        """Determinant of the submatrix at (row, column)."""
        return self.submatrix(row, column).determinant()

    def cofactor(self, row, column):
        """Signed minor: positive when row+column is even, negated otherwise."""
        sign = 1 if (row + column) % 2 == 0 else -1
        return sign * self.minor(row, column)

    def __eq__(self, other):
        """Element-wise approximate equality within EPSILON."""
        for r in range(self.length):
            for c in range(self.length):
                if abs(self.m[r][c] - other.m[r][c]) > EPSILON:
                    return False
        return True
class Matrix2:
    """A 2x2 matrix: the base case of the cofactor recursion."""

    length = 2

    def __init__(self):
        # 2x2 zero storage, row-major.
        self.m = [[0, 0], [0, 0]]

    def determinant(self):
        """Closed-form 2x2 determinant: ad - bc."""
        (a, b), (c, d) = self.m
        return a * d - b * c

    def __eq__(self, other):
        """Element-wise approximate equality within EPSILON."""
        for r in range(self.length):
            for c in range(self.length):
                if abs(self.m[r][c] - other.m[r][c]) > EPSILON:
                    return False
        return True
#
# Matrix class with variable width and hight (not optimal)
#
class Matrix:
    """Matrix with variable width and height (general but not optimized).

    Storage is ``m[row][col]`` with ``height`` rows of ``width`` columns.
    NOTE(review): submatrix/determinant/__eq__ index with ``x`` over width and
    ``y`` over height interchangeably — only square matrices are safe for
    those operations; transpose now handles rectangles correctly.

    Fixes vs. the previous revision:
    - ``transpose`` was hard-coded to ``Matrix(4, 4)`` regardless of size.
    - ``translate``/``scale``/``rotate_*``/``shear`` called
      ``Matrix.identity_matrix()`` without its required ``length`` argument
      (always a TypeError); they now build the 4x4 homogeneous identity.
    - ``determinant`` lacked a 1x1 base case, so every 2x2 cofactor came out
      0 and ``inverse`` returned a zero matrix for 2x2 inputs.
    - ``inverse`` recomputed the determinant once per element; now hoisted.
    """

    def __init__(self, width: int, height: int):
        self.width = width
        self.height = height
        # height rows of width columns, zero-filled.
        self.m = [[0.0 for x in range(width)] for y in range(height)]

    @staticmethod
    def identity_matrix(length: int):
        """Return the length x length identity matrix."""
        M = Matrix(length, length)
        for x in range(length):
            M.m[x][x] = 1
        return M

    def transpose(self):
        """Return the transpose; works for rectangular matrices."""
        M = Matrix(self.height, self.width)
        for r in range(self.height):
            for c in range(self.width):
                M.m[c][r] = self.m[r][c]
        return M

    def determinant(self):
        """Determinant by cofactor expansion (square matrices only)."""
        if self.width == 1:
            # Base case: det of a 1x1 matrix is its single entry.
            return self.m[0][0]
        if self.width == 2:
            return self.m[0][0] * self.m[1][1] - self.m[0][1] * self.m[1][0]
        det = 0
        for x in range(self.width):
            det += self.m[0][x] * self.cofactor(0, x)
        return det

    def submatrix(self, row, column):
        """Return the matrix with ``row`` and ``column`` deleted."""
        M = Matrix(self.width - 1, self.height - 1)
        values = []  # surviving entries, in row-major order
        index = 0
        for x in range(self.width):
            if x != row:
                for y in range(self.height):
                    if y != column:
                        values.append(self.m[x][y])
        for x in range(M.width):
            for y in range(M.height):
                M.m[x][y] = values[index]
                index += 1
        return M

    def minor(self, row, column):
        """Determinant of the submatrix at (row, column)."""
        return self.submatrix(row, column).determinant()

    def cofactor(self, row, column):
        """Signed minor: negated when row+column is odd."""
        return self.minor(row, column) if (row + column) % 2 == 0 else -self.minor(row, column)

    def invertible(self):
        """A matrix is invertible iff its determinant is non-zero."""
        return self.determinant() != 0

    def inverse(self):
        """Return the inverse via the adjugate, or None when singular."""
        if not self.invertible():
            return
        det = self.determinant()  # hoisted: computed once, not per element
        M2 = Matrix(self.width, self.height)
        for row in range(self.width):
            for col in range(self.height):
                c = self.cofactor(row, col)
                # Adjugate: note the transposed [col][row] assignment.
                M2.m[col][row] = c / det
        return M2

    def translate(self, x, y, z):
        """Pre-multiply a 4x4 homogeneous translation (self must be 4x4)."""
        M = Matrix.identity_matrix(4)
        M.m[0][3] = x
        M.m[1][3] = y
        M.m[2][3] = z
        return M * self

    def scale(self, x, y, z):
        """Pre-multiply a 4x4 homogeneous scale (self must be 4x4)."""
        M = Matrix.identity_matrix(4)
        M.m[0][0] = x
        M.m[1][1] = y
        M.m[2][2] = z
        return M * self

    def rotate_x(self, radians):
        """Pre-multiply a 4x4 rotation about the X axis (self must be 4x4)."""
        M = Matrix.identity_matrix(4)
        M.m[1][1] = math.cos(radians)
        M.m[1][2] = -math.sin(radians)
        M.m[2][1] = math.sin(radians)
        M.m[2][2] = math.cos(radians)
        return M * self

    def rotate_y(self, radians):
        """Pre-multiply a 4x4 rotation about the Y axis (self must be 4x4)."""
        M = Matrix.identity_matrix(4)
        M.m[0][0] = math.cos(radians)
        M.m[0][2] = math.sin(radians)
        M.m[2][0] = -math.sin(radians)
        M.m[2][2] = math.cos(radians)
        return M * self

    def rotate_z(self, radians):
        """Pre-multiply a 4x4 rotation about the Z axis (self must be 4x4)."""
        M = Matrix.identity_matrix(4)
        M.m[0][0] = math.cos(radians)
        M.m[0][1] = -math.sin(radians)
        M.m[1][0] = math.sin(radians)
        M.m[1][1] = math.cos(radians)
        return M * self

    def shear(self, xy, xz, yx, yz, zx, zy):
        """Pre-multiply a 4x4 shear (self must be 4x4)."""
        M = Matrix.identity_matrix(4)
        M.m[0][1] = xy
        M.m[0][2] = xz
        M.m[1][0] = yx
        M.m[1][2] = yz
        M.m[2][0] = zx
        M.m[2][1] = zy
        return M * self

    def __eq__(self, other):
        """Element-wise approximate equality within EPSILON (square only)."""
        for x in range(self.width):
            for y in range(self.height):
                if abs(self.m[x][y] - other.m[x][y]) > EPSILON:
                    return False
        return True

    def __mul__(self, other):
        """Matrix product with another Matrix, or application to a Tuple.

        NOTE(review): returns None for unsupported operand types — kept as-is.
        """
        if type(other) is Matrix:
            M = Matrix(self.width, self.height)
            for row in range(M.width):
                for col in range(M.height):
                    for index in range(M.width):
                        M.m[row][col] += (self.m[row][index] * other.m[index][col])
            return M
        elif type(other) is Tuple:
            return Tuple(self.m[0][0] * other.x + self.m[0][1] * other.y + self.m[0][2] * other.z + self.m[0][3] * other.w,
                         self.m[1][0] * other.x + self.m[1][1] * other.y + self.m[1][2] * other.z + self.m[1][3] * other.w,
                         self.m[2][0] * other.x + self.m[2][1] * other.y + self.m[2][2] * other.z + self.m[2][3] * other.w,
                         self.m[3][0] * other.x + self.m[3][1] * other.y + self.m[3][2] * other.z + self.m[3][3] * other.w)
| 30.13253
| 123
| 0.470312
| 1,515
| 10,004
| 3.066007
| 0.056106
| 0.026265
| 0.014855
| 0.017223
| 0.887621
| 0.853175
| 0.821529
| 0.778041
| 0.775242
| 0.757374
| 0
| 0.043953
| 0.367753
| 10,004
| 332
| 124
| 30.13253
| 0.690435
| 0.005598
| 0
| 0.80292
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.156934
| false
| 0
| 0.00365
| 0.032847
| 0.361314
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b21974a0601cfab260118d3bb64a5f5c6dd81081
| 1,101
|
py
|
Python
|
clickhouse_manager/config.py
|
Altinity/clickhouse-cluster-manager
|
744447893fbe8533fbce8d2266c2155c3e5b9a23
|
[
"MIT"
] | 21
|
2017-12-17T04:49:59.000Z
|
2021-08-31T05:27:07.000Z
|
clickhouse_manager/config.py
|
Altinity/clickhouse-cluster-manager
|
744447893fbe8533fbce8d2266c2155c3e5b9a23
|
[
"MIT"
] | 3
|
2018-10-28T20:35:51.000Z
|
2021-08-14T07:04:45.000Z
|
clickhouse_manager/config.py
|
Altinity/clickhouse-cluster-manager
|
744447893fbe8533fbce8d2266c2155c3e5b9a23
|
[
"MIT"
] | 9
|
2017-08-09T11:34:38.000Z
|
2021-05-18T07:38:10.000Z
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
class Config(object):
    """Read-only accessor over a nested configuration mapping.

    Wraps a two-level dict (section -> key -> value) and exposes each
    well-known setting through a named method.
    """

    config = None

    def __init__(self, config):
        self.config = config

    def __str__(self):
        return str(self.config)

    def __getitem__(self, item):
        return self.config[item]

    def _lookup(self, section, key):
        # Shared two-level lookup used by every accessor below.
        return self.config[section][key]

    def interactive(self):
        return self._lookup('app', 'interactive')

    def dry(self):
        return self._lookup('app', 'dry')

    def log_file(self):
        return self._lookup('app', 'log-file')

    def log_level(self):
        return self._lookup('app', 'log-level')

    def pid_file(self):
        return self._lookup('app', 'pid_file')

    def ch_config_folder(self):
        return self._lookup('manager', 'config-folder')

    def ch_config_file(self):
        return self._lookup('manager', 'config.xml')

    def ch_config_user_file(self):
        return self._lookup('manager', 'user.xml')

    def ssh_username(self):
        return self._lookup('ssh', 'username')

    def ssh_password(self):
        return self._lookup('ssh', 'password')

    def ssh_port(self):
        return self._lookup('ssh', 'port')
| 22.02
| 54
| 0.605813
| 141
| 1,101
| 4.546099
| 0.241135
| 0.234009
| 0.299532
| 0.343214
| 0.466459
| 0.287051
| 0
| 0
| 0
| 0
| 0
| 0.001189
| 0.236149
| 1,101
| 49
| 55
| 22.469388
| 0.760999
| 0.038147
| 0
| 0
| 0
| 0
| 0.12772
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.466667
| false
| 0.066667
| 0
| 0.433333
| 0.966667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 7
|
b23455a012d3cdd23984ce1c2dd7d5a83a7457d2
| 7,072
|
py
|
Python
|
SBaaS_quantification/stage01_quantification_averages_postgresql_models.py
|
dmccloskey/SBaaS_quantification
|
b2a9c7a9a0d318f22ff20e311f94c213852ba914
|
[
"MIT"
] | null | null | null |
SBaaS_quantification/stage01_quantification_averages_postgresql_models.py
|
dmccloskey/SBaaS_quantification
|
b2a9c7a9a0d318f22ff20e311f94c213852ba914
|
[
"MIT"
] | null | null | null |
SBaaS_quantification/stage01_quantification_averages_postgresql_models.py
|
dmccloskey/SBaaS_quantification
|
b2a9c7a9a0d318f22ff20e311f94c213852ba914
|
[
"MIT"
] | null | null | null |
from SBaaS_base.postgresql_orm_base import *
class data_stage01_quantification_averagesMI(Base):
__tablename__ = 'data_stage01_quantification_averagesmi'
id = Column(Integer, Sequence('data_stage01_quantification_averagesmi_id_seq'), primary_key=True)
experiment_id = Column(String(50))
sample_name_abbreviation = Column(String(100))
time_point = Column(String(10))
component_group_name = Column(String(100))
component_name = Column(String(500))
n_replicates = Column(Integer)
calculated_concentration_average = Column(Float)
calculated_concentration_cv = Column(Float)
calculated_concentration_units = Column(String(20))
used_ = Column(Boolean);
__table_args__ = (UniqueConstraint('experiment_id','sample_name_abbreviation','time_point','component_name','calculated_concentration_units'),
)
def __init__(self,
row_dict_I,
):
self.experiment_id=row_dict_I['experiment_id'];
self.sample_name_abbreviation=row_dict_I['sample_name_abbreviation'];
self.time_point=row_dict_I['time_point'];
self.component_group_name=row_dict_I['component_group_name'];
self.component_name=row_dict_I['component_name'];
self.n_replicates=row_dict_I['n_replicates'];
self.calculated_concentration_average=row_dict_I['calculated_concentration_average'];
self.calculated_concentration_cv=row_dict_I['calculated_concentration_cv'];
self.calculated_concentration_units=row_dict_I['calculated_concentration_units'];
self.used_=row_dict_I['used_'];
def __set__row__(self, experiment_id_I, sample_name_abbreviation_I, time_point_I, component_group_name_I, component_name_I,
n_replicates_I, calculated_concentration_average_I, calculated_concentration_cv_I,
calculated_concentration_units_I, used_I):
self.experiment_id = experiment_id_I;
self.sample_name_abbreviation = sample_name_abbreviation_I;
self.time_point = time_point_I;
self.component_group_name = component_group_name_I;
self.component_name = component_name_I;
self.n_replicates = n_replicates_I;
self.calculated_concentration_average = calculated_concentration_average_I;
self.calculated_concentration_cv = calculated_concentration_cv_I;
self.calculated_concentration_units = calculated_concentration_units_I;
self.used_ = used_I;
def __repr__dict__(self):
return {'id':self.id,
"experiment_id":self.experiment_id,
"sample_name_abbreviation":self.sample_name_abbreviation,
"time_point":self.time_point,
"component_group_name":self.component_group_name,
"component_name":self.component_name,
"calculated_concentration_average":self.calculated_concentration_average,
"calculated_concentration_cv":self.calculated_concentration_cv,
"calculated_concentration_units":self.calculated_concentration_units,
"used_":self.used_}
def __repr__json__(self):
return json.dumps(self.__repr__dict__())
class data_stage01_quantification_averagesMIgeo(Base):
    """SQLAlchemy model: geometric-average quantification results with
    variance and lower/upper bounds per
    (experiment, sample, time point, component, units)."""
    __tablename__ = 'data_stage01_quantification_averagesmigeo'
    id = Column(Integer, Sequence('data_stage01_quantification_averagesmigeo_id_seq'), primary_key=True)
    experiment_id = Column(String(50))
    sample_name_abbreviation = Column(String(100))
    time_point = Column(String(10))
    component_group_name = Column(String(100))
    component_name = Column(String(500))
    n_replicates = Column(Integer)
    calculated_concentration_average = Column(Float)
    calculated_concentration_var = Column(Float)
    calculated_concentration_lb = Column(Float)
    calculated_concentration_ub = Column(Float)
    calculated_concentration_units = Column(String(20))
    used_ = Column(Boolean)
    # One row per unique combination of these five columns.
    __table_args__ = (UniqueConstraint('experiment_id','sample_name_abbreviation','time_point','component_name','calculated_concentration_units'),
                      )

    def __init__(self,
                 row_dict_I,
                 ):
        """Populate all columns from a dict keyed by column name."""
        self.n_replicates = row_dict_I['n_replicates']
        self.calculated_concentration_average = row_dict_I['calculated_concentration_average']
        self.calculated_concentration_var = row_dict_I['calculated_concentration_var']
        self.calculated_concentration_lb = row_dict_I['calculated_concentration_lb']
        self.calculated_concentration_ub = row_dict_I['calculated_concentration_ub']
        self.calculated_concentration_units = row_dict_I['calculated_concentration_units']
        self.used_ = row_dict_I['used_']
        self.component_name = row_dict_I['component_name']
        self.component_group_name = row_dict_I['component_group_name']
        self.time_point = row_dict_I['time_point']
        self.sample_name_abbreviation = row_dict_I['sample_name_abbreviation']
        self.experiment_id = row_dict_I['experiment_id']

    def __set__row__(self, experiment_id_I, sample_name_abbreviation_I, time_point_I, component_group_name_I, component_name_I,
                     n_replicates_I, calculated_concentration_average_I, calculated_concentration_var_I,
                     calculated_concentration_lb_I, calculated_concentration_ub_I,
                     calculated_concentration_units_I, used_I):
        """Positional alternative to __init__ for setting all columns."""
        self.experiment_id = experiment_id_I
        self.sample_name_abbreviation = sample_name_abbreviation_I
        self.time_point = time_point_I
        self.component_group_name = component_group_name_I
        self.component_name = component_name_I
        self.n_replicates = n_replicates_I
        self.calculated_concentration_average = calculated_concentration_average_I
        self.calculated_concentration_var = calculated_concentration_var_I
        self.calculated_concentration_lb = calculated_concentration_lb_I
        self.calculated_concentration_ub = calculated_concentration_ub_I
        self.calculated_concentration_units = calculated_concentration_units_I
        self.used_ = used_I

    def __repr__dict__(self):
        """Dict of column values."""
        return {'id':self.id,
                "experiment_id":self.experiment_id,
                "sample_name_abbreviation":self.sample_name_abbreviation,
                "time_point":self.time_point,
                "component_group_name":self.component_group_name,
                "component_name":self.component_name,
                "n_replicates":self.n_replicates,
                # Bug fix: previously read self.calculated_concentration_cv,
                # an attribute this class never defines (copy-paste from the
                # MI sibling) — it raised AttributeError at runtime.
                "calculated_concentration_var":self.calculated_concentration_var,
                "calculated_concentration_lb":self.calculated_concentration_lb,
                "calculated_concentration_ub":self.calculated_concentration_ub,
                "calculated_concentration_units":self.calculated_concentration_units,
                "used_":self.used_}

    def __repr__json__(self):
        """JSON serialization of __repr__dict__()."""
        return json.dumps(self.__repr__dict__())
| 55.25
| 146
| 0.732607
| 801
| 7,072
| 5.878901
| 0.078652
| 0.322361
| 0.040773
| 0.03058
| 0.930771
| 0.898068
| 0.801444
| 0.781058
| 0.767467
| 0.704183
| 0
| 0.007308
| 0.187359
| 7,072
| 128
| 147
| 55.25
| 0.811902
| 0
| 0
| 0.756303
| 0
| 0
| 0.170249
| 0.119061
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.008403
| null | null | 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
b2a1dc491e94a0a78beede38e37e13d21ab4077e
| 2,444
|
py
|
Python
|
engine/test_ros_service.py
|
cnrancher/os-tests
|
57d46413954e602e81cad287410dfecf46bfef84
|
[
"Apache-2.0"
] | 2
|
2018-11-14T17:02:02.000Z
|
2019-07-19T07:13:41.000Z
|
engine/test_ros_service.py
|
cnrancher/os-tests
|
57d46413954e602e81cad287410dfecf46bfef84
|
[
"Apache-2.0"
] | 13
|
2018-11-06T09:29:50.000Z
|
2019-12-23T07:36:07.000Z
|
engine/test_ros_service.py
|
cnrancher/os-tests
|
57d46413954e602e81cad287410dfecf46bfef84
|
[
"Apache-2.0"
] | 1
|
2018-11-05T04:03:20.000Z
|
2018-11-05T04:03:20.000Z
|
# coding = utf-8
# Create date: 2018-11-15
# Author :Bowen Lee
from utils.connect_to_os import executor, connection
def test_ros_local_service(ros_kvm_init, cloud_config_url):
    """Integration test: enable and run a SYSTEM-scoped RancherOS service.

    Boots a KVM instance from the default cloud-config, builds a test image
    on top of the os-base system image, registers it as a system service via
    a generated test.yml, then asserts the service's `ls` entrypoint ran.
    """
    # Boot the VM with the default cloud-config, installing to disk.
    kwargs = dict(cloud_config='{url}default.yml'.format(url=cloud_config_url), is_install_to_hard_drive=True)
    tuple_return = ros_kvm_init(**kwargs)
    # assumes element 0 of the returned tuple is the SSH client — TODO confirm
    client = tuple_return[0]
    # Build "test_image" FROM the running system's os-base image.
    create_test_image = 'echo "FROM $(sudo system-docker images --format '"{{.Repository}}:{{.Tag}}"' | grep os-base)" > Dockerfile'
    build_test_image = 'sudo system-docker build -t test_image .'
    executor(client, create_test_image)
    executor(client, build_test_image)
    # Compose a service definition with system scope, started after console.
    add_contents = 'echo "test:" > test.yml && echo "  image: test_image" >> test.yml ' \
                   '&& echo "  entrypoint: ls" >> test.yml && echo "  labels:" >> test.yml ' \
                   '&& echo "    io.rancher.os.scope: system" >> test.yml ' \
                   '&& echo "    io.rancher.os.after: console" >> test.yml'
    executor(client, add_contents)
    # Install, enable and start the service, then read its logs.
    executor(client, 'sudo cp test.yml /var/lib/rancher/conf/test.yml')
    executor(client, 'sudo ros service enable /var/lib/rancher/conf/test.yml')
    executor(client, 'sudo ros service up test')
    output = executor(client, 'sudo ros service logs test')
    # The `ls` entrypoint ran in /, so "bin" must appear in the log output.
    assert ('bin' in output)
def test_ros_local_service_user(ros_kvm_init, cloud_config_url):
    """Integration test: enable and run a USER-scoped RancherOS service.

    Same flow as test_ros_local_service, but the image is built with plain
    docker from alpine and the service is labelled with user scope.
    """
    # Boot the VM with the default cloud-config, installing to disk.
    kwargs = dict(cloud_config='{url}default.yml'.format(url=cloud_config_url), is_install_to_hard_drive=True)
    tuple_return = ros_kvm_init(**kwargs)
    # assumes element 0 of the returned tuple is the SSH client — TODO confirm
    client = tuple_return[0]
    # Build "test_image_user" from alpine using the user docker daemon.
    create_test_image = 'docker pull alpine && echo "FROM alpine" > Dockerfile'
    executor(client, create_test_image)
    build_test_image = 'sudo docker build -t test_image_user .'
    executor(client, build_test_image)
    # Compose a service definition with user scope, started after console.
    add_contents = 'echo "test:" > test.yml && echo "  image: test_image_user" >> test.yml ' \
                   '&& echo "  entrypoint: ls" >> test.yml && echo "  labels:" >> test.yml ' \
                   '&& echo "    io.rancher.os.scope: user" >> test.yml ' \
                   '&& echo "    io.rancher.os.after: console" >> test.yml'
    executor(client, add_contents)
    # Install, enable and start the service, then read its logs.
    executor(client, 'sudo cp test.yml /var/lib/rancher/conf/test.yml')
    executor(client, 'sudo ros service enable /var/lib/rancher/conf/test.yml')
    executor(client, 'sudo ros service up test')
    output = executor(client, 'sudo ros service logs test')
    # The `ls` entrypoint ran in /, so "bin" must appear in the log output.
    assert ('bin' in output)
| 48.88
| 132
| 0.659574
| 333
| 2,444
| 4.645646
| 0.24024
| 0.081448
| 0.071105
| 0.081448
| 0.832579
| 0.739496
| 0.739496
| 0.739496
| 0.739496
| 0.739496
| 0
| 0.005656
| 0.204173
| 2,444
| 49
| 133
| 49.877551
| 0.789717
| 0.022913
| 0
| 0.702703
| 0
| 0
| 0.447148
| 0.060403
| 0
| 0
| 0
| 0
| 0.054054
| 1
| 0.054054
| false
| 0
| 0.027027
| 0
| 0.081081
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a277ff02a6a0a0c7eeeacc5d9a0d38b3a294dda0
| 2,991
|
py
|
Python
|
GPy/kern/_src/psi_comp/__init__.py
|
strongh/GPy
|
775ce9e64c1e8f472083b8f2430134047d97b2fa
|
[
"BSD-3-Clause"
] | 1
|
2015-08-06T13:47:10.000Z
|
2015-08-06T13:47:10.000Z
|
GPy/kern/_src/psi_comp/__init__.py
|
strongh/GPy
|
775ce9e64c1e8f472083b8f2430134047d97b2fa
|
[
"BSD-3-Clause"
] | null | null | null |
GPy/kern/_src/psi_comp/__init__.py
|
strongh/GPy
|
775ce9e64c1e8f472083b8f2430134047d97b2fa
|
[
"BSD-3-Clause"
] | 1
|
2021-12-09T01:31:17.000Z
|
2021-12-09T01:31:17.000Z
|
# Copyright (c) 2012, GPy authors (see AUTHORS.txt).
# Licensed under the BSD 3-clause license (see LICENSE.txt)
from ....core.parameterization.parameter_core import Pickleable
from GPy.util.caching import Cache_this
from ....core.parameterization import variational
import rbf_psi_comp
import ssrbf_psi_comp
import sslinear_psi_comp
import linear_psi_comp
# NOTE(review): this file is Python 2 — `raise ValueError, "..."` and the
# implicit-relative imports above are invalid Python 3 syntax.
class PSICOMP_RBF(Pickleable):
    """Dispatch psi-statistics computations for the RBF kernel to the
    implementation matching the variational posterior type."""

    # Cache results; ignore_args=(0,) excludes `self` from the cache key.
    @Cache_this(limit=2, ignore_args=(0,))
    def psicomputations(self, variance, lengthscale, Z, variational_posterior):
        """Compute psi statistics, dispatching on the posterior type."""
        if isinstance(variational_posterior, variational.NormalPosterior):
            return rbf_psi_comp.psicomputations(variance, lengthscale, Z, variational_posterior)
        elif isinstance(variational_posterior, variational.SpikeAndSlabPosterior):
            return ssrbf_psi_comp.psicomputations(variance, lengthscale, Z, variational_posterior)
        else:
            # NOTE(review): "distriubtion" typo in the runtime message — left
            # untouched here since it is a behavioral string.
            raise ValueError, "unknown distriubtion received for psi-statistics"

    # dL_dpsi* gradients are also excluded from the cache key (ignore_args).
    @Cache_this(limit=2, ignore_args=(0,1,2,3))
    def psiDerivativecomputations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, variance, lengthscale, Z, variational_posterior):
        """Compute psi-statistic derivatives, dispatching on the posterior type."""
        if isinstance(variational_posterior, variational.NormalPosterior):
            return rbf_psi_comp.psiDerivativecomputations(dL_dpsi0, dL_dpsi1, dL_dpsi2, variance, lengthscale, Z, variational_posterior)
        elif isinstance(variational_posterior, variational.SpikeAndSlabPosterior):
            return ssrbf_psi_comp.psiDerivativecomputations(dL_dpsi0, dL_dpsi1, dL_dpsi2, variance, lengthscale, Z, variational_posterior)
        else:
            raise ValueError, "unknown distriubtion received for psi-statistics"

    def _setup_observers(self):
        # Stateless dispatcher: nothing to observe.
        pass
class PSICOMP_Linear(Pickleable):
    """Dispatcher for psi-statistics of the linear kernel.

    Routes each computation to the normal-posterior implementation
    (``linear_psi_comp``) or the spike-and-slab implementation
    (``sslinear_psi_comp``) depending on the type of the variational
    posterior passed in.
    """
    @Cache_this(limit=2, ignore_args=(0,))
    def psicomputations(self, variance, Z, variational_posterior):
        """Compute the psi-statistics for the linear kernel.

        Raises:
            ValueError: if the variational posterior type is not supported.
        """
        if isinstance(variational_posterior, variational.NormalPosterior):
            return linear_psi_comp.psicomputations(variance, Z, variational_posterior)
        elif isinstance(variational_posterior, variational.SpikeAndSlabPosterior):
            return sslinear_psi_comp.psicomputations(variance, Z, variational_posterior)
        else:
            # Fixed: call-style raise (the old "raise E, msg" form is a
            # SyntaxError on Python 3) and the "distriubtion" typo.
            raise ValueError("unknown distribution received for psi-statistics")
    @Cache_this(limit=2, ignore_args=(0,1,2,3))
    def psiDerivativecomputations(self, dL_dpsi0, dL_dpsi1, dL_dpsi2, variance, Z, variational_posterior):
        """Compute derivatives of the psi-statistics w.r.t. the kernel parameters.

        Raises:
            ValueError: if the variational posterior type is not supported.
        """
        if isinstance(variational_posterior, variational.NormalPosterior):
            return linear_psi_comp.psiDerivativecomputations(dL_dpsi0, dL_dpsi1, dL_dpsi2, variance, Z, variational_posterior)
        elif isinstance(variational_posterior, variational.SpikeAndSlabPosterior):
            return sslinear_psi_comp.psiDerivativecomputations(dL_dpsi0, dL_dpsi1, dL_dpsi2, variance, Z, variational_posterior)
        else:
            raise ValueError("unknown distribution received for psi-statistics")
    def _setup_observers(self):
        # This dispatcher holds no parameters of its own, so there is
        # nothing to observe.
        pass
| 54.381818
| 138
| 0.760615
| 331
| 2,991
| 6.637462
| 0.205438
| 0.182066
| 0.114702
| 0.149294
| 0.855712
| 0.855712
| 0.855712
| 0.842057
| 0.828402
| 0.828402
| 0
| 0.014794
| 0.163825
| 2,991
| 55
| 139
| 54.381818
| 0.863655
| 0.036108
| 0
| 0.533333
| 0
| 0
| 0.066644
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.044444
| 0.155556
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a27d06552f2bfc1bd33158adeb294fc41deefbbf
| 322
|
py
|
Python
|
sira/__init__.py
|
GeoscienceAustralia/sira
|
01c99acae497fedc971367e0fa1611a90f5c64ef
|
[
"Apache-2.0"
] | 1
|
2021-11-17T16:10:56.000Z
|
2021-11-17T16:10:56.000Z
|
sira/__init__.py
|
GeoscienceAustralia/sira
|
01c99acae497fedc971367e0fa1611a90f5c64ef
|
[
"Apache-2.0"
] | 10
|
2020-07-28T02:19:22.000Z
|
2022-03-04T00:34:40.000Z
|
sira/__init__.py
|
GeoscienceAustralia/sira
|
01c99acae497fedc971367e0fa1611a90f5c64ef
|
[
"Apache-2.0"
] | 1
|
2019-08-22T10:26:38.000Z
|
2019-08-22T10:26:38.000Z
|
from sira.__about__ import (
__packagename__,
__description__,
__url__,
__version__,
__author__,
__email__,
__license__,
__copyright__
)
__all__ = [
__packagename__,
__description__,
__url__,
__version__,
__author__,
__email__,
__license__,
__copyright__
]
| 14.636364
| 28
| 0.658385
| 21
| 322
| 6.666667
| 0.619048
| 0.314286
| 0.357143
| 0.457143
| 0.842857
| 0.842857
| 0.842857
| 0.842857
| 0
| 0
| 0
| 0
| 0.279503
| 322
| 21
| 29
| 15.333333
| 0.603448
| 0
| 0
| 0.8
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.05
| 0
| 0.05
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
a2c31035e3a5e5a50495528df3bff937fa04916e
| 62
|
py
|
Python
|
cbt/code.py
|
mentor2182/prog30042020
|
e76410da72384cb504e449870995b0d38a012f36
|
[
"MIT"
] | null | null | null |
cbt/code.py
|
mentor2182/prog30042020
|
e76410da72384cb504e449870995b0d38a012f36
|
[
"MIT"
] | null | null | null |
cbt/code.py
|
mentor2182/prog30042020
|
e76410da72384cb504e449870995b0d38a012f36
|
[
"MIT"
] | null | null | null |
# --------------
# Prints the built-in `bool` type object itself, i.e. "<class 'bool'>".
# NOTE(review): passing the type rather than calling it looks unintentional —
# presumably bool(<some value>) was meant; confirm the exercise's intent.
print(bool)
# --------------
# Same output repeated a second time.
print(bool)
| 6.888889
| 16
| 0.290323
| 4
| 62
| 4.5
| 0.5
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 62
| 8
| 17
| 7.75
| 0.346154
| 0.467742
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
a2d08865cefed507ec460c20117048fb3cc964b5
| 175
|
py
|
Python
|
gym_matching/envs/__init__.py
|
mburq/gym-matching
|
277dc8832bd3878e1bd94c29893dcfc5278e9780
|
[
"MIT"
] | null | null | null |
gym_matching/envs/__init__.py
|
mburq/gym-matching
|
277dc8832bd3878e1bd94c29893dcfc5278e9780
|
[
"MIT"
] | null | null | null |
gym_matching/envs/__init__.py
|
mburq/gym-matching
|
277dc8832bd3878e1bd94c29893dcfc5278e9780
|
[
"MIT"
] | null | null | null |
from gym_matching.envs.matching import MatchingEnv
from gym_matching.envs.kidney_matching import KidneyMatchingEnv
from gym_matching.envs.taxi_matching import TaxiMatchingEnv
| 43.75
| 63
| 0.897143
| 23
| 175
| 6.608696
| 0.434783
| 0.138158
| 0.296053
| 0.375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.068571
| 175
| 3
| 64
| 58.333333
| 0.932515
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
a2eab7604e628c89de3e6a148162bc8f3ddf899c
| 42,909
|
py
|
Python
|
monk/pytorch_tests.py
|
take2rohit/monk_v1
|
9c567bf2c8b571021b120d879ba9edf7751b9f92
|
[
"Apache-2.0"
] | 542
|
2019-11-10T12:09:31.000Z
|
2022-03-28T11:39:07.000Z
|
monk/pytorch_tests.py
|
take2rohit/monk_v1
|
9c567bf2c8b571021b120d879ba9edf7751b9f92
|
[
"Apache-2.0"
] | 117
|
2019-11-12T09:39:24.000Z
|
2022-03-12T00:20:41.000Z
|
monk/pytorch_tests.py
|
take2rohit/monk_v1
|
9c567bf2c8b571021b120d879ba9edf7751b9f92
|
[
"Apache-2.0"
] | 246
|
2019-11-09T21:53:24.000Z
|
2022-03-29T00:57:07.000Z
|
import os
import sys
import time
from monk.pip_unit_tests.pytorch.test_optimizer_sgd import test_optimizer_sgd
from monk.pip_unit_tests.pytorch.test_optimizer_nesterov_sgd import test_optimizer_nesterov_sgd
from monk.pip_unit_tests.pytorch.test_optimizer_rmsprop import test_optimizer_rmsprop
from monk.pip_unit_tests.pytorch.test_optimizer_momentum_rmsprop import test_optimizer_momentum_rmsprop
from monk.pip_unit_tests.pytorch.test_optimizer_adam import test_optimizer_adam
from monk.pip_unit_tests.pytorch.test_optimizer_adamax import test_optimizer_adamax
from monk.pip_unit_tests.pytorch.test_optimizer_adamw import test_optimizer_adamw
from monk.pip_unit_tests.pytorch.test_optimizer_adadelta import test_optimizer_adadelta
from monk.pip_unit_tests.pytorch.test_optimizer_adagrad import test_optimizer_adagrad
from monk.pip_unit_tests.pytorch.test_loss_l1 import test_loss_l1
from monk.pip_unit_tests.pytorch.test_loss_l2 import test_loss_l2
from monk.pip_unit_tests.pytorch.test_loss_l2 import test_loss_l2
from monk.pip_unit_tests.pytorch.test_loss_softmax_crossentropy import test_loss_softmax_crossentropy
from monk.pip_unit_tests.pytorch.test_loss_crossentropy import test_loss_crossentropy
from monk.pip_unit_tests.pytorch.test_loss_sigmoid_binary_crossentropy import test_loss_sigmoid_binary_crossentropy
from monk.pip_unit_tests.pytorch.test_loss_binary_crossentropy import test_loss_binary_crossentropy
from monk.pip_unit_tests.pytorch.test_loss_kldiv import test_loss_kldiv
from monk.pip_unit_tests.pytorch.test_loss_poisson_nll import test_loss_poisson_nll
from monk.pip_unit_tests.pytorch.test_loss_huber import test_loss_huber
from monk.pip_unit_tests.pytorch.test_loss_hinge import test_loss_hinge
from monk.pip_unit_tests.pytorch.test_loss_squared_hinge import test_loss_squared_hinge
from monk.pip_unit_tests.pytorch.test_loss_multimargin import test_loss_multimargin
from monk.pip_unit_tests.pytorch.test_loss_squared_multimargin import test_loss_squared_multimargin
from monk.pip_unit_tests.pytorch.test_loss_multilabelmargin import test_loss_multilabelmargin
from monk.pip_unit_tests.pytorch.test_loss_multilabelsoftmargin import test_loss_multilabelsoftmargin
from monk.pip_unit_tests.pytorch.test_layer_convolution1d import test_layer_convolution1d
from monk.pip_unit_tests.pytorch.test_layer_convolution2d import test_layer_convolution2d
from monk.pip_unit_tests.pytorch.test_layer_convolution3d import test_layer_convolution3d
from monk.pip_unit_tests.pytorch.test_layer_transposed_convolution1d import test_layer_transposed_convolution1d
from monk.pip_unit_tests.pytorch.test_layer_transposed_convolution2d import test_layer_transposed_convolution2d
from monk.pip_unit_tests.pytorch.test_layer_transposed_convolution3d import test_layer_transposed_convolution3d
from monk.pip_unit_tests.pytorch.test_layer_max_pooling1d import test_layer_max_pooling1d
from monk.pip_unit_tests.pytorch.test_layer_max_pooling2d import test_layer_max_pooling2d
from monk.pip_unit_tests.pytorch.test_layer_max_pooling3d import test_layer_max_pooling3d
from monk.pip_unit_tests.pytorch.test_layer_average_pooling1d import test_layer_average_pooling1d
from monk.pip_unit_tests.pytorch.test_layer_average_pooling2d import test_layer_average_pooling2d
from monk.pip_unit_tests.pytorch.test_layer_average_pooling3d import test_layer_average_pooling3d
from monk.pip_unit_tests.pytorch.test_layer_global_max_pooling1d import test_layer_global_max_pooling1d
from monk.pip_unit_tests.pytorch.test_layer_global_max_pooling2d import test_layer_global_max_pooling2d
from monk.pip_unit_tests.pytorch.test_layer_global_max_pooling3d import test_layer_global_max_pooling3d
from monk.pip_unit_tests.pytorch.test_layer_global_average_pooling1d import test_layer_global_average_pooling1d
from monk.pip_unit_tests.pytorch.test_layer_global_average_pooling2d import test_layer_global_average_pooling2d
from monk.pip_unit_tests.pytorch.test_layer_global_average_pooling3d import test_layer_global_average_pooling3d
from monk.pip_unit_tests.pytorch.test_layer_batch_normalization import test_layer_batch_normalization
from monk.pip_unit_tests.pytorch.test_layer_instance_normalization import test_layer_instance_normalization
from monk.pip_unit_tests.pytorch.test_layer_layer_normalization import test_layer_layer_normalization
from monk.pip_unit_tests.pytorch.test_layer_identity import test_layer_identity
from monk.pip_unit_tests.pytorch.test_layer_fully_connected import test_layer_fully_connected
from monk.pip_unit_tests.pytorch.test_layer_dropout import test_layer_dropout
from monk.pip_unit_tests.pytorch.test_layer_flatten import test_layer_flatten
from monk.pip_unit_tests.pytorch.test_activation_relu import test_activation_relu
from monk.pip_unit_tests.pytorch.test_activation_sigmoid import test_activation_sigmoid
from monk.pip_unit_tests.pytorch.test_activation_tanh import test_activation_tanh
from monk.pip_unit_tests.pytorch.test_activation_softplus import test_activation_softplus
from monk.pip_unit_tests.pytorch.test_activation_softsign import test_activation_softsign
from monk.pip_unit_tests.pytorch.test_activation_elu import test_activation_elu
from monk.pip_unit_tests.pytorch.test_activation_leaky_relu import test_activation_leaky_relu
from monk.pip_unit_tests.pytorch.test_activation_prelu import test_activation_prelu
from monk.pip_unit_tests.pytorch.test_activation_selu import test_activation_selu
from monk.pip_unit_tests.pytorch.test_activation_hardshrink import test_activation_hardshrink
from monk.pip_unit_tests.pytorch.test_activation_hardtanh import test_activation_hardtanh
from monk.pip_unit_tests.pytorch.test_activation_logsigmoid import test_activation_logsigmoid
from monk.pip_unit_tests.pytorch.test_activation_relu6 import test_activation_relu6
from monk.pip_unit_tests.pytorch.test_activation_rrelu import test_activation_rrelu
from monk.pip_unit_tests.pytorch.test_activation_celu import test_activation_celu
from monk.pip_unit_tests.pytorch.test_activation_softshrink import test_activation_softshrink
from monk.pip_unit_tests.pytorch.test_activation_tanhshrink import test_activation_tanhshrink
from monk.pip_unit_tests.pytorch.test_activation_threshold import test_activation_threshold
from monk.pip_unit_tests.pytorch.test_activation_softmin import test_activation_softmin
from monk.pip_unit_tests.pytorch.test_activation_softmax import test_activation_softmax
from monk.pip_unit_tests.pytorch.test_activation_logsoftmax import test_activation_logsoftmax
from monk.pip_unit_tests.pytorch.test_layer_concatenate import test_layer_concatenate
from monk.pip_unit_tests.pytorch.test_layer_add import test_layer_add
from monk.pip_unit_tests.pytorch.test_block_resnet_v1 import test_block_resnet_v1
from monk.pip_unit_tests.pytorch.test_block_resnet_v2 import test_block_resnet_v2
from monk.pip_unit_tests.pytorch.test_block_resnet_v1_bottleneck import test_block_resnet_v1_bottleneck
from monk.pip_unit_tests.pytorch.test_block_resnet_v2_bottleneck import test_block_resnet_v2_bottleneck
from monk.pip_unit_tests.pytorch.test_block_resnext import test_block_resnext
from monk.pip_unit_tests.pytorch.test_block_mobilenet_v2_linear_bottleneck import test_block_mobilenet_v2_linear_bottleneck
from monk.pip_unit_tests.pytorch.test_block_mobilenet_v2_inverted_linear_bottleneck import test_block_mobilenet_v2_inverted_linear_bottleneck
from monk.pip_unit_tests.pytorch.test_block_squeezenet_fire import test_block_squeezenet_fire
from monk.pip_unit_tests.pytorch.test_block_densenet import test_block_densenet
from monk.pip_unit_tests.pytorch.test_block_conv_bn_relu import test_block_conv_bn_relu
from monk.pip_unit_tests.pytorch.test_block_inception_a import test_block_inception_a
from monk.pip_unit_tests.pytorch.test_block_inception_b import test_block_inception_b
from monk.pip_unit_tests.pytorch.test_block_inception_c import test_block_inception_c
from monk.pip_unit_tests.pytorch.test_block_inception_d import test_block_inception_d
from monk.pip_unit_tests.pytorch.test_block_inception_e import test_block_inception_e
from monk.pip_functionality_tests.pytorch.test_default_train import test_default_train
from monk.pip_functionality_tests.pytorch.test_default_eval_infer import test_default_eval_infer
from monk.pip_functionality_tests.pytorch.test_update_copy_from import test_update_copy_from
from monk.pip_functionality_tests.pytorch.test_update_normal import test_update_normal
from monk.pip_functionality_tests.pytorch.test_update_eval_infer import test_update_eval_infer
from monk.pip_functionality_tests.pytorch.test_expert_train import test_expert_train
from monk.pip_functionality_tests.pytorch.test_expert_eval_infer import test_expert_eval_infer
from monk.pip_functionality_tests.pytorch.test_switch_default import test_switch_default
from monk.pip_functionality_tests.pytorch.test_switch_expert import test_switch_expert
from monk.pip_functionality_tests.pytorch.test_compare import test_compare
from monk.pip_functionality_tests.pytorch.test_analyse import test_analyse
def run_functionality_tests():
    """Run the 11 PyTorch functionality test suites in sequence.

    Each suite's verbose output is redirected to ``test_logs.txt``; a short
    progress report and a final summary are printed to the console.  The
    scratch ``workspace`` directory created by the suites is removed at
    the end.

    Fixes over the original: the log file is opened once and closed (the
    original opened it 11 times and never closed a handle), the
    "Succesful" typo in the progress lines is corrected, and the blank
    separator line missing after stage 6/11 is restored for consistency.
    """
    # The individual suites, in execution order.
    stages = [
        test_default_train,
        test_default_eval_infer,
        test_update_copy_from,
        test_update_normal,
        test_update_eval_infer,
        test_expert_train,
        test_expert_eval_infer,
        test_switch_default,
        test_switch_expert,
        test_compare,
        test_analyse,
    ]
    origstdout = sys.stdout
    print("Running Tests...")
    # Shared accumulator: every suite updates the counters and appends to
    # the failure/skip lists, then returns the dict.
    system_dict = {
        "total_tests": 0,
        "successful_tests": 0,
        "failed_tests_lists": [],
        "failed_tests_exceptions": [],
        "skipped_tests_lists": [],
    }
    start = time.time()
    log = open("test_logs.txt", 'w')
    try:
        for idx, stage in enumerate(stages, 1):
            print("Running {}/{}".format(idx, len(stages)))
            sys.stdout = log  # capture the suite's verbose output in the log
            system_dict = stage(system_dict)
            sys.stdout = origstdout
            print("Tests Completed - {}".format(system_dict["total_tests"]))
            print("Tests Successful - {}".format(system_dict["successful_tests"]))
            print("")
        elapsed = time.time() - start
        # Detailed summary (with per-failure breakdown) goes to the log;
        # the console gets the counters plus a pointer to the log file.
        sys.stdout = log
        _print_functionality_summary(system_dict, elapsed, detailed=True)
        sys.stdout = origstdout
        _print_functionality_summary(system_dict, elapsed, detailed=False)
        print("See test_logs.txt for errors")
        print("")
    finally:
        # Always restore stdout and release the log handle, even when a
        # suite raises.
        sys.stdout = origstdout
        log.close()
    os.system("rm -r workspace")


def _print_functionality_summary(system_dict, elapsed, detailed):
    """Print aggregate counters; with detailed=True also list each failure."""
    print("Total Tests - {}".format(system_dict["total_tests"]))
    print("Time Taken - {} sec".format(elapsed))
    print("Num Successful Tests - {}".format(system_dict["successful_tests"]))
    print("Num Failed Tests - {}".format(len(system_dict["failed_tests_lists"])))
    print("Num Skipped Tests - {}".format(len(system_dict["skipped_tests_lists"])))
    print("")
    if detailed:
        for i, name in enumerate(system_dict["failed_tests_lists"]):
            print("{}. Failed Test:".format(i + 1))
            print("Name - {}".format(name))
            print("Error - {}".format(system_dict["failed_tests_exceptions"][i]))
            print("")
        print("Skipped Tests List - {}".format(system_dict["skipped_tests_lists"]))
        print("")
def run_unit_tests():
origstdout = sys.stdout
print("Running Tests...");
sys.stdout = open("test_logs.txt", 'w');
system_dict = {};
system_dict["total_tests"] = 0;
system_dict["successful_tests"] = 0;
system_dict["failed_tests_lists"] = [];
system_dict["failed_tests_exceptions"] = [];
system_dict["skipped_tests_lists"] = [];
start = time.time()
exp_num = 1;
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_optimizer_sgd(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_optimizer_nesterov_sgd(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_optimizer_rmsprop(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_optimizer_adam(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_optimizer_adamax(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_optimizer_adamw(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_optimizer_adadelta(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_optimizer_adagrad(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_loss_l1(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_loss_l2(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_loss_softmax_crossentropy(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_loss_crossentropy(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_loss_sigmoid_binary_crossentropy(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_loss_binary_crossentropy(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_loss_kldiv(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_loss_poisson_nll(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_loss_huber(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_loss_hinge(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_loss_squared_hinge(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_loss_multimargin(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_loss_squared_multimargin(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_loss_multilabelmargin(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_loss_multilabelsoftmargin(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_convolution1d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_convolution2d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_convolution3d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_transposed_convolution1d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_transposed_convolution2d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_transposed_convolution3d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_max_pooling1d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_max_pooling2d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_max_pooling3d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_average_pooling1d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_average_pooling2d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_average_pooling3d(system_dict)
sys.stdout = origstdout;
print("Tests Completed - {}".format(system_dict["total_tests"]));
print("Tests Succesful - {}".format(system_dict["successful_tests"]));
print("")
print("Running {}/<num>".format(exp_num));
exp_num += 1;
system_dict = test_layer_global_max_pooling1d(system_dict)
sys.stdout = origstdout;
# --- Tail of the previous test's stanza: report its running totals.
# (The matching "Running" line for this test is above this chunk.)
# NOTE(review): the "Succesful" typo in the message is preserved byte-for-byte
# so any log parsers keyed on the existing wording keep working.
print("Tests Completed - {}".format(system_dict["total_tests"]))
print("Tests Succesful - {}".format(system_dict["successful_tests"]))
print("")

# Run the remaining layer / activation / block tests.
# Each entry follows the exact same protocol as the hand-unrolled stanzas it
# replaces: announce the experiment number, bump it, invoke the test (which
# presumably redirects sys.stdout to the log file internally — TODO confirm
# against the test functions defined above), restore stdout to the console,
# then echo the running totals accumulated in system_dict.
_remaining_tests = [
    test_layer_global_max_pooling2d,
    test_layer_global_max_pooling3d,
    test_layer_global_average_pooling1d,
    test_layer_global_average_pooling2d,
    test_layer_global_average_pooling3d,
    test_layer_batch_normalization,
    test_layer_instance_normalization,
    test_layer_layer_normalization,
    test_layer_identity,
    test_layer_fully_connected,
    test_layer_dropout,
    test_layer_flatten,
    test_activation_relu,
    test_activation_sigmoid,
    test_activation_tanh,
    test_activation_softplus,
    test_activation_softsign,
    test_activation_elu,
    test_activation_leaky_relu,
    test_activation_prelu,
    test_activation_selu,
    test_activation_hardshrink,
    test_activation_hardtanh,
    test_activation_logsigmoid,
    test_activation_relu6,
    test_activation_rrelu,
    test_activation_celu,
    test_activation_softshrink,
    test_activation_tanhshrink,
    test_activation_threshold,
    test_activation_softmin,
    test_activation_softmax,
    test_activation_logsoftmax,
    test_layer_concatenate,
    test_layer_add,
    test_block_resnet_v1,
    test_block_resnet_v2,
    test_block_resnet_v1_bottleneck,
    test_block_resnet_v2_bottleneck,
    test_block_resnext,
    test_block_mobilenet_v2_linear_bottleneck,
    test_block_mobilenet_v2_inverted_linear_bottleneck,
    test_block_squeezenet_fire,
    test_block_densenet,
    test_block_conv_bn_relu,
    test_block_inception_a,
    test_block_inception_b,
    test_block_inception_c,
    test_block_inception_d,
    test_block_inception_e,
]
for _run_test in _remaining_tests:
    print("Running {}/<num>".format(exp_num))
    exp_num += 1
    system_dict = _run_test(system_dict)
    sys.stdout = origstdout
    print("Tests Completed - {}".format(system_dict["total_tests"]))
    print("Tests Succesful - {}".format(system_dict["successful_tests"]))
    print("")

# --- Final summary: written first into the append-mode log file, then the
# same figures are echoed to the console.
# NOTE(review): the log file handle is never closed here (as in the original);
# it is only abandoned when sys.stdout is reassigned below.
sys.stdout = open("test_logs.txt", 'a')
end = time.time()
print("Total Tests - {}".format(system_dict["total_tests"]))
print("Time Taken - {} sec".format(end - start))
print("Num Successful Tests - {}".format(system_dict["successful_tests"]))
print("Num Failed Tests - {}".format(len(system_dict["failed_tests_lists"])))
print("Num Skipped Tests - {}".format(len(system_dict["skipped_tests_lists"])))
print("")
# Detail every failure with its captured exception; the two lists are
# parallel (name at index i pairs with exception at index i).
for i, _failed_name in enumerate(system_dict["failed_tests_lists"]):
    print("{}. Failed Test:".format(i + 1))
    print("Name - {}".format(_failed_name))
    print("Error - {}".format(system_dict["failed_tests_exceptions"][i]))
    print("")
print("Skipped Tests List - {}".format(system_dict["skipped_tests_lists"]))
print("")
# Restore console output and repeat the headline numbers for the user.
sys.stdout = origstdout
print("Total Tests - {}".format(system_dict["total_tests"]))
print("Time Taken - {} sec".format(end - start))
print("Num Successful Tests - {}".format(system_dict["successful_tests"]))
print("Num Failed Tests - {}".format(len(system_dict["failed_tests_lists"])))
print("Num Skipped Tests - {}".format(len(system_dict["skipped_tests_lists"])))
print("See test_logs.txt for errors")
print("")
# NOTE(review): shell-out cleanup is non-portable (POSIX only) and prints to
# stderr if the directory is missing; shutil.rmtree would be the portable
# alternative, but the exact existing behaviour is kept unchanged here.
os.system("rm -r workspace")
| 38.0062
| 141
| 0.708896
| 5,399
| 42,909
| 5.295425
| 0.030006
| 0.148304
| 0.116404
| 0.072053
| 0.898286
| 0.878979
| 0.878979
| 0.872403
| 0.811123
| 0.759391
| 0
| 0.005729
| 0.141649
| 42,909
| 1,129
| 142
| 38.0062
| 0.770519
| 0
| 0
| 0.751765
| 0
| 0
| 0.268492
| 0.002144
| 0
| 0
| 0
| 0
| 0
| 1
| 0.002353
| false
| 0
| 0.12
| 0
| 0.122353
| 0.502353
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
0c314f9d3b57a477a6881f6d7430bc699369c584
| 148,786
|
py
|
Python
|
sympy/integrals/rubi/rules/logarithms.py
|
tachycline/sympy
|
abf6fec12012852c7e6fae38461da9723cadc8b9
|
[
"BSD-3-Clause"
] | null | null | null |
sympy/integrals/rubi/rules/logarithms.py
|
tachycline/sympy
|
abf6fec12012852c7e6fae38461da9723cadc8b9
|
[
"BSD-3-Clause"
] | 1
|
2017-10-23T06:56:43.000Z
|
2017-10-23T06:56:43.000Z
|
sympy/integrals/rubi/rules/logarithms.py
|
tachycline/sympy
|
abf6fec12012852c7e6fae38461da9723cadc8b9
|
[
"BSD-3-Clause"
] | 1
|
2020-10-02T15:05:03.000Z
|
2020-10-02T15:05:03.000Z
|
from sympy.external import import_module
matchpy = import_module("matchpy")
from sympy.utilities.decorator import doctest_depends_on
if matchpy:
from matchpy import Pattern, ReplacementRule, CustomConstraint
from sympy.integrals.rubi.utility_function import (Int, Set, With, Module, Scan, MapAnd, FalseQ, ZeroQ, NegativeQ, NonzeroQ, FreeQ, NFreeQ, List, Log, PositiveQ, PositiveIntegerQ, NegativeIntegerQ, IntegerQ, IntegersQ, ComplexNumberQ, PureComplexNumberQ, RealNumericQ, PositiveOrZeroQ, NegativeOrZeroQ, FractionOrNegativeQ, NegQ, Equal, Unequal, IntPart, FracPart, RationalQ, ProductQ, SumQ, NonsumQ, Subst, First, Rest, SqrtNumberQ, SqrtNumberSumQ, LinearQ, Sqrt, ArcCosh, Coefficient, Denominator, Hypergeometric2F1, Not, Simplify, FractionalPart, IntegerPart, AppellF1, EllipticPi, EllipticE, EllipticF, ArcTan, ArcCot, ArcCoth, ArcTanh, ArcSin, ArcSinh, ArcCos, ArcCsc, ArcSec, ArcCsch, ArcSech, Sinh, Tanh, Cosh, Sech, Csch, Coth, LessEqual, Less, Greater, GreaterEqual, FractionQ, IntLinearcQ, Expand, IndependentQ, PowerQ, IntegerPowerQ, PositiveIntegerPowerQ, FractionalPowerQ, AtomQ, ExpQ, LogQ, Head, MemberQ, TrigQ, SinQ, CosQ, TanQ, CotQ, SecQ, CscQ, Sin, Cos, Tan, Cot, Sec, Csc, HyperbolicQ, SinhQ, CoshQ, TanhQ, CothQ, SechQ, CschQ, InverseTrigQ, SinCosQ, SinhCoshQ, LeafCount, Numerator, NumberQ, NumericQ, Length, ListQ, Im, Re, InverseHyperbolicQ, InverseFunctionQ, TrigHyperbolicFreeQ, InverseFunctionFreeQ, RealQ, EqQ, FractionalPowerFreeQ, ComplexFreeQ, PolynomialQ, FactorSquareFree, PowerOfLinearQ, Exponent, QuadraticQ, LinearPairQ, BinomialParts, TrinomialParts, PolyQ, EvenQ, OddQ, PerfectSquareQ, NiceSqrtAuxQ, NiceSqrtQ, Together, PosAux, PosQ, CoefficientList, ReplaceAll, ExpandLinearProduct, GCD, ContentFactor, NumericFactor, NonnumericFactors, MakeAssocList, GensymSubst, KernelSubst, ExpandExpression, Apart, SmartApart, MatchQ, PolynomialQuotientRemainder, FreeFactors, NonfreeFactors, RemoveContentAux, RemoveContent, FreeTerms, NonfreeTerms, ExpandAlgebraicFunction, CollectReciprocals, ExpandCleanup, AlgebraicFunctionQ, Coeff, LeadTerm, RemainingTerms, LeadFactor, RemainingFactors, LeadBase, LeadDegree, Numer, Denom, hypergeom, Expon, MergeMonomials, 
PolynomialDivide, BinomialQ, TrinomialQ, GeneralizedBinomialQ, GeneralizedTrinomialQ, FactorSquareFreeList, PerfectPowerTest, SquareFreeFactorTest, RationalFunctionQ, RationalFunctionFactors, NonrationalFunctionFactors, Reverse, RationalFunctionExponents, RationalFunctionExpand, ExpandIntegrand, SimplerQ, SimplerSqrtQ, SumSimplerQ, BinomialDegree, TrinomialDegree, CancelCommonFactors, SimplerIntegrandQ, GeneralizedBinomialDegree, GeneralizedBinomialParts, GeneralizedTrinomialDegree, GeneralizedTrinomialParts, MonomialQ, MonomialSumQ, MinimumMonomialExponent, MonomialExponent, LinearMatchQ, PowerOfLinearMatchQ, QuadraticMatchQ, CubicMatchQ, BinomialMatchQ, TrinomialMatchQ, GeneralizedBinomialMatchQ, GeneralizedTrinomialMatchQ, QuotientOfLinearsMatchQ, PolynomialTermQ, PolynomialTerms, NonpolynomialTerms, PseudoBinomialParts, NormalizePseudoBinomial, PseudoBinomialPairQ, PseudoBinomialQ, PolynomialGCD, PolyGCD, AlgebraicFunctionFactors, NonalgebraicFunctionFactors, QuotientOfLinearsP, QuotientOfLinearsParts, QuotientOfLinearsQ, Flatten, Sort, AbsurdNumberQ, AbsurdNumberFactors, NonabsurdNumberFactors, SumSimplerAuxQ, Prepend, Drop, CombineExponents, FactorInteger, FactorAbsurdNumber, SubstForInverseFunction, SubstForFractionalPower, SubstForFractionalPowerOfQuotientOfLinears, FractionalPowerOfQuotientOfLinears, SubstForFractionalPowerQ, SubstForFractionalPowerAuxQ, FractionalPowerOfSquareQ, FractionalPowerSubexpressionQ, Apply, FactorNumericGcd, MergeableFactorQ, MergeFactor, MergeFactors, TrigSimplifyQ, TrigSimplify, TrigSimplifyRecur, Order, FactorOrder, Smallest, OrderedQ, MinimumDegree, PositiveFactors, Sign, NonpositiveFactors, PolynomialInAuxQ, PolynomialInQ, ExponentInAux, ExponentIn, PolynomialInSubstAux, PolynomialInSubst, Distrib, DistributeDegree, FunctionOfPower, DivideDegreesOfFactors, MonomialFactor, FullSimplify, FunctionOfLinearSubst, FunctionOfLinear, NormalizeIntegrand, NormalizeIntegrandAux, NormalizeIntegrandFactor, NormalizeIntegrandFactorBase, 
NormalizeTogether, NormalizeLeadTermSigns, AbsorbMinusSign, NormalizeSumFactors, SignOfFactor, NormalizePowerOfLinear, SimplifyIntegrand, SimplifyTerm, TogetherSimplify, SmartSimplify, SubstForExpn, ExpandToSum, UnifySum, UnifyTerms, UnifyTerm, CalculusQ, FunctionOfInverseLinear, PureFunctionOfSinhQ, PureFunctionOfTanhQ, PureFunctionOfCoshQ, IntegerQuotientQ, OddQuotientQ, EvenQuotientQ, FindTrigFactor, FunctionOfSinhQ, FunctionOfCoshQ, OddHyperbolicPowerQ, FunctionOfTanhQ, FunctionOfTanhWeight, FunctionOfHyperbolicQ, SmartNumerator, SmartDenominator, SubstForAux, ActivateTrig, ExpandTrig, TrigExpand, SubstForTrig, SubstForHyperbolic, InertTrigFreeQ, LCM, SubstForFractionalPowerOfLinear, FractionalPowerOfLinear, InverseFunctionOfLinear, InertTrigQ, InertReciprocalQ, DeactivateTrig, FixInertTrigFunction, DeactivateTrigAux, PowerOfInertTrigSumQ, PiecewiseLinearQ, KnownTrigIntegrandQ, KnownSineIntegrandQ, KnownTangentIntegrandQ, KnownCotangentIntegrandQ, KnownSecantIntegrandQ, TryPureTanSubst, TryTanhSubst, TryPureTanhSubst, AbsurdNumberGCD, AbsurdNumberGCDList, ExpandTrigExpand, ExpandTrigReduce, ExpandTrigReduceAux, NormalizeTrig, TrigToExp, ExpandTrigToExp, TrigReduce, FunctionOfTrig, AlgebraicTrigFunctionQ, FunctionOfHyperbolic, FunctionOfQ, FunctionOfExpnQ, PureFunctionOfSinQ, PureFunctionOfCosQ, PureFunctionOfTanQ, PureFunctionOfCotQ, FunctionOfCosQ, FunctionOfSinQ, OddTrigPowerQ, FunctionOfTanQ, FunctionOfTanWeight, FunctionOfTrigQ, FunctionOfDensePolynomialsQ, FunctionOfLog, PowerVariableExpn, PowerVariableDegree, PowerVariableSubst, EulerIntegrandQ, FunctionOfSquareRootOfQuadratic, SquareRootOfQuadraticSubst, Divides, EasyDQ, ProductOfLinearPowersQ, Rt, NthRoot, AtomBaseQ, SumBaseQ, NegSumBaseQ, AllNegTermQ, SomeNegTermQ, TrigSquareQ, RtAux, TrigSquare, IntSum, IntTerm, Map2, ConstantFactor, SameQ, ReplacePart, CommonFactors, MostMainFactorPosition, FunctionOfExponentialQ, FunctionOfExponential, FunctionOfExponentialFunction, FunctionOfExponentialFunctionAux, 
FunctionOfExponentialTest, FunctionOfExponentialTestAux, stdev, rubi_test, If, IntQuadraticQ, IntBinomialQ, RectifyTangent, RectifyCotangent, Inequality, Condition, Simp, SimpHelp, SplitProduct, SplitSum, SubstFor, SubstForAux, FresnelS, FresnelC, Erfc, Erfi, Gamma, FunctionOfTrigOfLinearQ, ElementaryFunctionQ, Complex, UnsameQ, _SimpFixFactor, SimpFixFactor, _FixSimplify, FixSimplify, _SimplifyAntiderivativeSum, SimplifyAntiderivativeSum, _SimplifyAntiderivative, SimplifyAntiderivative, _TrigSimplifyAux, TrigSimplifyAux, Cancel, Part, PolyLog, D, Dist)
from sympy import Integral, S, sqrt
from sympy.integrals.rubi.symbol import WC
from sympy.core.symbol import symbols, Symbol
from sympy.functions import (log, sin, cos, tan, cot, csc, sec, sqrt, erf, exp, log)
from sympy.functions.elementary.hyperbolic import (acosh, asinh, atanh, acoth, acsch, asech, cosh, sinh, tanh, coth, sech, csch)
from sympy.functions.elementary.trigonometric import (atan, acsc, asin, acot, acos, asec)
A_, B_, C_, F_, G_, H_, a_, b_, c_, d_, e_, f_, g_, h_, i_, j_, k_, l_, m_, n_, p_, q_, r_, t_, u_, v_, s_, w_, x_, y_, z_ = [WC(i) for i in 'ABCFGHabcdefghijklmnpqrtuvswxyz']
a1_, a2_, b1_, b2_, c1_, c2_, d1_, d2_, n1_, n2_, e1_, e2_, f1_, f2_, g1_, g2_, n1_, n2_, n3_, Pq_, Pm_, Px_, Qm_, Qr_, Qx_, jn_, mn_, non2_, RFx_, RGx_ = [WC(i) for i in ['a1', 'a2', 'b1', 'b2', 'c1', 'c2', 'd1', 'd2', 'n1', 'n2', 'e1', 'e2', 'f1', 'f2', 'g1', 'g2', 'n1', 'n2', 'n3', 'Pq', 'Pm', 'Px', 'Qm', 'Qr', 'Qx', 'jn', 'mn', 'non2', 'RFx', 'RGx']]
_UseGamma = False
def logarithms(rubi):
pattern1 = Pattern(Integral(log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))), x_), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)))
rule1 = ReplacementRule(pattern1, lambda c, f, d, q, x, e, p : -p*q*x + (e + f*x)*log(c*(d*(e + f*x)**p)**q)/f)
rubi.add(rule1)
pattern2 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**n_, x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda n: RationalQ(n)), CustomConstraint(lambda n: Greater(n, S(0))))
rule2 = ReplacementRule(pattern2, lambda c, f, d, a, q, x, n, e, p, b : -b*n*p*q*Int((a + b*log(c*(d*(e + f*x)**p)**q))**(n + S(-1)), x) + (a + b*log(c*(d*(e + f*x)**p)**q))**n*(e + f*x)/f)
rubi.add(rule2)
pattern3 = Pattern(Integral(1/log((x_*WC('f', S(1)) + WC('e', S(0)))*WC('d', S(1))), x_), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)))
rule3 = ReplacementRule(pattern3, lambda x, e, f, d : LogIntegral(d*(e + f*x))/(d*f))
rubi.add(rule3)
pattern4 = Pattern(Integral(1/(WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)))
rule4 = ReplacementRule(pattern4, lambda c, f, d, a, q, x, e, p, b : (c*(d*(e + f*x)**p)**q)**(-S(1)/(p*q))*(e + f*x)*ExpIntegralEi((a + b*log(c*(d*(e + f*x)**p)**q))/(b*p*q))*exp(-a/(b*p*q))/(b*f*p*q))
rubi.add(rule4)
pattern5 = Pattern(Integral(1/sqrt(WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda p, b, q: PosQ(b*p*q)))
rule5 = ReplacementRule(pattern5, lambda c, f, d, a, q, x, e, p, b : sqrt(Pi)*(c*(d*(e + f*x)**p)**q)**(-S(1)/(p*q))*(e + f*x)*Erfi(sqrt(a + b*log(c*(d*(e + f*x)**p)**q))/Rt(b*p*q, S(2)))*Rt(b*p*q, S(2))*exp(-a/(b*p*q))/(b*f*p*q))
rubi.add(rule5)
pattern6 = Pattern(Integral(1/sqrt(WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda p, b, q: NegQ(b*p*q)))
rule6 = ReplacementRule(pattern6, lambda c, f, d, a, q, x, e, p, b : sqrt(Pi)*(c*(d*(e + f*x)**p)**q)**(-S(1)/(p*q))*(e + f*x)*Erf(sqrt(a + b*log(c*(d*(e + f*x)**p)**q))/Rt(-b*p*q, S(2)))*Rt(-b*p*q, S(2))*exp(-a/(b*p*q))/(b*f*p*q))
rubi.add(rule6)
pattern7 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**n_, x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda n: RationalQ(n)), CustomConstraint(lambda n: Less(n, S(-1))))
rule7 = ReplacementRule(pattern7, lambda c, f, d, a, q, x, n, e, p, b : -Int((a + b*log(c*(d*(e + f*x)**p)**q))**(n + S(1)), x)/(b*p*q*(n + S(1))) + (a + b*log(c*(d*(e + f*x)**p)**q))**(n + S(1))*(e + f*x)/(b*f*p*q*(n + S(1))))
rubi.add(rule7)
pattern8 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**n_, x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda n: Not(IntegerQ(S(2)*n))))
rule8 = ReplacementRule(pattern8, lambda c, f, d, a, q, x, n, e, p, b : (c*(d*(e + f*x)**p)**q)**(-S(1)/(p*q))*((-a - b*log(c*(d*(e + f*x)**p)**q))/(b*p*q))**(-n)*(a + b*log(c*(d*(e + f*x)**p)**q))**n*(e + f*x)*Gamma(n + S(1), (-a - b*log(c*(d*(e + f*x)**p)**q))/(b*p*q))*exp(-a/(b*p*q))/f)
rubi.add(rule8)
pattern9 = Pattern(Integral(S(1)/((x_*WC('h', S(1)) + WC('g', S(0)))*(WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda g, e, f, h: ZeroQ(-e*h + f*g)))
rule9 = ReplacementRule(pattern9, lambda g, c, f, d, a, q, x, e, h, p, b : log(RemoveContent(a + b*log(c*(d*(e + f*x)**p)**q), x))/(b*h*p*q))
rubi.add(rule9)
pattern10 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**WC('n', S(1))/(x_*WC('h', S(1)) + WC('g', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda g, e, f, h: ZeroQ(-e*h + f*g)), CustomConstraint(lambda n: NonzeroQ(n + S(1))))
rule10 = ReplacementRule(pattern10, lambda g, c, f, d, a, q, n, x, e, h, p, b : (a + b*log(c*(d*(e + f*x)**p)**q))**(n + S(1))/(b*h*p*q*(n + S(1))))
rubi.add(rule10)
pattern11 = Pattern(Integral((x_*WC('h', S(1)) + WC('g', S(0)))**WC('m', S(1))*(WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**WC('n', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda g, e, f, h: ZeroQ(-e*h + f*g)), CustomConstraint(lambda m: NonzeroQ(m + S(1))), CustomConstraint(lambda n: RationalQ(n)), CustomConstraint(lambda n: Greater(n, S(0))))
rule11 = ReplacementRule(pattern11, lambda m, g, c, f, d, a, q, n, x, e, h, p, b : -b*n*p*q*Int((a + b*log(c*(d*(e + f*x)**p)**q))**(n + S(-1))*(g + h*x)**m, x)/(m + S(1)) + (a + b*log(c*(d*(e + f*x)**p)**q))**n*(g + h*x)**(m + S(1))/(h*(m + S(1))))
rubi.add(rule11)
pattern12 = Pattern(Integral((x_*WC('h', S(1)) + WC('g', S(0)))**WC('m', S(1))/log((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1))), x_), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda m, p: ZeroQ(m - p + S(1))), CustomConstraint(lambda g, e, f, h: ZeroQ(-e*h + f*g)), CustomConstraint(lambda p, f, h: IntegerQ(p) | PositiveQ(h/f)))
rule12 = ReplacementRule(pattern12, lambda m, g, f, x, d, e, h, p : (h/f)**(p + S(-1))*LogIntegral(d*(e + f*x)**p)/(d*f*p))
rubi.add(rule12)
pattern13 = Pattern(Integral((x_*WC('h', S(1)) + WC('g', S(0)))**m_/log((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1))), x_), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda m, p: ZeroQ(m - p + S(1))), CustomConstraint(lambda g, e, f, h: ZeroQ(-e*h + f*g)), CustomConstraint(lambda p, f, h: Not(IntegerQ(p) | PositiveQ(h/f))))
rule13 = ReplacementRule(pattern13, lambda m, g, f, x, d, e, h, p : (e + f*x)**(-p + S(1))*(g + h*x)**(p + S(-1))*Int((e + f*x)**(p + S(-1))/log(d*(e + f*x)**p), x))
rubi.add(rule13)
pattern14 = Pattern(Integral((x_*WC('h', S(1)) + WC('g', S(0)))**WC('m', S(1))/(WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda g, e, f, h: ZeroQ(-e*h + f*g)), CustomConstraint(lambda m: NonzeroQ(m + S(1))))
rule14 = ReplacementRule(pattern14, lambda m, g, c, f, d, a, q, x, e, h, p, b : (c*(d*(e + f*x)**p)**q)**(-(m + S(1))/(p*q))*(g + h*x)**(m + S(1))*ExpIntegralEi((a + b*log(c*(d*(e + f*x)**p)**q))*(m + S(1))/(b*p*q))*exp(-a*(m + S(1))/(b*p*q))/(b*h*p*q))
rubi.add(rule14)
pattern15 = Pattern(Integral((x_*WC('h', S(1)) + WC('g', S(0)))**WC('m', S(1))/sqrt(WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda g, e, f, h: ZeroQ(-e*h + f*g)), CustomConstraint(lambda m: NonzeroQ(m + S(1))), CustomConstraint(lambda m, p, b, q: PosQ((m + S(1))/(b*p*q))))
rule15 = ReplacementRule(pattern15, lambda m, g, c, f, d, a, q, x, e, h, p, b : sqrt(Pi)*(c*(d*(e + f*x)**p)**q)**(-(m + S(1))/(p*q))*(g + h*x)**(m + S(1))*Erfi(sqrt(a + b*log(c*(d*(e + f*x)**p)**q))*Rt((m + S(1))/(b*p*q), S(2)))*exp(-a*(m + S(1))/(b*p*q))/(b*h*p*q*Rt((m + S(1))/(b*p*q), S(2))))
rubi.add(rule15)
pattern16 = Pattern(Integral((x_*WC('h', S(1)) + WC('g', S(0)))**WC('m', S(1))/sqrt(WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda g, e, f, h: ZeroQ(-e*h + f*g)), CustomConstraint(lambda m: NonzeroQ(m + S(1))), CustomConstraint(lambda m, p, b, q: NegQ((m + S(1))/(b*p*q))))
rule16 = ReplacementRule(pattern16, lambda m, g, c, f, d, a, q, x, e, h, p, b : sqrt(Pi)*(c*(d*(e + f*x)**p)**q)**(-(m + S(1))/(p*q))*(g + h*x)**(m + S(1))*Erf(sqrt(a + b*log(c*(d*(e + f*x)**p)**q))*Rt((-m + S(-1))/(b*p*q), S(2)))*exp(-a*(m + S(1))/(b*p*q))/(b*h*p*q*Rt((-m + S(-1))/(b*p*q), S(2))))
rubi.add(rule16)
pattern17 = Pattern(Integral((x_*WC('h', S(1)) + WC('g', S(0)))**WC('m', S(1))*(WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**n_, x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda g, e, f, h: ZeroQ(-e*h + f*g)), CustomConstraint(lambda m: NonzeroQ(m + S(1))), CustomConstraint(lambda n: RationalQ(n)), CustomConstraint(lambda n: Less(n, S(-1))))
rule17 = ReplacementRule(pattern17, lambda m, g, c, f, d, a, q, x, n, e, h, p, b : -(m + S(1))*Int((a + b*log(c*(d*(e + f*x)**p)**q))**(n + S(1))*(g + h*x)**m, x)/(b*p*q*(n + S(1))) + (a + b*log(c*(d*(e + f*x)**p)**q))**(n + S(1))*(g + h*x)**(m + S(1))/(b*h*p*q*(n + S(1))))
rubi.add(rule17)
pattern18 = Pattern(Integral((x_*WC('h', S(1)) + WC('g', S(0)))**WC('m', S(1))*(WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**WC('n', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda g, e, f, h: ZeroQ(-e*h + f*g)), CustomConstraint(lambda m: NonzeroQ(m + S(1))))
rule18 = ReplacementRule(pattern18, lambda m, g, c, f, d, a, q, n, x, e, h, p, b : (c*(d*(e + f*x)**p)**q)**(-(m + S(1))/(p*q))*((a + b*log(c*(d*(e + f*x)**p)**q))*(-m + S(-1))/(b*p*q))**(-n)*(a + b*log(c*(d*(e + f*x)**p)**q))**n*(g + h*x)**(m + S(1))*Gamma(n + S(1), (a + b*log(c*(d*(e + f*x)**p)**q))*(-m + S(-1))/(b*p*q))*exp(-a*(m + S(1))/(b*p*q))/(h*(m + S(1))))
rubi.add(rule18)
pattern19 = Pattern(Integral(log((x_*WC('f', S(1)) + WC('e', S(0)))*WC('c', S(1)))/(x_*WC('h', S(1)) + WC('g', S(0))), x_), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda g, c, f, e, h: ZeroQ(c*(-e*h + f*g) + h)))
rule19 = ReplacementRule(pattern19, lambda g, c, f, x, e, h : -PolyLog(S(2), -c*f*(g + h*x)/h)/h)
rubi.add(rule19)
pattern20 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log((x_*WC('f', S(1)) + WC('e', S(0)))*WC('c', S(1))))/(x_*WC('h', S(1)) + WC('g', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda g, c, f, e, h: NonzeroQ(c*(-e*h + f*g) + h)), CustomConstraint(lambda g, c, f, e, h: PositiveQ(c*(e - f*g/h))))
rule20 = ReplacementRule(pattern20, lambda g, c, f, a, x, e, h, b : b*Int(log(-h*(e + f*x)/(-e*h + f*g))/(g + h*x), x) + (a + b*log(c*(e - f*g/h)))*log(g + h*x)/h)
rubi.add(rule20)
pattern21 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**WC('n', S(1))/(x_*WC('h', S(1)) + WC('g', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda g, e, f, h: NonzeroQ(-e*h + f*g)), CustomConstraint(lambda n: PositiveIntegerQ(n)))
rule21 = ReplacementRule(pattern21, lambda g, c, f, d, a, q, n, x, e, h, p, b : -b*f*n*p*q*Int((a + b*log(c*(d*(e + f*x)**p)**q))**(n + S(-1))*log(f*(g + h*x)/(-e*h + f*g))/(e + f*x), x)/h + (a + b*log(c*(d*(e + f*x)**p)**q))**n*log(f*(g + h*x)/(-e*h + f*g))/h)
rubi.add(rule21)
pattern22 = Pattern(Integral((x_*WC('h', S(1)) + WC('g', S(0)))**WC('m', S(1))*(WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda g, e, f, h: NonzeroQ(-e*h + f*g)), CustomConstraint(lambda m: NonzeroQ(m + S(1))))
rule22 = ReplacementRule(pattern22, lambda m, g, c, f, d, a, q, x, e, h, p, b : -b*f*p*q*Int((g + h*x)**(m + S(1))/(e + f*x), x)/(h*(m + S(1))) + (a + b*log(c*(d*(e + f*x)**p)**q))*(g + h*x)**(m + S(1))/(h*(m + S(1))))
rubi.add(rule22)
pattern23 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**n_/(x_*WC('h', S(1)) + WC('g', S(0)))**S(2), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda g, e, f, h: NonzeroQ(-e*h + f*g)), CustomConstraint(lambda n: RationalQ(n)), CustomConstraint(lambda n: Greater(n, S(0))))
rule23 = ReplacementRule(pattern23, lambda g, c, f, d, a, q, x, n, e, h, p, b : -b*f*n*p*q*Int((a + b*log(c*(d*(e + f*x)**p)**q))**(n + S(-1))/(g + h*x), x)/(-e*h + f*g) + (a + b*log(c*(d*(e + f*x)**p)**q))**n*(e + f*x)/((g + h*x)*(-e*h + f*g)))
rubi.add(rule23)
pattern24 = Pattern(Integral((x_*WC('h', S(1)) + WC('g', S(0)))**WC('m', S(1))*(WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**n_, x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda g, e, f, h: NonzeroQ(-e*h + f*g)), CustomConstraint(lambda n: RationalQ(n)), CustomConstraint(lambda n: Greater(n, S(0))), CustomConstraint(lambda m: NonzeroQ(m + S(1))), CustomConstraint(lambda m, n: IntegersQ(S(2)*m, S(2)*n)), CustomConstraint(lambda m, n: Equal(n, S(1)) | Not(PositiveIntegerQ(m)) | (Equal(n, S(2)) & NonzeroQ(m + S(-1)))))
rule24 = ReplacementRule(pattern24, lambda m, g, c, f, d, a, q, x, n, e, h, p, b : -b*f*n*p*q*Int((a + b*log(c*(d*(e + f*x)**p)**q))**(n + S(-1))*(g + h*x)**(m + S(1))/(e + f*x), x)/(h*(m + S(1))) + (a + b*log(c*(d*(e + f*x)**p)**q))**n*(g + h*x)**(m + S(1))/(h*(m + S(1))))
rubi.add(rule24)
pattern25 = Pattern(Integral((x_*WC('h', S(1)) + WC('g', S(0)))**WC('m', S(1))/(WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda g, e, f, h: NonzeroQ(-e*h + f*g)), CustomConstraint(lambda m: PositiveIntegerQ(m)))
rule25 = ReplacementRule(pattern25, lambda m, g, c, f, d, a, q, x, e, h, p, b : Int(ExpandIntegrand((g + h*x)**m/(a + b*log(c*(d*(e + f*x)**p)**q)), x), x))
rubi.add(rule25)
pattern26 = Pattern(Integral((x_*WC('h', S(1)) + WC('g', S(0)))**WC('m', S(1))*(WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**n_, x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda g, e, f, h: NonzeroQ(-e*h + f*g)), CustomConstraint(lambda m, n: RationalQ(m, n)), CustomConstraint(lambda n: Less(n, S(-1))), CustomConstraint(lambda m: Greater(m, S(0))))
rule26 = ReplacementRule(pattern26, lambda m, g, c, f, d, a, q, x, n, e, h, p, b : -(m + S(1))*Int((a + b*log(c*(d*(e + f*x)**p)**q))**(n + S(1))*(g + h*x)**m, x)/(b*p*q*(n + S(1))) + m*(-e*h + f*g)*Int((a + b*log(c*(d*(e + f*x)**p)**q))**(n + S(1))*(g + h*x)**(m + S(-1)), x)/(b*f*p*q*(n + S(1))) + (a + b*log(c*(d*(e + f*x)**p)**q))**(n + S(1))*(e + f*x)*(g + h*x)**m/(b*f*p*q*(n + S(1))))
rubi.add(rule26)
pattern27 = Pattern(Integral((x_*WC('h', S(1)) + WC('g', S(0)))**WC('m', S(1))*(WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**n_, x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda g, e, f, h: NonzeroQ(-e*h + f*g)), CustomConstraint(lambda m: PositiveIntegerQ(m)))
rule27 = ReplacementRule(pattern27, lambda m, g, c, f, d, a, q, x, n, e, h, p, b : Int(ExpandIntegrand((a + b*log(c*(d*(e + f*x)**p)**q))**n*(g + h*x)**m, x), x))
rubi.add(rule27)
pattern28 = Pattern(Integral(u_**WC('m', S(1))*(WC('a', S(0)) + WC('b', S(1))*log((v_**p_*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**WC('n', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda x, v, u: LinearQ(List(u, v), x)), CustomConstraint(lambda x, v, u: Not(LinearMatchQ(List(u, v), x))))
rule28 = ReplacementRule(pattern28, lambda m, c, n, d, a, q, x, u, p, v, b : Int((a + b*log(c*(d*ExpandToSum(v, x)**p)**q))**n*ExpandToSum(u, x)**m, x))
rubi.add(rule28)
pattern29 = Pattern(Integral((x_*WC('h', S(1)) + WC('g', S(0)))**WC('m', S(1))*(WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**WC('n', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)))
rule29 = ReplacementRule(pattern29, lambda m, g, c, f, d, a, q, n, x, e, h, p, b : Int((a + b*log(c*(d*(e + f*x)**p)**q))**n*(g + h*x)**m, x))
rubi.add(rule29)
pattern30 = Pattern(Integral(log(WC('c', S(1))/(x_*WC('f', S(1)) + WC('e', S(0))))/((x_*WC('h', S(1)) + WC('g', S(0)))*(x_*WC('j', S(1)) + WC('i', S(0)))), x_), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda i, x: FreeQ(i, x)), CustomConstraint(lambda j, x: FreeQ(j, x)), CustomConstraint(lambda g, e, f, h: ZeroQ(-e*h + f*g)), CustomConstraint(lambda c, f, i, j, e: ZeroQ(f*i + j*(c - e))))
rule30 = ReplacementRule(pattern30, lambda g, c, f, i, x, j, e, h : f*PolyLog(S(2), f*(i + j*x)/(j*(e + f*x)))/(h*(-e*j + f*i)))
rubi.add(rule30)
pattern31 = Pattern(Integral((a_ + WC('b', S(1))*log(WC('c', S(1))/(x_*WC('f', S(1)) + WC('e', S(0)))))/((x_*WC('h', S(1)) + WC('g', S(0)))*(x_*WC('j', S(1)) + WC('i', S(0)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda i, x: FreeQ(i, x)), CustomConstraint(lambda j, x: FreeQ(j, x)), CustomConstraint(lambda g, e, f, h: ZeroQ(-e*h + f*g)), CustomConstraint(lambda c, f, i, j, e: ZeroQ(f*i + j*(c - e))))
rule31 = ReplacementRule(pattern31, lambda g, c, f, i, x, a, j, e, h, b : a*Int(S(1)/((g + h*x)*(i + j*x)), x) + b*Int(log(c/(e + f*x))/((g + h*x)*(i + j*x)), x))
rubi.add(rule31)
pattern32 = Pattern(Integral((x_*WC('j', S(1)) + WC('i', S(0)))**WC('m', S(1))*(WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))/(x_*WC('h', S(1)) + WC('g', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda i, x: FreeQ(i, x)), CustomConstraint(lambda j, x: FreeQ(j, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda g, e, f, h: ZeroQ(-e*h + f*g)), CustomConstraint(lambda m: IntegerQ(m + S(1)/2)), )
def With32(m, g, c, f, d, a, q, i, j, e, x, h, p, b):
    """Helper for rule32: integrate (i + j*x)**m * (a + b*log(c*(d*(e + f*x)**p)**q)) / (g + h*x).

    Computes the antiderivative u of the rational factor first, then combines
    it with the logarithmic factor in an integration-by-parts shape.
    """
    # Antiderivative of the purely rational part of the integrand.
    u = IntHide((i + j*x)**m/(g + h*x), x)
    # Integration-by-parts form: (log factor)*u minus the integral of u times
    # the derivative of the log factor.  Under pattern32's constraint
    # e*h == f*g, that derivative b*f*p*q/(e + f*x) equals b*h*p*q/(g + h*x),
    # which is why the correction integral is taken over u/(g + h*x).
    return -b*h*p*q*Int(SimplifyIntegrand(u/(g + h*x), x), x) + Dist(a + b*log(c*(d*(e + f*x)**p)**q), u)
# Rule 32 (pattern32/With32 defined above this chunk): register the replacement.
rule32 = ReplacementRule(pattern32, lambda m, g, c, f, d, a, q, i, j, e, x, h, p, b : With32(m, g, c, f, d, a, q, i, j, e, x, h, p, b))
rubi.add(rule32)
# Rule 33: Int[(i + j*x)^m (a + b*Log[c*(e + f*x)])^n / (g + h*x), x] with
# f*g == e*h (so g + h*x is proportional to e + f*x); handled by the
# substitution x -> Log[c*(e + f*x)], turning the integrand into an
# exponential-polynomial form.
pattern33 = Pattern(Integral((x_*WC('j', S(1)) + WC('i', S(0)))**WC('m', S(1))*(WC('a', S(0)) + WC('b', S(1))*log((x_*WC('f', S(1)) + WC('e', S(0)))*WC('c', S(1))))**WC('n', S(1))/(x_*WC('h', S(1)) + WC('g', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda i, x: FreeQ(i, x)), CustomConstraint(lambda j, x: FreeQ(j, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda g, e, f, h: ZeroQ(-e*h + f*g)), CustomConstraint(lambda m: PositiveIntegerQ(m)), CustomConstraint(lambda m, n: IntegerQ(n) | Greater(m, S(0))))
rule33 = ReplacementRule(pattern33, lambda m, g, c, f, a, n, i, j, x, e, h, b : c**(-m)*f**(-m)*Subst(Int((a + b*x)**n*(-c*e*j + c*f*i + j*exp(x))**m, x), x, log(c*(e + f*x)))/h)
rubi.add(rule33)
# Rule 34: same quotient shape but with the nested log argument
# c*(d*(e + f*x)^p)^q and integer m, positive integer n; expand the
# integrand termwise and integrate the sum.
# NOTE(review): the final CustomConstraint(lambda u, x: SumQ(u)) references a
# variable `u` that the pattern does not bind — presumably a generator
# artifact (the SumQ check logically belongs to With34's expansion result);
# verify against the rule generator before relying on it.
pattern34 = Pattern(Integral((x_*WC('j', S(1)) + WC('i', S(0)))**WC('m', S(1))*(WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**WC('n', S(1))/(x_*WC('h', S(1)) + WC('g', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda i, x: FreeQ(i, x)), CustomConstraint(lambda j, x: FreeQ(j, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda m: IntegerQ(m)), CustomConstraint(lambda n: PositiveIntegerQ(n)), CustomConstraint(lambda u, x: SumQ(u)))
def With34(m, g, c, f, d, a, q, n, i, e, j, x, h, p, b):
    # Expand (log-power)^n against the rational factor, then integrate the sum.
    u = ExpandIntegrand((a + b*log(c*(d*(e + f*x)**p)**q))**n, (i + j*x)**m/(g + h*x), x)
    return Int(u, x)
rule34 = ReplacementRule(pattern34, lambda m, g, c, f, d, a, q, n, i, e, j, x, h, p, b : With34(m, g, c, f, d, a, q, n, i, e, j, x, h, p, b))
rubi.add(rule34)
# Rule 35: catch-all for Int[(i + j*x)^m (a + b*Log[c*(d*(e + f*x)^p)^q])^n / (g + h*x), x]
# with no structural constraints — returns the integral unevaluated (Int(...)).
pattern35 = Pattern(Integral((x_*WC('j', S(1)) + WC('i', S(0)))**WC('m', S(1))*(WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**WC('n', S(1))/(x_*WC('h', S(1)) + WC('g', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda i, x: FreeQ(i, x)), CustomConstraint(lambda j, x: FreeQ(j, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)))
rule35 = ReplacementRule(pattern35, lambda m, g, c, f, d, a, q, n, i, e, j, x, h, p, b : Int((a + b*log(c*(d*(e + f*x)**p)**q))**n*(i + j*x)**m/(g + h*x), x))
rubi.add(rule35)
# Rule 36: Int[Log[c/(e + f*x)] / (g + h*x^2), x] with e^2*h + f^2*g == 0 and
# c == 2*e; closed form via the dilogarithm PolyLog(2, ...).
pattern36 = Pattern(Integral(log(WC('c', S(1))/(x_*WC('f', S(1)) + WC('e', S(0))))/(g_ + x_**S(2)*WC('h', S(1))), x_), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda g, e, f, h: ZeroQ(e**S(2)*h + f**S(2)*g)), CustomConstraint(lambda e, c: ZeroQ(c - S(2)*e)))
rule36 = ReplacementRule(pattern36, lambda g, x, c, f, e, h : -f*PolyLog(S(2), (-e + f*x)/(e + f*x))/(S(2)*e*h))
rubi.add(rule36)
# Rule 37: same denominator, general a + b*Log[c/(e + f*x)] with c/(2e) > 0;
# split off the constant a + b*Log[c/(2e)] so rule 36's normalized log remains.
pattern37 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log(WC('c', S(1))/(x_*WC('f', S(1)) + WC('e', S(0)))))/(g_ + x_**S(2)*WC('h', S(1))), x_), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda g, e, f, h: ZeroQ(e**S(2)*h + f**S(2)*g)), CustomConstraint(lambda e, c: PositiveQ(c/(S(2)*e))), CustomConstraint(lambda e, c, a: NonzeroQ(a) | NonzeroQ(c - S(2)*e)))
rule37 = ReplacementRule(pattern37, lambda g, c, f, a, x, e, h, b : b*Int(log(S(2)*e/(e + f*x))/(g + h*x**S(2)), x) + (a + b*log(c/(S(2)*e)))*Int(1/(g + h*x**S(2)), x))
rubi.add(rule37)
# Rule 38: quadratic denominator g + h*x + i*x^2 with e^2*i - e*f*h + f^2*g == 0,
# i.e. the quadratic factors as (e + f*x)*(e*i*x + f*g)/(e*f); rewrite accordingly.
pattern38 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))/(x_**S(2)*WC('i', S(1)) + x_*WC('h', S(1)) + WC('g', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda i, x: FreeQ(i, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda g, f, i, e, h: ZeroQ(e**S(2)*i - e*f*h + f**S(2)*g)))
rule38 = ReplacementRule(pattern38, lambda g, c, f, d, a, q, i, x, e, h, p, b : e*f*Int((a + b*log(c*(d*(e + f*x)**p)**q))/((e + f*x)*(e*i*x + f*g)), x))
rubi.add(rule38)
# Rule 39: the h == 0 specialization of rule 38 (denominator g + i*x^2 with
# e^2*i + f^2*g == 0); same factorization of the denominator.
pattern39 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log(((e_ + x_*WC('f', S(1)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))/(g_ + x_**S(2)*WC('i', S(1))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda i, x: FreeQ(i, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda g, e, f, i: ZeroQ(e**S(2)*i + f**S(2)*g)))
rule39 = ReplacementRule(pattern39, lambda g, c, f, d, a, q, i, x, e, p, b : e*f*Int((a + b*log(c*(d*(e + f*x)**p)**q))/((e + f*x)*(e*i*x + f*g)), x))
rubi.add(rule39)
# Rule 40: Int[(a + b*Log[c*(d*(e + f*x)^p)^q]) / Sqrt[g + h*x^2], x] with g > 0;
# integrate 1/sqrt first (IntHide), then integrate by parts against the log.
pattern40 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))/sqrt(g_ + x_**S(2)*WC('h', S(1))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda g: PositiveQ(g)), )
def With40(g, c, f, d, a, q, x, e, h, p, b):
    # u = antiderivative of the algebraic factor; d/dx of the log factor is
    # b*f*p*q/(e + f*x), which produces the remaining integral.
    u = IntHide(1/sqrt(g + h*x**S(2)), x)
    return -b*f*p*q*Int(SimplifyIntegrand(u/(e + f*x), x), x) + u*(a + b*log(c*(d*(e + f*x)**p)**q))
rule40 = ReplacementRule(pattern40, lambda g, c, f, d, a, q, x, e, h, p, b : With40(g, c, f, d, a, q, x, e, h, p, b))
rubi.add(rule40)
# Rule 41: split-radical variant sqrt(g1 + h1*x)*sqrt(g2 + h2*x) with
# g1*h2 + g2*h1 == 0 and g1, g2 > 0, so the product equals sqrt(g1*g2 + h1*h2*x^2);
# same integration-by-parts strategy as rule 40.
pattern41 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))/(sqrt(g1_ + x_*WC('h1', S(1)))*sqrt(g2_ + x_*WC('h2', S(1)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g1, x: FreeQ(g1, x)), CustomConstraint(lambda h1, x: FreeQ(h1, x)), CustomConstraint(lambda g2, x: FreeQ(g2, x)), CustomConstraint(lambda h2, x: FreeQ(h2, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda h2, h1, g2, g1: ZeroQ(g1*h2 + g2*h1)), CustomConstraint(lambda g1: PositiveQ(g1)), CustomConstraint(lambda g2: PositiveQ(g2)), )
def With41(c, f, d, a, q, h1, x, e, p, g2, g1, b, h2):
    # Combine the two radicals into one quadratic radical, then proceed as With40.
    u = IntHide(1/sqrt(g1*g2 + h1*h2*x**S(2)), x)
    return -b*f*p*q*Int(SimplifyIntegrand(u/(e + f*x), x), x) + u*(a + b*log(c*(d*(e + f*x)**p)**q))
rule41 = ReplacementRule(pattern41, lambda c, f, d, a, q, h1, x, e, p, g2, g1, b, h2 : With41(c, f, d, a, q, h1, x, e, p, g2, g1, b, h2))
rubi.add(rule41)
# Rule 42: the Not(PositiveQ(g)) complement of rule 40 — normalize
# sqrt(g + h*x^2) to sqrt(g)*sqrt(1 + h*x^2/g) via a correction quotient so
# rule 40 applies to the normalized form.
pattern42 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))/sqrt(g_ + x_**S(2)*WC('h', S(1))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda g: Not(PositiveQ(g))))
rule42 = ReplacementRule(pattern42, lambda g, c, f, d, a, q, x, e, h, p, b : sqrt(S(1) + h*x**S(2)/g)*Int((a + b*log(c*(d*(e + f*x)**p)**q))/sqrt(S(1) + h*x**S(2)/g), x)/sqrt(g + h*x**S(2)))
rubi.add(rule42)
# Rule 43: split-radical complement of rule 41 (no positivity assumptions on
# g1/g2); same normalization trick with the combined radical.
pattern43 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))/(sqrt(g1_ + x_*WC('h1', S(1)))*sqrt(g2_ + x_*WC('h2', S(1)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g1, x: FreeQ(g1, x)), CustomConstraint(lambda h1, x: FreeQ(h1, x)), CustomConstraint(lambda g2, x: FreeQ(g2, x)), CustomConstraint(lambda h2, x: FreeQ(h2, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda h2, h1, g2, g1: ZeroQ(g1*h2 + g2*h1)))
rule43 = ReplacementRule(pattern43, lambda c, f, d, a, q, h1, x, e, p, g2, g1, b, h2 : sqrt(S(1) + h1*h2*x**S(2)/(g1*g2))*Int((a + b*log(c*(d*(e + f*x)**p)**q))/sqrt(S(1) + h1*h2*x**S(2)/(g1*g2)), x)/(sqrt(g1 + h1*x)*sqrt(g2 + h2*x)))
rubi.add(rule43)
# Rule 44: Int[(a + b*Log[...])^n Log[i*(j + k*x)] / (g + h*x), x], n > 0
# rational, with h == i*(h*j - g*k); reduce by parts to a PolyLog(2, ...) term
# plus a lower-power-n integral.
pattern44 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**WC('n', S(1))*log((x_*WC('k', S(1)) + WC('j', S(0)))*WC('i', S(1)))/(x_*WC('h', S(1)) + WC('g', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda i, x: FreeQ(i, x)), CustomConstraint(lambda j, x: FreeQ(j, x)), CustomConstraint(lambda k, x: FreeQ(k, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda n: RationalQ(n)), CustomConstraint(lambda n: Greater(n, S(0))), CustomConstraint(lambda g, k, i, j, h: ZeroQ(h - i*(-g*k + h*j))))
rule44 = ReplacementRule(pattern44, lambda g, k, c, i, a, d, f, j, q, e, n, x, h, p, b : b*f*n*p*q*Int((a + b*log(c*(d*(e + f*x)**p)**q))**(n + S(-1))*PolyLog(S(2), -i*(j + k*x) + S(1))/(e + f*x), x)/h - (a + b*log(c*(d*(e + f*x)**p)**q))**n*PolyLog(S(2), -i*(j + k*x) + S(1))/h)
rubi.add(rule44)
# Rule 45: variant with Log[1 + i*(j + k*x)^m] in the numerator and
# g*k == h*j; same by-parts reduction, divided by the extra power m.
pattern45 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**WC('n', S(1))*log((x_*WC('k', S(1)) + WC('j', S(0)))**WC('m', S(1))*WC('i', S(1)) + S(1))/(x_*WC('h', S(1)) + WC('g', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda i, x: FreeQ(i, x)), CustomConstraint(lambda j, x: FreeQ(j, x)), CustomConstraint(lambda k, x: FreeQ(k, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda n: RationalQ(n)), CustomConstraint(lambda n: Greater(n, S(0))), CustomConstraint(lambda j, g, k, h: ZeroQ(-g*k + h*j)))
rule45 = ReplacementRule(pattern45, lambda m, g, k, c, i, a, d, f, j, q, e, n, x, h, p, b : b*f*n*p*q*Int((a + b*log(c*(d*(e + f*x)**p)**q))**(n + S(-1))*PolyLog(S(2), -i*(j + k*x)**m)/(e + f*x), x)/(h*m) - (a + b*log(c*(d*(e + f*x)**p)**q))**n*PolyLog(S(2), -i*(j + k*x)**m)/(h*m))
rubi.add(rule45)
# Rule 46: general PolyLog(r, i*(j + k*x)^m) factor with g*k == h*j; by parts
# raises the PolyLog order r -> r + 1 while lowering the log power n.
pattern46 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**WC('n', S(1))*PolyLog(r_, (x_*WC('k', S(1)) + WC('j', S(0)))**WC('m', S(1))*WC('i', S(1)))/(x_*WC('h', S(1)) + WC('g', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda i, x: FreeQ(i, x)), CustomConstraint(lambda j, x: FreeQ(j, x)), CustomConstraint(lambda k, x: FreeQ(k, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda r, x: FreeQ(r, x)), CustomConstraint(lambda n: RationalQ(n)), CustomConstraint(lambda n: Greater(n, S(0))), CustomConstraint(lambda j, g, k, h: ZeroQ(-g*k + h*j)))
rule46 = ReplacementRule(pattern46, lambda m, g, k, c, f, d, a, q, n, i, e, j, x, h, p, r, b : -b*f*n*p*q*Int((a + b*log(c*(d*(e + f*x)**p)**q))**(n + S(-1))*PolyLog(r + S(1), i*(j + k*x)**m)/(e + f*x), x)/(h*m) + (a + b*log(c*(d*(e + f*x)**p)**q))**n*PolyLog(r + S(1), i*(j + k*x)**m)/(h*m))
rubi.add(rule46)
# Rule 47: Int[Px * F(g + h*x)^m * (a + b*Log[c*(d*(e + f*x)^p)^q]), x] where
# Px is a polynomial, m is a positive integer, and F is one of Log/ArcSin/...
# (the inverse-function heads listed in the MemberQ constraint). Integrate the
# Px*F^m part first (IntHide), then integrate by parts against the outer log.
pattern47 = Pattern(Integral(F_**(x_*WC('h', S(1)) + WC('g', S(0)))*(WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))*WC('Px', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda x, Px: PolynomialQ(Px, x)), CustomConstraint(lambda m: PositiveIntegerQ(m)), CustomConstraint(lambda F: MemberQ(List(Log, ArcSin, ArcCos, ArcTan, ArcCot, ArcSinh, ArcCosh, ArcTanh, ArcCoth), F)), )
def With47(m, g, c, f, d, a, q, x, F, e, h, p, b, Px):
    # u = antiderivative of Px*F(g + h*x)^m; the by-parts remainder integrates
    # u times d/dx of the log factor, i.e. b*f*p*q/(e + f*x).
    u = IntHide(Px*F(g + h*x)**m, x)
    return -b*f*p*q*Int(SimplifyIntegrand(u/(e + f*x), x), x) + Dist(a + b*log(c*(d*(e + f*x)**p)**q), u, x)
rule47 = ReplacementRule(pattern47, lambda m, g, c, f, d, a, q, x, F, e, h, p, b, Px : With47(m, g, c, f, d, a, q, x, F, e, h, p, b, Px))
rubi.add(rule47)
# Rule 48: Int[(a + b*Log[c*(d*(e + f*x^m)^p)^q])^n / x, x], positive integer n;
# substitute x -> x^m (d(x^m)/x^m = m dx/x), reducing to the linear-argument case.
pattern48 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log(((e_ + x_**m_*WC('f', S(1)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**WC('n', S(1))/x_, x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda n: PositiveIntegerQ(n)))
rule48 = ReplacementRule(pattern48, lambda m, c, f, d, a, q, n, x, e, p, b : Subst(Int((a + b*log(c*(d*(e + f*x)**p)**q))**n/x, x), x, x**m)/m)
rubi.add(rule48)
# Rule 49: argument written as x^m*(f + e*x^r) with m + r == 0, which equals
# f*x^m + e; same x -> x^m substitution as rule 48 (note the swapped roles of
# e and f in the substituted integrand).
pattern49 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log(((x_**m_*(f_ + x_**WC('r', S(1))*WC('e', S(1))))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**WC('n', S(1))/x_, x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda m, r: ZeroQ(m + r)), CustomConstraint(lambda n: PositiveIntegerQ(n)))
rule49 = ReplacementRule(pattern49, lambda m, r, c, n, d, a, q, x, f, e, p, b : Subst(Int((a + b*log(c*(d*(e + f*x)**p)**q))**n/x, x), x, x**m)/m)
rubi.add(rule49)
# Rule 50: x^r1 prefactor with r1 == r - 1, i.e. x^(r-1) dx = d(x^r)/r;
# substitute x -> x^r.
pattern50 = Pattern(Integral(x_**WC('r1', S(1))*(WC('a', S(0)) + WC('b', S(1))*log(((x_**r_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**WC('n', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda r, x: FreeQ(r, x)), CustomConstraint(lambda r1, r: ZeroQ(-r + r1 + S(1))))
rule50 = ReplacementRule(pattern50, lambda r, c, f, d, a, q, n, x, e, p, r1, b : Subst(Int((a + b*log(c*(d*(e + f*x)**p)**q))**n, x), x, x**r)/r)
rubi.add(rule50)
# Rule 51: like rule 50 with an extra (g + h*x^r)^m factor; the same x -> x^r
# substitution carries it along as (g + h*x)^m.
pattern51 = Pattern(Integral(x_**WC('r1', S(1))*(x_**r_*WC('h', S(1)) + WC('g', S(0)))**WC('m', S(1))*(WC('a', S(0)) + WC('b', S(1))*log(((x_**r_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**WC('n', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda r, x: FreeQ(r, x)), CustomConstraint(lambda r1, r: ZeroQ(-r + r1 + S(1))))
rule51 = ReplacementRule(pattern51, lambda m, g, r, c, f, d, a, q, n, x, e, h, p, r1, b : Subst(Int((a + b*log(c*(d*(e + f*x)**p)**q))**n*(g + h*x)**m, x), x, x**r)/r)
rubi.add(rule51)
# Rule 52: Int[(a + b*Log[c*x^n]) / (d + e*x^2), x].
pattern52 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log(x_**WC('n', S(1))*WC('c', S(1))))/(d_ + x_**S(2)*WC('e', S(1))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), )
def With52(n, c, a, x, d, e, b):
    # Integrate the rational factor first, then integrate by parts:
    # d/dx [a + b*log(c*x**n)] = b*n/x, which yields the remaining Int(u/x, x).
    u = IntHide(1/(d + e*x**S(2)), x)
    # BUGFIX: Dist takes the integration variable as its third argument, as in
    # every sibling call site in this module (e.g. With47 uses Dist(expr, u, x));
    # the original two-argument call Dist(expr, u) omitted x.
    return -b*n*Int(u/x, x) + Dist(a + b*log(c*x**n), u, x)
rule52 = ReplacementRule(pattern52, lambda n, c, a, x, d, e, b : With52(n, c, a, x, d, e, b))
rubi.add(rule52)
# Rule 53: Int[Log[c*(a + b*x^-n)] / (x*(d + e*x^n)), x] with mn == -n and the
# compatibility condition a*c*d == b*c*e + d; closed form via PolyLog(2, ...).
pattern53 = Pattern(Integral(log((x_**mn_*WC('b', S(1)) + WC('a', S(0)))*WC('c', S(1)))/(x_*(d_ + x_**WC('n', S(1))*WC('e', S(1)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda mn, n: ZeroQ(mn + n)), CustomConstraint(lambda c, d, a, e, b: ZeroQ(-a*c*d + b*c*e + d)))
rule53 = ReplacementRule(pattern53, lambda n, c, a, x, d, e, mn, b : PolyLog(S(2), -b*c*x**(-n)*(d + e*x**n)/d)/(d*n))
rubi.add(rule53)
# Rule 54: same rule body as rule 53 but matching the alternative factored
# spelling of the log argument, x^-n*(b + a*x^n) — the two patterns cover the
# two canonical orderings produced by the matcher.
pattern54 = Pattern(Integral(log(x_**mn_*(x_**WC('n', S(1))*WC('a', S(1)) + WC('b', S(0)))*WC('c', S(1)))/(x_*(d_ + x_**WC('n', S(1))*WC('e', S(1)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda mn, n: ZeroQ(mn + n)), CustomConstraint(lambda c, d, a, e, b: ZeroQ(-a*c*d + b*c*e + d)))
rule54 = ReplacementRule(pattern54, lambda n, c, a, x, d, e, mn, b : PolyLog(S(2), -b*c*x**(-n)*(d + e*x**n)/d)/(d*n))
rubi.add(rule54)
# Rule 55: polynomial Px times the log power — expand termwise and integrate.
pattern55 = Pattern(Integral(Px_*(WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**WC('n', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda x, Px: PolynomialQ(Px, x)))
rule55 = ReplacementRule(pattern55, lambda c, f, d, a, q, n, x, e, p, b, Px : Int(ExpandIntegrand(Px*(a + b*log(c*(d*(e + f*x)**p)**q))**n, x), x))
rubi.add(rule55)
pattern56 = Pattern(Integral(RFx_*(WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**WC('n', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda RFx, x: RationalFunctionQ(RFx, x)), CustomConstraint(lambda n: PositiveIntegerQ(n)), CustomConstraint(lambda u, x: SumQ(u)))
def With56(c, f, d, a, q, n, x, e, RFx, p, b):
u = ExpandIntegrand((a + b*log(c*(d*(e + f*x)**p)**q))**n, RFx, x)
return Int(u, x)
rule56 = ReplacementRule(pattern56, lambda c, f, d, a, q, n, x, e, RFx, p, b : With56(c, f, d, a, q, n, x, e, RFx, p, b))
rubi.add(rule56)
pattern57 = Pattern(Integral(RFx_*(WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**WC('n', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda RFx, x: RationalFunctionQ(RFx, x)), CustomConstraint(lambda n: PositiveIntegerQ(n)), CustomConstraint(lambda u, x: SumQ(u)))
def With57(c, f, d, a, q, n, x, e, RFx, p, b):
    """Body of rule 57: expand the whole product
    RFx*(a + b*log(c*(d*(e + f*x)**p)**q))**n and integrate the result."""
    expansion = ExpandIntegrand(RFx*(a + b*log(c*(d*(e + f*x)**p)**q))**n, x)
    return Int(expansion, x)
# Wire With57 into rule 57.
rule57 = ReplacementRule(pattern57, lambda c, f, d, a, q, n, x, e, RFx, p, b : With57(c, f, d, a, q, n, x, e, RFx, p, b))
rubi.add(rule57)
# Rule 58: log of a quadratic e + f*x + g*x^2 whose discriminant vanishes
# (f^2 - 4*e*g == 0, integer p): rewrite the quadratic as the perfect square
# (f + 2*g*x)^2/(4*g), i.e. d*(e+f*x+g*x^2)^p -> 4^(-p)*d*g^(-p)*(f+2*g*x)^(2*p).
pattern58 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log(((x_**S(2)*WC('g', S(1)) + x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**WC('n', S(1))*WC('u', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda g, e, f: ZeroQ(-S(4)*e*g + f**S(2))), CustomConstraint(lambda p: IntegerQ(p)))
rule58 = ReplacementRule(pattern58, lambda g, c, f, d, a, q, n, x, e, u, p, b : Int(u*(a + b*log(c*(S(4)**(-p)*d*g**(-p)*(f + S(2)*g*x)**(S(2)*p))**q))**n, x))
rubi.add(rule58)
# Rule 59: generic linear argument v inside the log: normalize v with
# ExpandToSum to the canonical e + f*x form.  The Not(MatchQ(...)) constraint
# prevents rewriting an argument that is already in canonical form, which
# would loop.
pattern59 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log((v_**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**WC('n', S(1))*WC('u', S(1)), x_), CustomConstraint(lambda cc, x: FreeQ(cc, x)), CustomConstraint(lambda dd, x: FreeQ(dd, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda pp, x: FreeQ(pp, x)), CustomConstraint(lambda qq, x: FreeQ(qq, x)), CustomConstraint(lambda x, v: LinearQ(v, x)), CustomConstraint(lambda c, x, d, q, p, v: Not(MatchQ(c*(d*v**p)**q, Condition(((x*Optional(Pattern(f, Blank)) + Optional(Pattern(e, Blank)))**Optional(Pattern(pp, Blank))*Optional(Pattern(dd, Blank)))**Optional(Pattern(qq, Blank))*Optional(Pattern(cc, Blank)))))))
rule59 = ReplacementRule(pattern59, lambda c, n, d, a, q, x, u, p, v, b : Int(u*(a + b*log(c*(d*ExpandToSum(v, x)**p)**q))**n, x))
rubi.add(rule59)
# Rule 60: Int[Log[a*(b*(c*x^n)^p)^q]^r, x]: substitute t = x^(n*p*q) so the
# integrand becomes Log[t]^r.
pattern60 = Pattern(Integral(log(((x_**WC('n', S(1))*WC('c', S(1)))**p_*WC('b', S(1)))**q_*WC('a', S(1)))**WC('r', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda r, x: FreeQ(r, x)))
rule60 = ReplacementRule(pattern60, lambda r, n, c, a, x, q, p, b : Subst(Int(log(x**(n*p*q))**r, x), x**(n*p*q), a*(b*(c*x**n)**p)**q))
rubi.add(rule60)
# Rule 61: x^m times the rule-60 integrand, same substitution; the SameQ
# guard avoids an identity substitution (which would not terminate).
pattern61 = Pattern(Integral(x_**WC('m', S(1))*log(((x_**WC('n', S(1))*WC('c', S(1)))**p_*WC('b', S(1)))**q_*WC('a', S(1)))**WC('r', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda r, x: FreeQ(r, x)), CustomConstraint(lambda m: NonzeroQ(m + S(1))), CustomConstraint(lambda n, x, q, a, c, p, b: Not(SameQ(x**(n*p*q), a*(b*(c*x**n)**p)**q))))
rule61 = ReplacementRule(pattern61, lambda m, r, n, c, a, x, q, p, b : Subst(Int(x**m*log(x**(n*p*q))**r, x), x**(n*p*q), a*(b*(c*x**n)**p)**q))
rubi.add(rule61)
# Rule 62: log of the ratio e1*(a+b*x)/(c+d*x) with a*d == b*c: the ratio is
# the constant b*e1/d, so the log factor is constant and can be pulled out
# of the integral.
pattern62 = Pattern(Integral(WC('u', S(1))*log(((x_*WC('b', S(1)) + WC('a', S(0)))*WC('e1', S(1))/(x_*WC('d', S(1)) + WC('c', S(0))))**WC('n', S(1))*WC('e', S(1)))**WC('p', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda e1, x: FreeQ(e1, x)), CustomConstraint(lambda d, b, c, a: ZeroQ(-a*d + b*c)))
rule62 = ReplacementRule(pattern62, lambda c, n, e1, a, d, x, e, u, p, b : Int(u, x)*log(e*(b*e1/d)**n)**p)
rubi.add(rule62)
# Rules 63-67 handle Log[e*(e1*(a+b*x)^n1*(c+d*x)^n2)^n]^p with n1 + n2 == 0,
# i.e. a log of a Moebius ratio ((a+b*x)/(c+d*x))^n1.
# Rule 63: plain log^p (positive integer p): integration by parts, reducing
# the exponent p by one.
pattern63 = Pattern(Integral(log(((x_*WC('b', S(1)) + WC('a', S(0)))**WC('n1', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**n2_*WC('e1', S(1)))**WC('n', S(1))*WC('e', S(1)))**WC('p', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda e1, x: FreeQ(e1, x)), CustomConstraint(lambda n1, x: FreeQ(n1, x)), CustomConstraint(lambda n2, n1: ZeroQ(n1 + n2)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda p: PositiveIntegerQ(p)))
rule63 = ReplacementRule(pattern63, lambda c, n, e1, a, d, x, n2, n1, e, p, b : -n*n1*p*(-a*d + b*c)*Int(log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**(p + S(-1))/(c + d*x), x)/b + (a + b*x)*log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**p/b)
rubi.add(rule63)
# Rule 64: log of the ratio over a linear factor proportional to (c+d*x)
# (d*f == c*g) with d == b*e: closed form as a dilogarithm PolyLog(2, ...).
pattern64 = Pattern(Integral(log((x_*WC('b', S(1)) + WC('a', S(0)))*WC('e', S(1))/(x_*WC('d', S(1)) + WC('c', S(0))))/(x_*WC('g', S(1)) + WC('f', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda c, g, f, d: ZeroQ(-c*g + d*f)), CustomConstraint(lambda b, e, d: ZeroQ(-b*e + d)))
rule64 = ReplacementRule(pattern64, lambda g, c, f, d, a, x, e, b : PolyLog(S(2), (-a*e + c)/(c + d*x))/g)
rubi.add(rule64)
# Rule 65: log^p/(f+g*x) when f+g*x is proportional to c+d*x (d*f == c*g):
# by parts producing log*log plus a reduced-exponent integral.
pattern65 = Pattern(Integral(log(((x_*WC('b', S(1)) + WC('a', S(0)))**WC('n1', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**n2_*WC('e1', S(1)))**WC('n', S(1))*WC('e', S(1)))**WC('p', S(1))/(x_*WC('g', S(1)) + WC('f', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda e1, x: FreeQ(e1, x)), CustomConstraint(lambda n1, x: FreeQ(n1, x)), CustomConstraint(lambda n2, n1: ZeroQ(n1 + n2)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda c, g, f, d: ZeroQ(-c*g + d*f)), CustomConstraint(lambda p: PositiveIntegerQ(p)))
rule65 = ReplacementRule(pattern65, lambda g, c, n, e1, a, d, f, x, n1, e, n2, p, b : n*n1*p*(-a*d + b*c)*Int(log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**(p + S(-1))*log((-a*d + b*c)/(b*(c + d*x)))/((a + b*x)*(c + d*x)), x)/g - log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**p*log((-a*d + b*c)/(b*(c + d*x)))/g)
rubi.add(rule65)
# Rule 66: mirror of rule 65 for f+g*x proportional to a+b*x (b*f == a*g),
# with the inner log built on (a+b*x) instead of (c+d*x).
pattern66 = Pattern(Integral(log(((x_*WC('b', S(1)) + WC('a', S(0)))**WC('n1', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**n2_*WC('e1', S(1)))**WC('n', S(1))*WC('e', S(1)))**WC('p', S(1))/(x_*WC('g', S(1)) + WC('f', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda e1, x: FreeQ(e1, x)), CustomConstraint(lambda n1, x: FreeQ(n1, x)), CustomConstraint(lambda n2, n1: ZeroQ(n1 + n2)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda b, f, a, g: ZeroQ(-a*g + b*f)), CustomConstraint(lambda p: PositiveIntegerQ(p)))
rule66 = ReplacementRule(pattern66, lambda g, c, n, e1, a, d, f, x, n1, e, n2, p, b : n*n1*p*(-a*d + b*c)*Int(log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**(p + S(-1))*log((a*d - b*c)/(d*(a + b*x)))/((a + b*x)*(c + d*x)), x)/g - log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**p*log((a*d - b*c)/(d*(a + b*x)))/g)
rubi.add(rule66)
# Rule 67: first power of the log over a generic f+g*x (d*f != c*g):
# integrate by parts with log(f+g*x) as the antiderivative.
pattern67 = Pattern(Integral(log(((x_*WC('b', S(1)) + WC('a', S(0)))**WC('n1', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**n2_*WC('e1', S(1)))**WC('n', S(1))*WC('e', S(1)))/(x_*WC('g', S(1)) + WC('f', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda e1, x: FreeQ(e1, x)), CustomConstraint(lambda n1, x: FreeQ(n1, x)), CustomConstraint(lambda n2, n1: ZeroQ(n1 + n2)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda c, g, f, d: NonzeroQ(-c*g + d*f)))
rule67 = ReplacementRule(pattern67, lambda g, c, n, e1, a, d, f, x, n1, e, n2, b : -n*n1*(-a*d + b*c)*Int(log(f + g*x)/((a + b*x)*(c + d*x)), x)/g + log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)*log(f + g*x)/g)
rubi.add(rule67)
# Rule 68: log^p/(f+g*x) for integer p > 1 and generic f+g*x: split
# 1/(f+g*x) into d/(g*(c+d*x)) minus (d*f-c*g)/(g*(c+d*x)*(f+g*x)).
pattern68 = Pattern(Integral(log(((x_*WC('b', S(1)) + WC('a', S(0)))**WC('n1', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**n2_*WC('e1', S(1)))**WC('n', S(1))*WC('e', S(1)))**p_/(x_*WC('g', S(1)) + WC('f', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda e1, x: FreeQ(e1, x)), CustomConstraint(lambda n1, x: FreeQ(n1, x)), CustomConstraint(lambda n2, n1: ZeroQ(n1 + n2)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda c, g, f, d: NonzeroQ(-c*g + d*f)), CustomConstraint(lambda p: IntegerQ(p)), CustomConstraint(lambda p: Greater(p, S(1))))
rule68 = ReplacementRule(pattern68, lambda g, c, n, e1, a, d, f, x, n1, e, n2, p, b : d*Int(log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**p/(c + d*x), x)/g - (-c*g + d*f)*Int(log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**p/((c + d*x)*(f + g*x)), x)/g)
rubi.add(rule68)
# Rule 69: 1/((f+g*x)^2*log(e*(a+b*x)/(c+d*x))) with f+g*x proportional to
# c+d*x: closed form via the logarithmic integral LogIntegral.
pattern69 = Pattern(Integral(S(1)/((x_*WC('g', S(1)) + WC('f', S(0)))**S(2)*log((x_*WC('b', S(1)) + WC('a', S(0)))*WC('e', S(1))/(x_*WC('d', S(1)) + WC('c', S(0))))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda c, g, f, d: ZeroQ(-c*g + d*f)))
rule69 = ReplacementRule(pattern69, lambda g, c, f, d, a, x, e, b : d**S(2)*LogIntegral(e*(a + b*x)/(c + d*x))/(e*g**S(2)*(-a*d + b*c)))
rubi.add(rule69)
# Rule 70: general-exponent analogue of rule 69 (f+g*x ~ c+d*x): closed form
# via the exponential integral ExpIntegralEi.
pattern70 = Pattern(Integral(S(1)/((x_*WC('g', S(1)) + WC('f', S(0)))**S(2)*log(((x_*WC('b', S(1)) + WC('a', S(0)))**WC('n1', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**n2_*WC('e1', S(1)))**WC('n', S(1))*WC('e', S(1)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda e1, x: FreeQ(e1, x)), CustomConstraint(lambda n1, x: FreeQ(n1, x)), CustomConstraint(lambda n2, n1: ZeroQ(n1 + n2)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda c, g, f, d: ZeroQ(-c*g + d*f)))
rule70 = ReplacementRule(pattern70, lambda g, c, n, e1, a, d, f, x, n1, e, n2, b : d**S(2)*(e*(e1*(a + b*x)**n1*(c + d*x)**n2)**n)**(-S(1)/(n*n1))*(a + b*x)*ExpIntegralEi(log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)/(n*n1))/(g**S(2)*n*n1*(c + d*x)*(-a*d + b*c)))
rubi.add(rule70)
# Rule 71: mirror of rule 70 for f+g*x proportional to a+b*x (b*f == a*g);
# note the sign flips inside ExpIntegralEi and the (...)^(+1/(n*n1)) prefactor.
pattern71 = Pattern(Integral(S(1)/((x_*WC('g', S(1)) + WC('f', S(0)))**S(2)*log(((x_*WC('b', S(1)) + WC('a', S(0)))**WC('n1', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**n2_*WC('e1', S(1)))**WC('n', S(1))*WC('e', S(1)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda e1, x: FreeQ(e1, x)), CustomConstraint(lambda n1, x: FreeQ(n1, x)), CustomConstraint(lambda n2, n1: ZeroQ(n1 + n2)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda b, f, a, g: ZeroQ(-a*g + b*f)))
rule71 = ReplacementRule(pattern71, lambda g, c, n, e1, a, d, f, x, n1, e, n2, b : b**S(2)*(e*(e1*(a + b*x)**n1*(c + d*x)**n2)**n)**(S(1)/(n*n1))*(c + d*x)*ExpIntegralEi(-log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)/(n*n1))/(g**S(2)*n*n1*(a + b*x)*(-a*d + b*c)))
rubi.add(rule71)
# Rule 72: log^p/(f+g*x)^2 with b*f != a*g, positive integer p: by parts
# using (a+b*x)/((f+g*x)*(b*f-a*g)) as antiderivative of 1/(f+g*x)^2.
pattern72 = Pattern(Integral(log(((x_*WC('b', S(1)) + WC('a', S(0)))**WC('n1', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**n2_*WC('e1', S(1)))**WC('n', S(1))*WC('e', S(1)))**WC('p', S(1))/(x_*WC('g', S(1)) + WC('f', S(0)))**S(2), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda e1, x: FreeQ(e1, x)), CustomConstraint(lambda n1, x: FreeQ(n1, x)), CustomConstraint(lambda n2, n1: ZeroQ(n1 + n2)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda b, f, a, g: NonzeroQ(-a*g + b*f)), CustomConstraint(lambda p: PositiveIntegerQ(p)))
rule72 = ReplacementRule(pattern72, lambda g, c, n, e1, a, d, f, x, n1, e, n2, p, b : -n*n1*p*(-a*d + b*c)*Int(log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**(p + S(-1))/((c + d*x)*(f + g*x)), x)/(-a*g + b*f) + (a + b*x)*log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**p/((f + g*x)*(-a*g + b*f)))
rubi.add(rule72)
# Rule 73: companion of rule 72, written around (c+d*x)/((f+g*x)*(d*f-c*g))
# instead, for d*f != c*g.
pattern73 = Pattern(Integral(log(((x_*WC('b', S(1)) + WC('a', S(0)))**WC('n1', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**n2_*WC('e1', S(1)))**WC('n', S(1))*WC('e', S(1)))**WC('p', S(1))/(x_*WC('g', S(1)) + WC('f', S(0)))**S(2), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda e1, x: FreeQ(e1, x)), CustomConstraint(lambda n1, x: FreeQ(n1, x)), CustomConstraint(lambda n2, n1: ZeroQ(n1 + n2)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda c, g, f, d: NonzeroQ(-c*g + d*f)), CustomConstraint(lambda p: PositiveIntegerQ(p)))
rule73 = ReplacementRule(pattern73, lambda g, c, n, e1, a, d, f, x, n1, e, n2, p, b : -n*n1*p*(-a*d + b*c)*Int(log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**(p + S(-1))/((a + b*x)*(f + g*x)), x)/(-c*g + d*f) + (c + d*x)*log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**p/((f + g*x)*(-c*g + d*f)))
rubi.add(rule73)
# Rule 74: log^p/(f+g*x)^3 with b*f != a*g: split via
# 1/(f+g*x) = (b - g*(a+b*x)/(f+g*x))/(b*f - a*g).
pattern74 = Pattern(Integral(log(((x_*WC('b', S(1)) + WC('a', S(0)))**WC('n1', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**n2_*WC('e1', S(1)))**WC('n', S(1))*WC('e', S(1)))**p_/(x_*WC('g', S(1)) + WC('f', S(0)))**S(3), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda e1, x: FreeQ(e1, x)), CustomConstraint(lambda n1, x: FreeQ(n1, x)), CustomConstraint(lambda n2, n1: ZeroQ(n1 + n2)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda b, f, a, g: NonzeroQ(-a*g + b*f)), CustomConstraint(lambda c, g, f, d: ZeroQ(-c*g + d*f)))
rule74 = ReplacementRule(pattern74, lambda g, c, n, e1, a, d, f, x, n1, e, n2, p, b : b*Int(log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**p/(f + g*x)**S(2), x)/(-a*g + b*f) - g*Int((a + b*x)*log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**p/(f + g*x)**S(3), x)/(-a*g + b*f))
rubi.add(rule74)
# Rule 75: mirror of rule 74 with the split through (c+d*x), for d*f != c*g
# and b*f == a*g.
pattern75 = Pattern(Integral(log(((x_*WC('b', S(1)) + WC('a', S(0)))**WC('n1', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**n2_*WC('e1', S(1)))**WC('n', S(1))*WC('e', S(1)))**p_/(x_*WC('g', S(1)) + WC('f', S(0)))**S(3), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda e1, x: FreeQ(e1, x)), CustomConstraint(lambda n1, x: FreeQ(n1, x)), CustomConstraint(lambda n2, n1: ZeroQ(n1 + n2)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda c, g, f, d: NonzeroQ(-c*g + d*f)), CustomConstraint(lambda b, f, a, g: ZeroQ(-a*g + b*f)))
rule75 = ReplacementRule(pattern75, lambda g, c, n, e1, a, d, f, x, n1, e, n2, p, b : d*Int(log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**p/(f + g*x)**S(2), x)/(-c*g + d*f) - g*Int((c + d*x)*log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**p/(f + g*x)**S(3), x)/(-c*g + d*f))
rubi.add(rule75)
# Rule 76: (f+g*x)^m * log^p for general integer m != -1: integrate by parts
# on the power factor, reducing p by one.
pattern76 = Pattern(Integral((x_*WC('g', S(1)) + WC('f', S(0)))**WC('m', S(1))*log(((x_*WC('b', S(1)) + WC('a', S(0)))**WC('n1', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**n2_*WC('e1', S(1)))**WC('n', S(1))*WC('e', S(1)))**WC('p', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda e1, x: FreeQ(e1, x)), CustomConstraint(lambda n1, x: FreeQ(n1, x)), CustomConstraint(lambda n2, n1: ZeroQ(n1 + n2)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda p: PositiveIntegerQ(p)), CustomConstraint(lambda m: IntegerQ(m)), CustomConstraint(lambda m: NonzeroQ(m + S(1))))
rule76 = ReplacementRule(pattern76, lambda m, g, c, n, e1, a, d, f, x, n1, e, n2, p, b : -n*n1*p*(-a*d + b*c)*Int((f + g*x)**(m + S(1))*log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**(p + S(-1))/((a + b*x)*(c + d*x)), x)/(g*(m + S(1))) + (f + g*x)**(m + S(1))*log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**p/(g*(m + S(1))))
rubi.add(rule76)
# Rules 77-82: (a+b*x)^m*(c+d*x)^m2 * log(u)^p with m + m2 + 2 == 0 and
# u*(c+d*x)/(a+b*x) free of x (so log(u) behaves like the Moebius log above).
# Rule 77: p > 0 with an explicit e*u^n inside the log: reduce p by parts.
pattern77 = Pattern(Integral((x_*WC('b', S(1)) + WC('a', S(0)))**WC('m', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**WC('m2', S(1))*log(u_**n_*WC('e', S(1)))**WC('p', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda x, c, a, d, u, b: FreeQ(simplify(Mul(u, Mul(Add(c, Mul(d, x)), Pow(Add(a, Mul(b, x)), S(-1))))), x)), CustomConstraint(lambda m, m2: ZeroQ(m + m2 + S(2))), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda m: NonzeroQ(m + S(1))), CustomConstraint(lambda p: RationalQ(p)), CustomConstraint(lambda p: Greater(p, S(0))))
rule77 = ReplacementRule(pattern77, lambda m, c, n, a, d, x, e, u, p, m2, b : -n*p*Int((a + b*x)**m*(c + d*x)**(-m + S(-2))*log(e*u**n)**(p + S(-1)), x)/(m + S(1)) + (a + b*x)**(m + S(1))*(c + d*x)**(-m + S(-1))*log(e*u**n)**p/((m + S(1))*(-a*d + b*c)))
rubi.add(rule77)
# Rule 78: same as rule 77 but with a bare log(u) (no e*u^n wrapper).
pattern78 = Pattern(Integral((x_*WC('b', S(1)) + WC('a', S(0)))**WC('m', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**WC('m2', S(1))*log(u_)**WC('p', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda x, c, a, d, u, b: FreeQ(simplify(Mul(u, Mul(Add(c, Mul(d, x)), Pow(Add(a, Mul(b, x)), S(-1))))), x)), CustomConstraint(lambda m, m2: ZeroQ(m + m2 + S(2))), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda m: NonzeroQ(m + S(1))), CustomConstraint(lambda p: RationalQ(p)), CustomConstraint(lambda p: Greater(p, S(0))))
rule78 = ReplacementRule(pattern78, lambda m, c, x, a, d, u, p, m2, b : -p*Int((a + b*x)**m*(c + d*x)**(-m + S(-2))*log(u)**(p + S(-1)), x)/(m + S(1)) + (a + b*x)**(m + S(1))*(c + d*x)**(-m + S(-1))*log(u)**p/((m + S(1))*(-a*d + b*c)))
rubi.add(rule78)
# Rule 79: reciprocal of the log: closed form via ExpIntegralEi.
pattern79 = Pattern(Integral((x_*WC('b', S(1)) + WC('a', S(0)))**WC('m', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**WC('m2', S(1))/log(u_**n_*WC('e', S(1))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda x, c, a, d, u, b: FreeQ(simplify(Mul(u, Mul(Add(c, Mul(d, x)), Pow(Add(a, Mul(b, x)), S(-1))))), x)), CustomConstraint(lambda m, m2: ZeroQ(m + m2 + S(2))), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda m: NonzeroQ(m + S(1))))
rule79 = ReplacementRule(pattern79, lambda m, c, n, a, d, x, e, u, m2, b : (e*u**n)**(-(m + S(1))/n)*(a + b*x)**(m + S(1))*(c + d*x)**(-m + S(-1))*ExpIntegralEi((m + S(1))*log(e*u**n)/n)/(n*(-a*d + b*c)))
rubi.add(rule79)
# Rule 80: bare-log variant of rule 79.
pattern80 = Pattern(Integral((x_*WC('b', S(1)) + WC('a', S(0)))**WC('m', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**WC('m2', S(1))/log(u_), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda x, c, a, d, u, b: FreeQ(simplify(Mul(u, Mul(Add(c, Mul(d, x)), Pow(Add(a, Mul(b, x)), S(-1))))), x)), CustomConstraint(lambda m, m2: ZeroQ(m + m2 + S(2))), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda m: NonzeroQ(m + S(1))))
rule80 = ReplacementRule(pattern80, lambda m, c, x, d, a, u, m2, b : u**(-m + S(-1))*(a + b*x)**(m + S(1))*(c + d*x)**(-m + S(-1))*ExpIntegralEi((m + S(1))*log(u))/(-a*d + b*c))
rubi.add(rule80)
# Rule 81: p < -1: reverse reduction — raise the log exponent by one.
pattern81 = Pattern(Integral((x_*WC('b', S(1)) + WC('a', S(0)))**WC('m', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**WC('m2', S(1))*log(u_**n_*WC('e', S(1)))**WC('p', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda x, c, a, d, u, b: FreeQ(simplify(Mul(u, Mul(Add(c, Mul(d, x)), Pow(Add(a, Mul(b, x)), S(-1))))), x)), CustomConstraint(lambda m, m2: ZeroQ(m + m2 + S(2))), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda m: NonzeroQ(m + S(1))), CustomConstraint(lambda p: RationalQ(p)), CustomConstraint(lambda p: Less(p, S(-1))))
rule81 = ReplacementRule(pattern81, lambda m, c, n, a, d, x, e, u, p, m2, b : (a + b*x)**(m + S(1))*(c + d*x)**(-m + S(-1))*log(e*u**n)**(p + S(1))/(n*(p + S(1))*(-a*d + b*c)) - (m + S(1))*Int((a + b*x)**m*(c + d*x)**(-m + S(-2))*log(e*u**n)**(p + S(1)), x)/(n*(p + S(1))))
rubi.add(rule81)
# Rule 82: bare-log variant of rule 81.
pattern82 = Pattern(Integral((x_*WC('b', S(1)) + WC('a', S(0)))**WC('m', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**WC('m2', S(1))*log(u_)**WC('p', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda x, c, a, d, u, b: FreeQ(simplify(Mul(u, Mul(Add(c, Mul(d, x)), Pow(Add(a, Mul(b, x)), S(-1))))), x)), CustomConstraint(lambda m, m2: ZeroQ(m + m2 + S(2))), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda m: NonzeroQ(m + S(1))), CustomConstraint(lambda p: RationalQ(p)), CustomConstraint(lambda p: Less(p, S(-1))))
rule82 = ReplacementRule(pattern82, lambda m, c, x, a, d, u, p, m2, b : (a + b*x)**(m + S(1))*(c + d*x)**(-m + S(-1))*log(u)**(p + S(1))/((p + S(1))*(-a*d + b*c)) - (m + S(1))*Int((a + b*x)**m*(c + d*x)**(-m + S(-2))*log(u)**(p + S(1)), x)/(p + S(1)))
rubi.add(rule82)
# Rule 83: log^p/((c+d*x)*(f+g*x)) with f+g*x proportional to c+d*x
# (d*f == c*g): collapse the denominator to (c+d*x)^2.
pattern83 = Pattern(Integral(log(((x_*WC('b', S(1)) + WC('a', S(0)))**WC('n1', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**n2_*WC('e1', S(1)))**WC('n', S(1))*WC('e', S(1)))**WC('p', S(1))/((x_*WC('d', S(1)) + WC('c', S(0)))*(x_*WC('g', S(1)) + WC('f', S(0)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda e1, x: FreeQ(e1, x)), CustomConstraint(lambda n1, x: FreeQ(n1, x)), CustomConstraint(lambda n2, n1: ZeroQ(n1 + n2)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda b, f, a, g: NonzeroQ(-a*g + b*f)), CustomConstraint(lambda c, g, f, d: ZeroQ(-c*g + d*f)))
rule83 = ReplacementRule(pattern83, lambda g, c, n, e1, a, d, f, x, n1, e, n2, p, b : d*Int(log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**p/(c + d*x)**S(2), x)/g)
rubi.add(rule83)
# Rule 84: first-power log over (c+d*x)*(f+g*x) with the special relation
# d*f - c*g == e*(b*f - a*g): direct dilogarithm result.
pattern84 = Pattern(Integral(log((x_*WC('b', S(1)) + WC('a', S(0)))*WC('e', S(1))/(x_*WC('d', S(1)) + WC('c', S(0))))/((x_*WC('d', S(1)) + WC('c', S(0)))*(x_*WC('g', S(1)) + WC('f', S(0)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda b, f, a, g: NonzeroQ(-a*g + b*f)), CustomConstraint(lambda c, g, f, d: NonzeroQ(-c*g + d*f)), CustomConstraint(lambda g, c, f, d, a, e, b: ZeroQ(-c*g + d*f - e*(-a*g + b*f))))
rule84 = ReplacementRule(pattern84, lambda g, c, f, d, a, x, e, b : PolyLog(S(2), -(f + g*x)*(a*e - c)/(f*(c + d*x)))/(-c*g + d*f))
rubi.add(rule84)
# Rule 85: general log^p over (c+d*x)*(f+g*x): by parts generating a
# log*log term plus a reduced-exponent integral.
pattern85 = Pattern(Integral(log(((x_*WC('b', S(1)) + WC('a', S(0)))**WC('n1', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**n2_*WC('e1', S(1)))**WC('n', S(1))*WC('e', S(1)))**WC('p', S(1))/((x_*WC('d', S(1)) + WC('c', S(0)))*(x_*WC('g', S(1)) + WC('f', S(0)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda e1, x: FreeQ(e1, x)), CustomConstraint(lambda n1, x: FreeQ(n1, x)), CustomConstraint(lambda n2, n1: ZeroQ(n1 + n2)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda b, f, a, g: NonzeroQ(-a*g + b*f)), CustomConstraint(lambda c, g, f, d: NonzeroQ(-c*g + d*f)), CustomConstraint(lambda p: PositiveIntegerQ(p)))
rule85 = ReplacementRule(pattern85, lambda g, c, n, e1, a, d, f, x, n1, e, n2, p, b : n*n1*p*(-a*d + b*c)*Int(log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**(p + S(-1))*log((f + g*x)*(-a*d + b*c)/((c + d*x)*(-a*g + b*f)))/((a + b*x)*(c + d*x)), x)/(-c*g + d*f) - log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**p*log((f + g*x)*(-a*d + b*c)/((c + d*x)*(-a*g + b*f)))/(-c*g + d*f))
rubi.add(rule85)
# Rule 86: quadratic denominator f + g*x^2 with c^2*g + d^2*f == 0 and
# a*d*e + b*c*e == 2*c*d: dilogarithm closed form.
pattern86 = Pattern(Integral(log((x_*WC('b', S(1)) + WC('a', S(0)))*WC('e', S(1))/(x_*WC('d', S(1)) + WC('c', S(0))))/(f_ + x_**S(2)*WC('g', S(1))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda c, g, f, d: ZeroQ(c**S(2)*g + d**S(2)*f)), CustomConstraint(lambda c, d, a, e, b: ZeroQ(-a*d*e - b*c*e + S(2)*c*d)))
rule86 = ReplacementRule(pattern86, lambda g, c, x, d, a, f, e, b : c*PolyLog(S(2), -(c - d*x)*(a*e - c)/(c*(c + d*x)))/(S(2)*d*f))
rubi.add(rule86)
# Rule 87: quadratic denominator f + g*x + h*x^2 with
# c^2*h - c*d*g + d^2*f == 0: factor (c+d*x) out of the quadratic, leaving
# the cofactor (-c*h + d*g + d*h*x)/d^2.
pattern87 = Pattern(Integral(log(((x_*WC('b', S(1)) + WC('a', S(0)))**WC('n1', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**n2_*WC('e1', S(1)))**WC('n', S(1))*WC('e', S(1)))**WC('p', S(1))/(x_**S(2)*WC('h', S(1)) + x_*WC('g', S(1)) + WC('f', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda e1, x: FreeQ(e1, x)), CustomConstraint(lambda n1, x: FreeQ(n1, x)), CustomConstraint(lambda n2, n1: ZeroQ(n1 + n2)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda g, c, f, d, h: ZeroQ(c**S(2)*h - c*d*g + d**S(2)*f)))
rule87 = ReplacementRule(pattern87, lambda g, c, n, e1, a, d, f, x, n1, e, n2, h, p, b : d**S(2)*Int(log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**p/((c + d*x)*(-c*h + d*g + d*h*x)), x))
rubi.add(rule87)
# Rule 88: denominator f + h*x^2 with c^2*h + d^2*f == 0: factor as
# -(c - d*x)*(c + d*x)*h/d^2.
pattern88 = Pattern(Integral(log(((x_*WC('b', S(1)) + WC('a', S(0)))**WC('n1', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**n2_*WC('e1', S(1)))**WC('n', S(1))*WC('e', S(1)))**WC('p', S(1))/(x_**S(2)*WC('h', S(1)) + WC('f', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda e1, x: FreeQ(e1, x)), CustomConstraint(lambda n1, x: FreeQ(n1, x)), CustomConstraint(lambda n2, n1: ZeroQ(n1 + n2)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda c, f, h, d: ZeroQ(c**S(2)*h + d**S(2)*f)))
rule88 = ReplacementRule(pattern88, lambda c, n, e1, a, d, f, x, n1, e, n2, h, p, b : -d**S(2)*Int(log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**p/((c - d*x)*(c + d*x)), x)/h)
rubi.add(rule88)
# Rule 89: f+g*x proportional to a+b*x (b*f == a*g): the integrand is the
# derivative of the log times log^p, so substitute t = log(...) and apply
# the power rule.
pattern89 = Pattern(Integral(log(((x_*WC('b', S(1)) + WC('a', S(0)))**WC('n1', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**n2_*WC('e1', S(1)))**WC('n', S(1))*WC('e', S(1)))**WC('p', S(1))/((x_*WC('d', S(1)) + WC('c', S(0)))*(x_*WC('g', S(1)) + WC('f', S(0)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda e1, x: FreeQ(e1, x)), CustomConstraint(lambda n1, x: FreeQ(n1, x)), CustomConstraint(lambda n2, n1: ZeroQ(n1 + n2)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda c, g, f, d: NonzeroQ(-c*g + d*f)), CustomConstraint(lambda b, f, a, g: ZeroQ(-a*g + b*f)))
rule89 = ReplacementRule(pattern89, lambda g, c, n, e1, a, d, f, x, n1, e, n2, p, b : b*Subst(Int(x**p, x), x, log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n))/(g*n*n1*(-a*d + b*c)))
rubi.add(rule89)
# Rules 90-93: Log[v]*Log[e*u^n]^p/((a+b*x)*(c+d*x)) where u/(1-v) and
# u*(c+d*x)/(a+b*x) are both free of x.
# Rule 90: p > 0: by parts producing a PolyLog(2, 1-v) term and a
# reduced-exponent integral.
pattern90 = Pattern(Integral(log(v_)*log(u_**n_*WC('e', S(1)))**WC('p', S(1))/((x_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('d', S(1)) + WC('c', S(0)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Pow(Add(S(1), Mul(S(-1), v)), S(-1)))), x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Mul(Add(c, Mul(d, x)), Pow(Add(a, Mul(b, x)), S(-1))))), x)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda p: RationalQ(p)), CustomConstraint(lambda p: Greater(p, S(0))))
rule90 = ReplacementRule(pattern90, lambda c, n, a, d, x, e, u, p, v, b : n*p*Int(PolyLog(S(2), -v + S(1))*log(e*u**n)**(p + S(-1))/((a + b*x)*(c + d*x)), x) - PolyLog(S(2), -v + S(1))*log(e*u**n)**p/(-a*d + b*c))
rubi.add(rule90)
# Rule 91: bare-log variant of rule 90.
pattern91 = Pattern(Integral(log(u_)**WC('p', S(1))*log(v_)/((x_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('d', S(1)) + WC('c', S(0)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Pow(Add(S(1), Mul(S(-1), v)), S(-1)))), x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Mul(Add(c, Mul(d, x)), Pow(Add(a, Mul(b, x)), S(-1))))), x)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda p: RationalQ(p)), CustomConstraint(lambda p: Greater(p, S(0))))
rule91 = ReplacementRule(pattern91, lambda c, x, d, a, u, p, v, b : p*Int(PolyLog(S(2), -v + S(1))*log(u)**(p + S(-1))/((a + b*x)*(c + d*x)), x) - PolyLog(S(2), -v + S(1))*log(u)**p/(-a*d + b*c))
rubi.add(rule91)
# Rules 92-93: companions to rules 90-91 for rational p < -1.  Here the log power
# is raised (p -> p+1) instead of lowered, using the x-free quantity f = (1-v)/u
# computed in the With helpers below.
pattern92 = Pattern(Integral(log(v_)*log(u_**n_*WC('e', S(1)))**p_/((x_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('d', S(1)) + WC('c', S(0)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Pow(Add(S(1), Mul(S(-1), v)), S(-1)))), x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Mul(Add(c, Mul(d, x)), Pow(Add(a, Mul(b, x)), S(-1))))), x)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda p: RationalQ(p)), CustomConstraint(lambda p: Less(p, S(-1))), )
def With92(c, n, d, a, x, e, u, p, v, b):
    """Apply rule 92: raise the log power via the x-free ratio f = (1-v)/u."""
    # f is free of x by pattern92's constraints, so it can be pulled out of Int.
    f = (-v + S(1))/u
    return f*Int(log(e*u**n)**(p + S(1))/((c + d*x)*(-a*f - b*f + c + d)), x)/(n*(p + S(1))) + log(v)*log(e*u**n)**(p + S(1))/(n*(p + S(1))*(-a*d + b*c))
rule92 = ReplacementRule(pattern92, lambda c, n, d, a, x, e, u, p, v, b : With92(c, n, d, a, x, e, u, p, v, b))
rubi.add(rule92)
# Rule 93: same as rule 92 for the plain log(u)^p form (no e, no n).
pattern93 = Pattern(Integral(log(u_)**p_*log(v_)/((x_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('d', S(1)) + WC('c', S(0)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Pow(Add(S(1), Mul(S(-1), v)), S(-1)))), x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Mul(Add(c, Mul(d, x)), Pow(Add(a, Mul(b, x)), S(-1))))), x)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda p: RationalQ(p)), CustomConstraint(lambda p: Less(p, S(-1))), )
def With93(c, x, d, a, u, p, v, b):
    """Apply rule 93: same power-raising step as With92 without e and n."""
    f = (-v + S(1))/u
    return f*Int(log(u)**(p + S(1))/((c + d*x)*(-a*f - b*f + c + d)), x)/(p + S(1)) + log(u)**(p + S(1))*log(v)/((p + S(1))*(-a*d + b*c))
rule93 = ReplacementRule(pattern93, lambda c, x, d, a, u, p, v, b : With93(c, x, d, a, u, p, v, b))
rubi.add(rule93)
# Rules 94-95: mirror rules 90-91 for the case where u*(1 - v) (a product rather
# than a quotient) is free of x; the overall sign of the reduction flips.
pattern94 = Pattern(Integral(log(v_)*log(u_**n_*WC('e', S(1)))**WC('p', S(1))/((x_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('d', S(1)) + WC('c', S(0)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Add(S(1), Mul(S(-1), v)))), x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Mul(Add(c, Mul(d, x)), Pow(Add(a, Mul(b, x)), S(-1))))), x)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda p: RationalQ(p)), CustomConstraint(lambda p: Greater(p, S(0))))
rule94 = ReplacementRule(pattern94, lambda c, n, a, d, x, e, u, p, v, b : -n*p*Int(PolyLog(S(2), -v + S(1))*log(e*u**n)**(p + S(-1))/((a + b*x)*(c + d*x)), x) + PolyLog(S(2), -v + S(1))*log(e*u**n)**p/(-a*d + b*c))
rubi.add(rule94)
# Rule 95: plain log(u)^p form of rule 94.
pattern95 = Pattern(Integral(log(u_)**WC('p', S(1))*log(v_)/((x_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('d', S(1)) + WC('c', S(0)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Add(S(1), Mul(S(-1), v)))), x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Mul(Add(c, Mul(d, x)), Pow(Add(a, Mul(b, x)), S(-1))))), x)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda p: RationalQ(p)), CustomConstraint(lambda p: Greater(p, S(0))))
rule95 = ReplacementRule(pattern95, lambda c, x, d, a, u, p, v, b : -p*Int(PolyLog(S(2), -v + S(1))*log(u)**(p + S(-1))/((a + b*x)*(c + d*x)), x) + PolyLog(S(2), -v + S(1))*log(u)**p/(-a*d + b*c))
rubi.add(rule95)
# Rules 96-97: mirror rules 92-93 for the u*(1-v)-free-of-x case, rational p < -1.
# The With helpers raise the log power using the x-free product f = u*(1 - v).
pattern96 = Pattern(Integral(log(v_)*log(u_**n_*WC('e', S(1)))**p_/((x_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('d', S(1)) + WC('c', S(0)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Add(S(1), Mul(S(-1), v)))), x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Mul(Add(c, Mul(d, x)), Pow(Add(a, Mul(b, x)), S(-1))))), x)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda p: RationalQ(p)), CustomConstraint(lambda p: Less(p, S(-1))), )
def With96(c, n, d, a, x, e, u, p, v, b):
    """Apply rule 96: raise the log power via the x-free product f = u*(1-v)."""
    f = u*(-v + S(1))
    return -f*Int(log(e*u**n)**(p + S(1))/((a + b*x)*(a + b - c*f - d*f)), x)/(n*(p + S(1))) + log(v)*log(e*u**n)**(p + S(1))/(n*(p + S(1))*(-a*d + b*c))
rule96 = ReplacementRule(pattern96, lambda c, n, d, a, x, e, u, p, v, b : With96(c, n, d, a, x, e, u, p, v, b))
rubi.add(rule96)
# Rule 97: plain log(u)^p form of rule 96.
pattern97 = Pattern(Integral(log(u_)**p_*log(v_)/((x_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('d', S(1)) + WC('c', S(0)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Add(S(1), Mul(S(-1), v)))), x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Mul(Add(c, Mul(d, x)), Pow(Add(a, Mul(b, x)), S(-1))))), x)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda p: RationalQ(p)), CustomConstraint(lambda p: Less(p, S(-1))), )
def With97(c, x, d, a, u, p, v, b):
    """Apply rule 97: same power-raising step as With96 without e and n."""
    f = u*(-v + S(1))
    return -f*Int(log(u)**(p + S(1))/((a + b*x)*(a + b - c*f - d*f)), x)/(p + S(1)) + log(u)**(p + S(1))*log(v)/((p + S(1))*(-a*d + b*c))
rule97 = ReplacementRule(pattern97, lambda c, x, d, a, u, p, v, b : With97(c, x, d, a, u, p, v, b))
rubi.add(rule97)
# Rules 98-101: Int[PolyLog(q, v)*log(e*u^n)^p / ((a+b*x)*(c+d*x)), x] with u/v
# free of x.  Rules 98-99 handle rational p > 1 (lower the log power, raise the
# PolyLog order q -> q+1); rules 100-101 handle p < -1 (raise the log power,
# lower the PolyLog order q -> q-1).
pattern98 = Pattern(Integral(PolyLog(q_, v_)*log(u_**n_*WC('e', S(1)))**p_/((x_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('d', S(1)) + WC('c', S(0)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Pow(v, S(-1)))), x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Mul(Add(c, Mul(d, x)), Pow(Add(a, Mul(b, x)), S(-1))))), x)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda p: RationalQ(p)), CustomConstraint(lambda p: Greater(p, S(1))))
rule98 = ReplacementRule(pattern98, lambda c, n, d, a, x, q, e, u, p, v, b : -n*p*Int(PolyLog(q + S(1), v)*log(e*u**n)**(p + S(-1))/((a + b*x)*(c + d*x)), x) + PolyLog(q + S(1), v)*log(e*u**n)**p/(-a*d + b*c))
rubi.add(rule98)
# Rule 99: plain log(u)^p form of rule 98.
pattern99 = Pattern(Integral(PolyLog(q_, v_)*log(u_)**p_/((x_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('d', S(1)) + WC('c', S(0)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Pow(v, S(-1)))), x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Mul(Add(c, Mul(d, x)), Pow(Add(a, Mul(b, x)), S(-1))))), x)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda p: RationalQ(p)), CustomConstraint(lambda p: Greater(p, S(1))))
rule99 = ReplacementRule(pattern99, lambda c, x, d, a, q, u, p, v, b : -p*Int(PolyLog(q + S(1), v)*log(u)**(p + S(-1))/((a + b*x)*(c + d*x)), x) + PolyLog(q + S(1), v)*log(u)**p/(-a*d + b*c))
rubi.add(rule99)
# Rule 100: p < -1 counterpart of rule 98 (PolyLog order is lowered instead).
pattern100 = Pattern(Integral(PolyLog(q_, v_)*log(u_**n_*WC('e', S(1)))**p_/((x_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('d', S(1)) + WC('c', S(0)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Pow(v, S(-1)))), x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Mul(Add(c, Mul(d, x)), Pow(Add(a, Mul(b, x)), S(-1))))), x)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda p: RationalQ(p)), CustomConstraint(lambda p: Less(p, S(-1))))
rule100 = ReplacementRule(pattern100, lambda c, n, d, a, x, q, e, u, p, v, b : -Int(PolyLog(q + S(-1), v)*log(e*u**n)**(p + S(1))/((a + b*x)*(c + d*x)), x)/(n*(p + S(1))) + PolyLog(q, v)*log(e*u**n)**(p + S(1))/(n*(p + S(1))*(-a*d + b*c)))
rubi.add(rule100)
# Rule 101: plain log(u)^p form of rule 100.
pattern101 = Pattern(Integral(PolyLog(q_, v_)*log(u_)**p_/((x_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('d', S(1)) + WC('c', S(0)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Pow(v, S(-1)))), x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Mul(Add(c, Mul(d, x)), Pow(Add(a, Mul(b, x)), S(-1))))), x)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda p: RationalQ(p)), CustomConstraint(lambda p: Less(p, S(-1))))
rule101 = ReplacementRule(pattern101, lambda c, x, d, a, q, u, p, v, b : -Int(PolyLog(q + S(-1), v)*log(u)**(p + S(1))/((a + b*x)*(c + d*x)), x)/(p + S(1)) + PolyLog(q, v)*log(u)**(p + S(1))/((p + S(1))*(-a*d + b*c)))
rubi.add(rule101)
# Rules 102-105: mirror rules 98-101 for the case where u*v (a product rather
# than a quotient) is free of x; the overall signs of the reductions flip.
pattern102 = Pattern(Integral(PolyLog(q_, v_)*log(u_**n_*WC('e', S(1)))**p_/((x_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('d', S(1)) + WC('c', S(0)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, v)), x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Mul(Add(c, Mul(d, x)), Pow(Add(a, Mul(b, x)), S(-1))))), x)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda p: RationalQ(p)), CustomConstraint(lambda p: Greater(p, S(1))))
rule102 = ReplacementRule(pattern102, lambda c, n, d, a, x, q, e, u, p, v, b : n*p*Int(PolyLog(q + S(1), v)*log(e*u**n)**(p + S(-1))/((a + b*x)*(c + d*x)), x) - PolyLog(q + S(1), v)*log(e*u**n)**p/(-a*d + b*c))
rubi.add(rule102)
# Rule 103: plain log(u)^p form of rule 102.
pattern103 = Pattern(Integral(PolyLog(q_, v_)*log(u_)**p_/((x_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('d', S(1)) + WC('c', S(0)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, v)), x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Mul(Add(c, Mul(d, x)), Pow(Add(a, Mul(b, x)), S(-1))))), x)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda p: RationalQ(p)), CustomConstraint(lambda p: Greater(p, S(1))))
rule103 = ReplacementRule(pattern103, lambda c, x, d, a, q, u, p, v, b : p*Int(PolyLog(q + S(1), v)*log(u)**(p + S(-1))/((a + b*x)*(c + d*x)), x) - PolyLog(q + S(1), v)*log(u)**p/(-a*d + b*c))
rubi.add(rule103)
# Rule 104: p < -1 counterpart of rule 102.
pattern104 = Pattern(Integral(PolyLog(q_, v_)*log(u_**n_*WC('e', S(1)))**p_/((x_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('d', S(1)) + WC('c', S(0)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, v)), x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Mul(Add(c, Mul(d, x)), Pow(Add(a, Mul(b, x)), S(-1))))), x)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda p: RationalQ(p)), CustomConstraint(lambda p: Less(p, S(-1))))
rule104 = ReplacementRule(pattern104, lambda c, n, d, a, x, q, e, u, p, v, b : Int(PolyLog(q + S(-1), v)*log(e*u**n)**(p + S(1))/((a + b*x)*(c + d*x)), x)/(n*(p + S(1))) + PolyLog(q, v)*log(e*u**n)**(p + S(1))/(n*(p + S(1))*(-a*d + b*c)))
rubi.add(rule104)
# Rule 105: plain log(u)^p form of rule 104.
pattern105 = Pattern(Integral(PolyLog(q_, v_)*log(u_)**p_/((x_*WC('b', S(1)) + WC('a', S(0)))*(x_*WC('d', S(1)) + WC('c', S(0)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, v)), x)), CustomConstraint(lambda x, c, a, d, u, v, b: FreeQ(simplify(Mul(u, Mul(Add(c, Mul(d, x)), Pow(Add(a, Mul(b, x)), S(-1))))), x)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda p: RationalQ(p)), CustomConstraint(lambda p: Less(p, S(-1))))
rule105 = ReplacementRule(pattern105, lambda c, x, d, a, q, u, p, v, b : Int(PolyLog(q + S(-1), v)*log(u)**(p + S(1))/((a + b*x)*(c + d*x)), x)/(p + S(1)) + PolyLog(q, v)*log(u)**(p + S(1))/((p + S(1))*(-a*d + b*c)))
rubi.add(rule105)
# Rules 106-107: when the quadratic denominator f + g*x + h*x^2 (or f + h*x^2)
# factors as a constant multiple of (a+b*x)*(c+d*x) -- enforced by the
# ZeroQ coefficient-matching constraints -- rewrite the integral over the
# factored denominator b*d/h * 1/((a+b*x)*(c+d*x)).  Requires n1 + n2 == 0 so
# the logarithm argument is a power of the linear-fraction (a+b*x)/(c+d*x).
pattern106 = Pattern(Integral(WC('u', S(1))*log(((x_*WC('b', S(1)) + WC('a', S(0)))**WC('n1', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**n2_*WC('e1', S(1)))**WC('n', S(1))*WC('e', S(1)))**WC('p', S(1))/(x_**S(2)*WC('h', S(1)) + x_*WC('g', S(1)) + WC('f', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda e1, x: FreeQ(e1, x)), CustomConstraint(lambda n1, x: FreeQ(n1, x)), CustomConstraint(lambda n2, n1: ZeroQ(n1 + n2)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda c, f, d, a, h, b: ZeroQ(-a*c*h + b*d*f)), CustomConstraint(lambda g, c, d, a, h, b: ZeroQ(-a*d*h - b*c*h + b*d*g)))
rule106 = ReplacementRule(pattern106, lambda g, c, n, e1, a, d, f, x, n1, e, n2, h, u, p, b : b*d*Int(u*log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**p/((a + b*x)*(c + d*x)), x)/h)
rubi.add(rule106)
# Rule 107: same as rule 106 for a quadratic with no linear term (g == 0),
# which additionally requires a*d + b*c == 0 for the factorization to hold.
pattern107 = Pattern(Integral(WC('u', S(1))*log(((x_*WC('b', S(1)) + WC('a', S(0)))**WC('n1', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**n2_*WC('e1', S(1)))**WC('n', S(1))*WC('e', S(1)))**WC('p', S(1))/(x_**S(2)*WC('h', S(1)) + WC('f', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda e1, x: FreeQ(e1, x)), CustomConstraint(lambda n1, x: FreeQ(n1, x)), CustomConstraint(lambda n2, n1: ZeroQ(n1 + n2)), CustomConstraint(lambda d, b, c, a: NonzeroQ(-a*d + b*c)), CustomConstraint(lambda c, f, d, a, h, b: ZeroQ(-a*c*h + b*d*f)), CustomConstraint(lambda d, b, c, a: ZeroQ(a*d + b*c)))
rule107 = ReplacementRule(pattern107, lambda c, n, e1, a, d, f, x, n1, e, n2, h, u, p, b : b*d*Int(u*log(e*(e1*(a + b*x)**n1*(c + d*x)**(-n1))**n)**p/((a + b*x)*(c + d*x)), x)/h)
rubi.add(rule107)
# Rules 108-109: integrate log(e*(e1*(a+b*x)^n1*(c+d*x)^n2)^n) over a general
# quadratic (rule 108) or an x^2-only quadratic (rule 109) denominator by
# parts: u = IntHide[1/quadratic] is the antiderivative of the denominator's
# reciprocal, and the derivative of the log contributes n*(b*c - a*d)/((a+b*x)*(c+d*x)).
pattern108 = Pattern(Integral(log(((x_*WC('b', S(1)) + WC('a', S(0)))**WC('n1', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**n2_*WC('e1', S(1)))**WC('n', S(1))*WC('e', S(1)))/(f_ + x_**S(2)*WC('h', S(1)) + x_*WC('g', S(1))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda e1, x: FreeQ(e1, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda n1, x: FreeQ(n1, x)), CustomConstraint(lambda n2, n1: ZeroQ(n1 + n2)), )
def With108(g, c, n, e1, a, d, x, n2, n1, e, f, h, b):
    """Apply rule 108: integration by parts with u = Int[1/(f+g*x+h*x^2)]."""
    u = IntHide(1/(f + g*x + h*x**S(2)), x)
    return -n*(-a*d + b*c)*Int(u/((a + b*x)*(c + d*x)), x) + u*log(e*(e1*(a + b*x)**n1*(c + d*x)**n2)**n)
rule108 = ReplacementRule(pattern108, lambda g, c, n, e1, a, d, x, n2, n1, e, f, h, b : With108(g, c, n, e1, a, d, x, n2, n1, e, f, h, b))
rubi.add(rule108)
# Rule 109: same as rule 108 for a quadratic with no linear term.
pattern109 = Pattern(Integral(log(((x_*WC('b', S(1)) + WC('a', S(0)))**WC('n1', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**n2_*WC('e1', S(1)))**WC('n', S(1))*WC('e', S(1)))/(f_ + x_**S(2)*WC('h', S(1))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda e1, x: FreeQ(e1, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda h, x: FreeQ(h, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda n1, x: FreeQ(n1, x)), CustomConstraint(lambda n2, n1: ZeroQ(n1 + n2)), )
def With109(c, n, e1, a, d, x, n2, n1, e, f, h, b):
    """Apply rule 109: integration by parts with u = Int[1/(f+h*x^2)]."""
    u = IntHide(1/(f + h*x**S(2)), x)
    return -n*(-a*d + b*c)*Int(u/((a + b*x)*(c + d*x)), x) + u*log(e*(e1*(a + b*x)**n1*(c + d*x)**n2)**n)
rule109 = ReplacementRule(pattern109, lambda c, n, e1, a, d, x, n2, n1, e, f, h, b : With109(c, n, e1, a, d, x, n2, n1, e, f, h, b))
rubi.add(rule109)
# Rule 110: expand a rational function RFx against log(...)^p term by term
# (ExpandIntegrand) and integrate the resulting sum.
# NOTE(review): the final constraint `lambda u, x: SumQ(u)` refers to `u`, which
# is not a variable of pattern110 -- `u` is only computed inside With110.  This
# looks like an artifact of the rule generator (the SumQ check belongs to the
# With expansion); confirm against the Rubi source rule before relying on it.
pattern110 = Pattern(Integral(RFx_*log(((x_*WC('b', S(1)) + WC('a', S(0)))**WC('n1', S(1))*(x_*WC('d', S(1)) + WC('c', S(0)))**n2_*WC('e1', S(1)))**WC('n', S(1))*WC('e', S(1)))**WC('p', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda e1, x: FreeQ(e1, x)), CustomConstraint(lambda n1, x: FreeQ(n1, x)), CustomConstraint(lambda n2, n1: ZeroQ(n1 + n2)), CustomConstraint(lambda RFx, x: RationalFunctionQ(RFx, x)), CustomConstraint(lambda p: PositiveIntegerQ(p)), CustomConstraint(lambda u, x: SumQ(u)))
def With110(c, n, e1, a, d, x, n2, n1, e, RFx, p, b):
    """Apply rule 110: expand the integrand and integrate the sum term by term."""
    u = ExpandIntegrand(log(e*(e1*(a + b*x)**n1*(c + d*x)**n2)**n)**p, RFx, x)
    return Int(u, x)
rule110 = ReplacementRule(pattern110, lambda c, n, e1, a, d, x, n2, n1, e, RFx, p, b : With110(c, n, e1, a, d, x, n2, n1, e, RFx, p, b))
rubi.add(rule110)
# Rule 111: normalize log of a quotient of linears -- rewrite v (any quotient
# of linears not already in matched form) as (lst[1] + lst[2]*x)/(lst[3] + lst[4]*x)
# using QuotientOfLinearsParts, then retry the integral in canonical form.
# NOTE(review): the final constraint references `lst`, which is not a pattern
# variable -- `lst` is only computed inside With111.  Generator artifact; the
# guard (p == 1 and lst[3] == 0) presumably belongs inside the With step.
pattern111 = Pattern(Integral(WC('u', S(1))*log(v_)**WC('p', S(1)), x_), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda x, v: QuotientOfLinearsQ(v, x)), CustomConstraint(lambda x, v: Not(QuotientOfLinearsMatchQ(v, x))), CustomConstraint(lambda x, lst, p, u: Not(OneQ(p) & ZeroQ(Part(lst, S(3))))))
def With111(p, x, v, u):
    """Apply rule 111: canonicalize v into an explicit linear-fraction log."""
    lst = QuotientOfLinearsParts(v, x)
    return Int(u*log((x*Part(lst, S(2)) + Part(lst, S(1)))/(x*Part(lst, S(4)) + Part(lst, S(3))))**p, x)
rule111 = ReplacementRule(pattern111, lambda p, x, v, u : With111(p, x, v, u))
rubi.add(rule111)
# Rule 112: Int[log(c*(a + b*x^n)^p), x] -- integrate by parts, producing
# x*log(...) minus b*n*p * Int[x^n/(a + b*x^n)].
pattern112 = Pattern(Integral(log((x_**n_*WC('b', S(1)) + WC('a', S(0)))**WC('p', S(1))*WC('c', S(1))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)))
rule112 = ReplacementRule(pattern112, lambda c, x, a, n, p, b : -b*n*p*Int(x**n/(a + b*x**n), x) + x*log(c*(a + b*x**n)**p))
rubi.add(rule112)
# Rule 113: normalize log(c*v^p) where v is a binomial not already in matched
# form -- expand v to a sum and retry rule 112.
pattern113 = Pattern(Integral(log(v_**WC('p', S(1))*WC('c', S(1))), x_), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda x, v: BinomialQ(v, x)), CustomConstraint(lambda x, v: Not(BinomialMatchQ(v, x))))
rule113 = ReplacementRule(pattern113, lambda p, x, c, v : Int(log(c*ExpandToSum(v, x)**p), x))
rubi.add(rule113)
# Rule 114: Int[(a + b*log(c*(d + e*x^n)^p))/(f + g*x), x] -- integrate by
# parts against log(f + g*x)/g.
pattern114 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log((x_**n_*WC('e', S(1)) + WC('d', S(0)))**WC('p', S(1))*WC('c', S(1))))/(x_*WC('g', S(1)) + WC('f', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)))
rule114 = ReplacementRule(pattern114, lambda g, c, f, d, a, x, n, e, p, b : -b*e*n*p*Int(x**(n + S(-1))*log(f + g*x)/(d + e*x**n), x)/g + (a + b*log(c*(d + e*x**n)**p))*log(f + g*x)/g)
rubi.add(rule114)
# Rule 115: generalization of rule 114 to (f + g*x)^m with m != -1 -- parts
# against (f + g*x)^(m+1)/(g*(m+1)).
pattern115 = Pattern(Integral((x_*WC('g', S(1)) + WC('f', S(0)))**WC('m', S(1))*(WC('a', S(0)) + WC('b', S(1))*log((x_**n_*WC('e', S(1)) + WC('d', S(0)))**WC('p', S(1))*WC('c', S(1)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda m: NonzeroQ(m + S(1))))
rule115 = ReplacementRule(pattern115, lambda m, g, c, f, d, a, x, n, e, p, b : -b*e*n*p*Int(x**(n + S(-1))*(f + g*x)**(m + S(1))/(d + e*x**n), x)/(g*(m + S(1))) + (a + b*log(c*(d + e*x**n)**p))*(f + g*x)**(m + S(1))/(g*(m + S(1))))
rubi.add(rule115)
# Rule 116: normalize u^m*(a + b*log(c*v^p)) where u is linear and v is a
# binomial, neither already in matched form -- expand both and retry.
pattern116 = Pattern(Integral(u_**WC('m', S(1))*(WC('a', S(0)) + WC('b', S(1))*log(v_**WC('p', S(1))*WC('c', S(1)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda x, u: LinearQ(u, x)), CustomConstraint(lambda x, v: BinomialQ(v, x)), CustomConstraint(lambda x, v, u: Not(BinomialMatchQ(v, x) & LinearMatchQ(u, x))))
rule116 = ReplacementRule(pattern116, lambda m, c, x, a, u, p, v, b : Int((a + b*log(c*ExpandToSum(v, x)**p))*ExpandToSum(u, x)**m, x))
rubi.add(rule116)
# Rule 117: Int[(a + b*log(c*(d + e*x^n)^p))*asin(f + g*x)^m, x] for positive
# integer m -- parts with w = Int[asin(f + g*x)^m].
pattern117 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log((x_**n_*WC('e', S(1)) + WC('d', S(0)))**WC('p', S(1))*WC('c', S(1))))*asin(x_*WC('g', S(1)) + WC('f', S(0)))**WC('m', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda m: PositiveIntegerQ(m)), )
def With117(m, g, c, f, a, d, x, n, e, p, b):
    """Apply rule 117: integration by parts with w = Int[asin(f+g*x)^m]."""
    w = IntHide(asin(f + g*x)**m, x)
    return -b*e*n*p*Int(SimplifyIntegrand(w*x**(n + S(-1))/(d + e*x**n), x), x) + Dist(a + b*log(c*(d + e*x**n)**p), w, x)
rule117 = ReplacementRule(pattern117, lambda m, g, c, f, a, d, x, n, e, p, b : With117(m, g, c, f, a, d, x, n, e, p, b))
rubi.add(rule117)
# Rule 118: Int[(a + b*log(c*(d + e*x^2)^p))/(f + g*x^2), x] -- parts with
# u = Int[1/(f + g*x^2)].
pattern118 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log((x_**S(2)*WC('e', S(1)) + WC('d', S(0)))**WC('p', S(1))*WC('c', S(1))))/(x_**S(2)*WC('g', S(1)) + WC('f', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), )
def With118(g, c, f, d, a, x, e, p, b):
    """Apply rule 118: integration by parts with u = Int[1/(f+g*x^2)]."""
    u = IntHide(1/(f + g*x**S(2)), x)
    return -S(2)*b*e*p*Int(u*x/(d + e*x**S(2)), x) + u*(a + b*log(c*(d + e*x**S(2))**p))
rule118 = ReplacementRule(pattern118, lambda g, c, f, d, a, x, e, p, b : With118(g, c, f, d, a, x, e, p, b))
rubi.add(rule118)
# Rule 119: Int[(a + b*log(c*(d + e*x^2)^p))^n, x] for positive integer n --
# parts, lowering the outer power n by one.
pattern119 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log((d_ + x_**S(2)*WC('e', S(1)))**WC('p', S(1))*WC('c', S(1))))**n_, x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda n: PositiveIntegerQ(n)))
rule119 = ReplacementRule(pattern119, lambda c, x, a, d, n, e, p, b : -S(2)*b*e*n*p*Int(x**S(2)*(a + b*log(c*(d + e*x**S(2))**p))**(n + S(-1))/(d + e*x**S(2)), x) + x*(a + b*log(c*(d + e*x**S(2))**p))**n)
rubi.add(rule119)
# Rule 120: x^m * (...)^n with odd m -- substitute x -> sqrt(x) (i.e. t = x^2),
# turning the x^2 argument into a linear one.
pattern120 = Pattern(Integral(x_**WC('m', S(1))*(WC('a', S(0)) + WC('b', S(1))*log((d_ + x_**S(2)*WC('e', S(1)))**WC('p', S(1))*WC('c', S(1))))**n_, x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda n: PositiveIntegerQ(n)), CustomConstraint(lambda m: IntegerQ(m/S(2) + S(-1)/2)))
rule120 = ReplacementRule(pattern120, lambda m, c, x, a, d, n, e, p, b : Subst(Int(x**(m/S(2) + S(-1)/2)*(a + b*log(c*(d + e*x)**p))**n, x), x, x**S(2))/S(2))
rubi.add(rule120)
# Rule 121: general m (not odd) -- parts against x^(m+1)/(m+1), lowering n.
pattern121 = Pattern(Integral(x_**WC('m', S(1))*(WC('a', S(0)) + WC('b', S(1))*log((d_ + x_**S(2)*WC('e', S(1)))**WC('p', S(1))*WC('c', S(1))))**n_, x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda n: PositiveIntegerQ(n)), CustomConstraint(lambda m: Not(IntegerQ(m/S(2) + S(-1)/2))))
rule121 = ReplacementRule(pattern121, lambda m, c, x, a, d, n, e, p, b : -S(2)*b*e*n*p*Int(x**(m + S(2))*(a + b*log(c*(d + e*x**S(2))**p))**(n + S(-1))/(d + e*x**S(2)), x)/(m + S(1)) + x**(m + S(1))*(a + b*log(c*(d + e*x**S(2))**p))**n/(m + S(1)))
rubi.add(rule121)
# Rule 122: Int[u*log(v), x] when u is proportional to d/dx of (1-v) relative
# to v (DerivativeDivides succeeds) -- the antiderivative is w*PolyLog(2, 1-v).
# NOTE(review): the constraint `lambda w, v: Not(FalseQ(w))` refers to `w`,
# which is only computed inside With122, not a pattern variable.  Generator
# artifact -- the FalseQ check guards the DerivativeDivides result; confirm
# against the Rubi source rule.
pattern122 = Pattern(Integral(u_*log(v_), x_), CustomConstraint(lambda w, v: Not(FalseQ(w))))
def With122(x, v, u):
    """Apply rule 122: detect u = w*D[1-v, x]/v and return w*PolyLog(2, 1-v)."""
    w = DerivativeDivides(v, u*(-v + S(1)), x)
    return w*PolyLog(S(2), -v + S(1))
rule122 = ReplacementRule(pattern122, lambda x, v, u : With122(x, v, u))
rubi.add(rule122)
# Rule 123: Int[w*(a + b*log(u))*log(v), x] -- as rule 122 but with an extra
# inverse-function-free factor (a + b*log(u)); parts produces a PolyLog(2, 1-v)
# term and a correction integral involving D[u, x]/u.
# NOTE(review): the constraint referencing `z` has the same generator artifact
# as rule 122 (`z` is only bound inside With123).
pattern123 = Pattern(Integral(w_*(WC('a', S(0)) + WC('b', S(1))*log(u_))*log(v_), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda x, u: InverseFunctionFreeQ(u, x)), CustomConstraint(lambda z, x, v, b, a, u: Not(FalseQ(z))))
def With123(x, a, w, u, v, b):
    """Apply rule 123: PolyLog step of rule 122 plus a log(u) correction term."""
    z = DerivativeDivides(v, w*(-v + S(1)), x)
    return -b*Int(SimplifyIntegrand(z*D(u, x)*PolyLog(S(2), -v + S(1))/u, x), x) + z*(a + b*log(u))*PolyLog(S(2), -v + S(1))
rule123 = ReplacementRule(pattern123, lambda x, a, w, u, v, b : With123(x, a, w, u, v, b))
rubi.add(rule123)
pattern124 = Pattern(Integral(log((a_ + (x_*WC('e', S(1)) + WC('d', S(0)))**n_*WC('b', S(1)))**WC('p', S(1))*WC('c', S(1))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda n: RationalQ(n)), CustomConstraint(lambda n: Less(n, S(0))))
rule124 = ReplacementRule(pattern124, lambda c, x, d, a, n, e, p, b : -b*n*p*Int(1/(a*(d + e*x)**(-n) + b), x) + (d + e*x)*log(c*(a + b*(d + e*x)**n)**p)/e)
rubi.add(rule124)
pattern125 = Pattern(Integral(log((a_ + (x_*WC('e', S(1)) + WC('d', S(0)))**WC('n', S(1))*WC('b', S(1)))**WC('p', S(1))*WC('c', S(1))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda n: Not(RationalQ(n) & Less(n, S(0)))))
rule125 = ReplacementRule(pattern125, lambda n, c, d, a, x, e, p, b : a*n*p*Int(1/(a + b*(d + e*x)**n), x) - n*p*x + (d + e*x)*log(c*(a + b*(d + e*x)**n)**p)/e)
rubi.add(rule125)
# Rule 126: Int[(a + b*log(c*(d + e/(f + g*x))^p))^n, x] for positive integer
# n -- substitute t = 1/(f + g*x) to reduce to the linear-argument case.
pattern126 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log((d_ + WC('e', S(1))/(x_*WC('g', S(1)) + WC('f', S(0))))**WC('p', S(1))*WC('c', S(1))))**WC('n', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda n: PositiveIntegerQ(n)))
rule126 = ReplacementRule(pattern126, lambda g, c, f, a, n, d, x, e, p, b : -b*e*n*p*Subst(Int((a + b*log(c*(d + e*x)**p))**(n + S(-1))/x, x), x, 1/(f + g*x))/(d*g) + (a + b*log(c*(d + e/(f + g*x))**p))**n*(d*(f + g*x) + e)/(d*g))
rubi.add(rule126)
# Rule 127: Int[(a + b*log(c*RFx^p))^n, x] for a rational function RFx and
# positive integer n -- parts against x, differentiating the log factor.
pattern127 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log(RFx_**WC('p', S(1))*WC('c', S(1))))**WC('n', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda RFx, x: RationalFunctionQ(RFx, x)), CustomConstraint(lambda n: PositiveIntegerQ(n)))
rule127 = ReplacementRule(pattern127, lambda n, c, a, x, RFx, p, b : -b*n*p*Int(SimplifyIntegrand(x*(a + b*log(RFx**p*c))**(n + S(-1))*D(RFx, x)/RFx, x), x) + x*(a + b*log(RFx**p*c))**n)
rubi.add(rule127)
# Rule 128: Int[(a + b*log(c*RFx^p))^n/(d + e*x), x] -- parts against
# log(d + e*x)/e, differentiating the rational-function log.
pattern128 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log(RFx_**WC('p', S(1))*WC('c', S(1))))**WC('n', S(1))/(x_*WC('e', S(1)) + WC('d', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda RFx, x: RationalFunctionQ(RFx, x)), CustomConstraint(lambda n: PositiveIntegerQ(n)))
rule128 = ReplacementRule(pattern128, lambda n, c, a, d, x, RFx, e, p, b : -b*n*p*Int((a + b*log(RFx**p*c))**(n + S(-1))*D(RFx, x)*log(d + e*x)/RFx, x)/e + (a + b*log(RFx**p*c))**n*log(d + e*x)/e)
rubi.add(rule128)
# Rule 129: (d + e*x)^m variant of rule 128, m != -1, with m an integer or
# n == 1 -- parts against (d + e*x)^(m+1)/(e*(m+1)).
pattern129 = Pattern(Integral((x_*WC('e', S(1)) + WC('d', S(0)))**WC('m', S(1))*(WC('a', S(0)) + WC('b', S(1))*log(RFx_**WC('p', S(1))*WC('c', S(1))))**WC('n', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda RFx, x: RationalFunctionQ(RFx, x)), CustomConstraint(lambda n: PositiveIntegerQ(n)), CustomConstraint(lambda m, n: IntegerQ(m) | Equal(n, S(1))), CustomConstraint(lambda m: NonzeroQ(m + S(1))))
rule129 = ReplacementRule(pattern129, lambda m, n, c, a, d, x, RFx, e, p, b : -b*n*p*Int(SimplifyIntegrand((a + b*log(RFx**p*c))**(n + S(-1))*(d + e*x)**(m + S(1))*D(RFx, x)/RFx, x), x)/(e*(m + S(1))) + (a + b*log(RFx**p*c))**n*(d + e*x)**(m + S(1))/(e*(m + S(1))))
rubi.add(rule129)
pattern130 = Pattern(Integral(log(RFx_**WC('n', S(1))*WC('c', S(1)))/(d_ + x_**S(2)*WC('e', S(1))), x_), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda RFx, x: RationalFunctionQ(RFx, x)), CustomConstraint(lambda RFx, x: Not(PolynomialQ(RFx, x))), )
def With130(n, c, d, x, RFx, e):
    """Integrate log(c*RFx**n)/(d + e*x**2) by parts, RFx a rational function of x."""
    # IntHide integrates 1/(d + e*x^2) without simplifying/expanding the antiderivative.
    u = IntHide(1/(d + e*x**S(2)), x)
    # Integration by parts: d/dx log(c*RFx^n) = n*D(RFx, x)/RFx, so the boundary
    # term is u*log(c*RFx^n) and the remaining integral carries u*D(RFx,x)/RFx.
    return -n*Int(SimplifyIntegrand(u*D(RFx, x)/RFx, x), x) + u*log(RFx**n*c)
rule130 = ReplacementRule(pattern130, lambda n, c, d, x, RFx, e : With130(n, c, d, x, RFx, e))
rubi.add(rule130)
pattern131 = Pattern(Integral(log(Px_**WC('n', S(1))*WC('c', S(1)))/Qx_, x_), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda Qx, x, Px: QuadraticQ(List(Qx, Px), x)), CustomConstraint(lambda Qx, x, Px: ZeroQ(D(Px/Qx, x))), )
def With131(n, x, c, Qx, Px):
    """Integrate log(c*Px**n)/Qx by parts, where Px and Qx are proportional quadratics
    (the pattern constraints require QuadraticQ([Qx, Px], x) and D(Px/Qx, x) == 0)."""
    # Antiderivative of 1/Qx, kept unexpanded.
    u = IntHide(1/Qx, x)
    # Parts: boundary term u*log(c*Px^n) minus the integral of u * n*D(Px,x)/Px.
    return -n*Int(SimplifyIntegrand(u*D(Px, x)/Px, x), x) + u*log(Px**n*c)
rule131 = ReplacementRule(pattern131, lambda n, x, c, Qx, Px : With131(n, x, c, Qx, Px))
rubi.add(rule131)
pattern132 = Pattern(Integral(RGx_*(WC('a', S(0)) + WC('b', S(1))*log(RFx_**WC('p', S(1))*WC('c', S(1))))**WC('n', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda RFx, x: RationalFunctionQ(RFx, x)), CustomConstraint(lambda x, RGx: RationalFunctionQ(RGx, x)), CustomConstraint(lambda n: PositiveIntegerQ(n)), CustomConstraint(lambda u, x: SumQ(u)))
def With132(n, c, a, x, RFx, RGx, p, b):
    """Integrate RGx*(a + b*log(c*RFx**p))**n by distributing RGx over the
    expanded power and integrating the resulting sum term by term."""
    # Three-argument ExpandIntegrand: expand (a + b*log(c*RFx^p))^n against RGx.
    u = ExpandIntegrand((a + b*log(RFx**p*c))**n, RGx, x)
    return Int(u, x)
rule132 = ReplacementRule(pattern132, lambda n, c, a, x, RFx, RGx, p, b : With132(n, c, a, x, RFx, RGx, p, b))
rubi.add(rule132)
pattern133 = Pattern(Integral(RGx_*(WC('a', S(0)) + WC('b', S(1))*log(RFx_**WC('p', S(1))*WC('c', S(1))))**WC('n', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda RFx, x: RationalFunctionQ(RFx, x)), CustomConstraint(lambda x, RGx: RationalFunctionQ(RGx, x)), CustomConstraint(lambda n: PositiveIntegerQ(n)), CustomConstraint(lambda u, x: SumQ(u)))
def With133(n, c, a, x, RFx, RGx, p, b):
    """Fallback for RGx*(a + b*log(c*RFx**p))**n: expand the whole product at
    once (two-argument ExpandIntegrand) and integrate the resulting sum."""
    u = ExpandIntegrand(RGx*(a + b*log(RFx**p*c))**n, x)
    return Int(u, x)
rule133 = ReplacementRule(pattern133, lambda n, c, a, x, RFx, RGx, p, b : With133(n, c, a, x, RFx, RGx, p, b))
rubi.add(rule133)
pattern134 = Pattern(Integral(RFx_*(WC('a', S(0)) + WC('b', S(1))*log(u_)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda RFx, x: RationalFunctionQ(RFx, x)), CustomConstraint(lambda x, lst: Not(FalseQ(lst))))
def With134(x, a, RFx, u, b):
    """Integrate RFx*(a + b*log(u)) by substituting away a fractional power of a
    linear subexpression found inside the integrand."""
    # lst packs the transformed integrand plus substitution data; the pattern's
    # Not(FalseQ(lst)) constraint guarantees the search succeeded here.
    lst = SubstForFractionalPowerOfLinear(RFx*(a + b*log(u)), x)
    # Undo the substitution: scale by Part(lst,2)*Part(lst,4) and substitute
    # x -> Part(lst,3)^(1/Part(lst,2)) into the integral of Part(lst,1).
    # NOTE(review): exact slot meanings come from SubstForFractionalPowerOfLinear
    # — confirm against its definition elsewhere in the module.
    return Part(lst, S(2))*Part(lst, S(4))*Subst(Int(Part(lst, S(1)), x), x, Part(lst, S(3))**(1/Part(lst, S(2))))
rule134 = ReplacementRule(pattern134, lambda x, a, RFx, u, b : With134(x, a, RFx, u, b))
rubi.add(rule134)
pattern135 = Pattern(Integral((x_*WC('g', S(1)) + WC('f', S(0)))**WC('m', S(1))*log((F_**((x_*WC('b', S(1)) + WC('a', S(0)))*WC('c', S(1))))**WC('n', S(1))*WC('e', S(1)) + S(1)), x_), CustomConstraint(lambda F, x: FreeQ(F, x)), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda m: RationalQ(m)), CustomConstraint(lambda m: Greater(m, S(0))))
rule135 = ReplacementRule(pattern135, lambda m, g, n, c, a, f, F, x, e, b : g*m*Int((f + g*x)**(m + S(-1))*PolyLog(S(2), -e*(F**(c*(a + b*x)))**n), x)/(b*c*n*log(F)) - (f + g*x)**m*PolyLog(S(2), -e*(F**(c*(a + b*x)))**n)/(b*c*n*log(F)))
rubi.add(rule135)
pattern136 = Pattern(Integral((x_*WC('g', S(1)) + WC('f', S(0)))**WC('m', S(1))*log(d_ + (F_**((x_*WC('b', S(1)) + WC('a', S(0)))*WC('c', S(1))))**WC('n', S(1))*WC('e', S(1))), x_), CustomConstraint(lambda F, x: FreeQ(F, x)), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda m: RationalQ(m)), CustomConstraint(lambda m: Greater(m, S(0))), CustomConstraint(lambda d: NonzeroQ(d + S(-1))))
rule136 = ReplacementRule(pattern136, lambda m, g, n, c, a, f, d, F, x, e, b : Int((f + g*x)**m*log(S(1) + e*(F**(c*(a + b*x)))**n/d), x) - (f + g*x)**(m + S(1))*log(S(1) + e*(F**(c*(a + b*x)))**n/d)/(g*(m + S(1))) + (f + g*x)**(m + S(1))*log(d + e*(F**(c*(a + b*x)))**n)/(g*(m + S(1))))
rubi.add(rule136)
pattern137 = Pattern(Integral(log(x_*WC('e', S(1)) + sqrt(x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))*WC('f', S(1)) + WC('d', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda e, c, f: ZeroQ(-c*f**S(2) + e**S(2))))
rule137 = ReplacementRule(pattern137, lambda f, c, d, a, x, e, b : f**S(2)*(-S(2)*a*c + b**S(2)/S(2))*Int(x/(-f*sqrt(a + b*x + c*x**S(2))*(-S(2)*a*e + b*d + x*(-b*e + S(2)*c*d)) + (-b*f**S(2) + S(2)*d*e)*(a + b*x + c*x**S(2))), x) + x*log(d + e*x + f*sqrt(a + b*x + c*x**S(2))))
rubi.add(rule137)
pattern138 = Pattern(Integral(log(x_*WC('e', S(1)) + sqrt(x_**S(2)*WC('c', S(1)) + WC('a', S(0)))*WC('f', S(1)) + WC('d', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda e, c, f: ZeroQ(-c*f**S(2) + e**S(2))))
rule138 = ReplacementRule(pattern138, lambda c, f, d, a, x, e : -a*c*f**S(2)*Int(x/(d*e*(a + c*x**S(2)) + f*sqrt(a + c*x**S(2))*(a*e - c*d*x)), x) + x*log(d + e*x + f*sqrt(a + c*x**S(2))))
rubi.add(rule138)
pattern139 = Pattern(Integral((x_*WC('g', S(1)))**WC('m', S(1))*log(x_*WC('e', S(1)) + sqrt(x_**S(2)*WC('c', S(1)) + x_*WC('b', S(1)) + WC('a', S(0)))*WC('f', S(1)) + WC('d', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda e, c, f: ZeroQ(-c*f**S(2) + e**S(2))), CustomConstraint(lambda m: NonzeroQ(m + S(1))), CustomConstraint(lambda m: IntegerQ(S(2)*m)))
rule139 = ReplacementRule(pattern139, lambda m, g, f, c, d, a, x, e, b : f**S(2)*(-S(4)*a*c + b**S(2))*Int((g*x)**(m + S(1))/(-f*sqrt(a + b*x + c*x**S(2))*(-S(2)*a*e + b*d + x*(-b*e + S(2)*c*d)) + (-b*f**S(2) + S(2)*d*e)*(a + b*x + c*x**S(2))), x)/(S(2)*g*(m + S(1))) + (g*x)**(m + S(1))*log(d + e*x + f*sqrt(a + b*x + c*x**S(2)))/(g*(m + S(1))))
rubi.add(rule139)
pattern140 = Pattern(Integral((x_*WC('g', S(1)))**WC('m', S(1))*log(x_*WC('e', S(1)) + sqrt(x_**S(2)*WC('c', S(1)) + WC('a', S(0)))*WC('f', S(1)) + WC('d', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda e, c, f: ZeroQ(-c*f**S(2) + e**S(2))), CustomConstraint(lambda m: NonzeroQ(m + S(1))), CustomConstraint(lambda m: IntegerQ(S(2)*m)))
rule140 = ReplacementRule(pattern140, lambda m, g, c, f, d, a, x, e : -a*c*f**S(2)*Int((g*x)**(m + S(1))/(d*e*(a + c*x**S(2)) + f*sqrt(a + c*x**S(2))*(a*e - c*d*x)), x)/(g*(m + S(1))) + (g*x)**(m + S(1))*log(d + e*x + f*sqrt(a + c*x**S(2)))/(g*(m + S(1))))
rubi.add(rule140)
pattern141 = Pattern(Integral(WC('v', S(1))*log(sqrt(u_)*WC('f', S(1)) + x_*WC('e', S(1)) + WC('d', S(0))), x_), CustomConstraint(lambda g, x: FreeQ(g, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda x, u: QuadraticQ(u, x)), CustomConstraint(lambda x, u: Not(QuadraticMatchQ(u, x))), CustomConstraint(lambda x, v: ZeroQ(v + S(-1)) | MatchQ(v, Condition((x*Optional(Pattern(g, Blank)))**Optional(Pattern(m, Blank))))))
rule141 = ReplacementRule(pattern141, lambda f, x, d, e, u, v : Int(v*log(d + e*x + f*sqrt(ExpandToSum(u, x))), x))
rubi.add(rule141)
pattern142 = Pattern(Integral(log(u_), x_), CustomConstraint(lambda x, u: InverseFunctionFreeQ(u, x)))
rule142 = ReplacementRule(pattern142, lambda x, u : x*log(u) - Int(SimplifyIntegrand(x*D(u, x)/u, x), x))
rubi.add(rule142)
pattern143 = Pattern(Integral(log(u_)/(x_*WC('b', S(1)) + WC('a', S(0))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda x, u: RationalFunctionQ(D(u, x)/u, x)), CustomConstraint(lambda x, a, u: NonzeroQ(a) | Not(BinomialQ(u, x) & ZeroQ(BinomialDegree(u, x)**S(2) + S(-1)))))
rule143 = ReplacementRule(pattern143, lambda b, x, a, u : -Int(SimplifyIntegrand(D(u, x)*log(a + b*x)/u, x), x)/b + log(u)*log(a + b*x)/b)
rubi.add(rule143)
pattern144 = Pattern(Integral((x_*WC('b', S(1)) + WC('a', S(0)))**WC('m', S(1))*log(u_), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda x, u: InverseFunctionFreeQ(u, x)), CustomConstraint(lambda m: NonzeroQ(m + S(1))))
rule144 = ReplacementRule(pattern144, lambda m, x, a, u, b : (a + b*x)**(m + S(1))*log(u)/(b*(m + S(1))) - Int(SimplifyIntegrand((a + b*x)**(m + S(1))*D(u, x)/u, x), x)/(b*(m + S(1))))
rubi.add(rule144)
pattern145 = Pattern(Integral(log(u_)/Qx_, x_), CustomConstraint(lambda Qx, x: QuadraticQ(Qx, x)), CustomConstraint(lambda x, u: InverseFunctionFreeQ(u, x)), )
def With145(Qx, x, u):
    """Integrate log(u)/Qx by parts, Qx quadratic in x and u free of inverse functions."""
    # Antiderivative of 1/Qx, kept unexpanded for the parts step.
    v = IntHide(1/Qx, x)
    # Parts: v*log(u) minus the integral of v * D(u,x)/u.
    return v*log(u) - Int(SimplifyIntegrand(v*D(u, x)/u, x), x)
rule145 = ReplacementRule(pattern145, lambda Qx, x, u : With145(Qx, x, u))
rubi.add(rule145)
pattern146 = Pattern(Integral(u_**(x_*WC('a', S(1)))*log(u_), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda x, u: InverseFunctionFreeQ(u, x)))
rule146 = ReplacementRule(pattern146, lambda x, a, u : -Int(SimplifyIntegrand(u**(a*x + S(-1))*x*D(u, x), x), x) + u**(a*x)/a)
rubi.add(rule146)
pattern147 = Pattern(Integral(v_*log(u_), x_), CustomConstraint(lambda x, u: InverseFunctionFreeQ(u, x)), CustomConstraint(lambda x, w, u: InverseFunctionFreeQ(w, x)))
def With147(x, v, u):
    """Integrate v*log(u) by parts for a general factor v: w = Int(v) is the
    antiderivative, and the remaining integral carries w*D(u,x)/u."""
    w = IntHide(v, x)
    # Dist keeps log(u) distributed over w without premature simplification.
    return Dist(log(u), w, x) - Int(SimplifyIntegrand(w*D(u, x)/u, x), x)
rule147 = ReplacementRule(pattern147, lambda x, v, u : With147(x, v, u))
rubi.add(rule147)
pattern148 = Pattern(Integral(log(v_)*log(w_), x_), CustomConstraint(lambda x, v: InverseFunctionFreeQ(v, x)), CustomConstraint(lambda x, w: InverseFunctionFreeQ(w, x)))
rule148 = ReplacementRule(pattern148, lambda x, w, v : x*log(v)*log(w) - Int(SimplifyIntegrand(x*D(v, x)*log(w)/v, x), x) - Int(SimplifyIntegrand(x*D(w, x)*log(v)/w, x), x))
rubi.add(rule148)
pattern149 = Pattern(Integral(u_*log(v_)*log(w_), x_), CustomConstraint(lambda x, v: InverseFunctionFreeQ(v, x)), CustomConstraint(lambda x, w: InverseFunctionFreeQ(w, x)), CustomConstraint(lambda z, x, v, w: InverseFunctionFreeQ(z, x)))
def With149(x, w, v, u):
    """Integrate u*log(v)*log(w) by parts: with z = Int(u), differentiate each
    logarithm in turn, producing two correction integrals."""
    z = IntHide(u, x)
    return Dist(log(v)*log(w), z, x) - Int(SimplifyIntegrand(z*D(v, x)*log(w)/v, x), x) - Int(SimplifyIntegrand(z*D(w, x)*log(v)/w, x), x)
rule149 = ReplacementRule(pattern149, lambda x, w, v, u : With149(x, w, v, u))
rubi.add(rule149)
pattern150 = Pattern(Integral(log(WC('a', S(1))*log(x_**WC('n', S(1))*WC('b', S(1)))**WC('p', S(1))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)))
rule150 = ReplacementRule(pattern150, lambda n, x, a, p, b : -n*p*Int(1/log(b*x**n), x) + x*log(a*log(b*x**n)**p))
rubi.add(rule150)
pattern151 = Pattern(Integral(log(WC('a', S(1))*log(x_**WC('n', S(1))*WC('b', S(1)))**WC('p', S(1)))/x_, x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)))
rule151 = ReplacementRule(pattern151, lambda n, x, a, p, b : (-p + log(a*log(b*x**n)**p))*log(b*x**n)/n)
rubi.add(rule151)
pattern152 = Pattern(Integral(x_**WC('m', S(1))*log(WC('a', S(1))*log(x_**WC('n', S(1))*WC('b', S(1)))**WC('p', S(1))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda m, x: FreeQ(m, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda m: NonzeroQ(m + S(1))))
rule152 = ReplacementRule(pattern152, lambda m, n, x, a, p, b : -n*p*Int(x**m/log(b*x**n), x)/(m + S(1)) + x**(m + S(1))*log(a*log(b*x**n)**p)/(m + S(1)))
rubi.add(rule152)
pattern153 = Pattern(Integral((WC('A', S(0)) + WC('B', S(1))*log(x_*WC('d', S(1)) + WC('c', S(0))))/sqrt(a_ + WC('b', S(1))*log(x_*WC('d', S(1)) + WC('c', S(0)))), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda A, x: FreeQ(A, x)), CustomConstraint(lambda B, x: FreeQ(B, x)), CustomConstraint(lambda b, B, A, a: NonzeroQ(A*b - B*a)))
rule153 = ReplacementRule(pattern153, lambda A, c, x, d, a, B, b : B*Int(sqrt(a + b*log(c + d*x)), x)/b + (A*b - B*a)*Int(1/sqrt(a + b*log(c + d*x)), x)/b)
rubi.add(rule153)
pattern154 = Pattern(Integral(f_**(WC('a', S(1))*log(u_)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda f, x: FreeQ(f, x)))
rule154 = ReplacementRule(pattern154, lambda x, f, a, u : Int(u**(a*log(f)), x))
rubi.add(rule154)
pattern155 = Pattern(Integral(u_, x_), CustomConstraint(lambda u: NonsumQ(u)), CustomConstraint(lambda x, lst: Not(FalseQ(lst))))
def With155(x, u):
    """Last-resort rule: rewrite the integrand as a function of log(...) and
    substitute t = log(...). (The corresponding rubi.add is commented out below.)"""
    # lst = FunctionOfLog result; the pattern's Not(FalseQ(lst)) constraint
    # guarantees the decomposition succeeded.
    lst = FunctionOfLog(u*x, x)
    # Substitute x -> log(Part(lst,2)) into Part(lst,1), scaled by 1/Part(lst,3).
    # NOTE(review): slot semantics follow FunctionOfLog's return convention —
    # confirm against its definition elsewhere in the module.
    return Subst(Int(Part(lst, S(1)), x), x, log(Part(lst, S(2))))/Part(lst, S(3))
rule155 = ReplacementRule(pattern155, lambda x, u : With155(x, u))
#rubi.add(rule155)
pattern156 = Pattern(Integral(WC('u', S(1))*log(Gamma(v_)), x_))
rule156 = ReplacementRule(pattern156, lambda x, v, u : (-LogGamma(v) + log(Gamma(v)))*Int(u, x) + Int(u*LogGamma(v), x))
rubi.add(rule156)
pattern157 = Pattern(Integral((w_*WC('a', S(1)) + w_*WC('b', S(1))*log(v_)**WC('n', S(1)))**WC('p', S(1))*WC('u', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p: IntegerQ(p)))
rule157 = ReplacementRule(pattern157, lambda n, x, a, w, u, p, v, b : Int(u*w**p*(a + b*log(v)**n)**p, x))
rubi.add(rule157)
pattern158 = Pattern(Integral((WC('a', S(0)) + WC('b', S(1))*log(((x_*WC('f', S(1)) + WC('e', S(0)))**WC('p', S(1))*WC('d', S(1)))**WC('q', S(1))*WC('c', S(1))))**n_*WC('u', S(1)), x_), CustomConstraint(lambda a, x: FreeQ(a, x)), CustomConstraint(lambda b, x: FreeQ(b, x)), CustomConstraint(lambda c, x: FreeQ(c, x)), CustomConstraint(lambda d, x: FreeQ(d, x)), CustomConstraint(lambda e, x: FreeQ(e, x)), CustomConstraint(lambda f, x: FreeQ(f, x)), CustomConstraint(lambda n, x: FreeQ(n, x)), CustomConstraint(lambda p, x: FreeQ(p, x)), CustomConstraint(lambda q, x: FreeQ(q, x)), CustomConstraint(lambda x, u: AlgebraicFunctionQ(u, x)))
rule158 = ReplacementRule(pattern158, lambda c, f, d, a, q, x, n, e, u, p, b : Int(u*(a + b*log(c*(d*(e + f*x)**p)**q))**n, x))
rubi.add(rule158)
return rubi
| 200.520216
| 6,553
| 0.597597
| 27,440
| 148,786
| 3.213448
| 0.03309
| 0.389467
| 0.35761
| 0.038105
| 0.806764
| 0.791669
| 0.779568
| 0.766651
| 0.759847
| 0.752736
| 0
| 0.030893
| 0.133689
| 148,786
| 741
| 6,554
| 200.790823
| 0.653206
| 0.000114
| 0
| 0.02773
| 0
| 0
| 0.008974
| 0.000208
| 0
| 0
| 0
| 0
| 0
| 1
| 0.051993
| false
| 0
| 0.019064
| 0
| 0.12305
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
0c4340774c10c7e2d8f9c9a59b9127d2922352d3
| 18,096
|
py
|
Python
|
sdk/python/pulumi_commercetools/shipping_method.py
|
unplatform-io/pulumi-commercetools
|
b81b998f99995c2ab7eb05a45220d414ae414da3
|
[
"ECL-2.0",
"Apache-2.0"
] | 1
|
2021-03-05T10:13:36.000Z
|
2021-03-05T10:13:36.000Z
|
sdk/python/pulumi_commercetools/shipping_method.py
|
unplatform-io/pulumi-commercetools
|
b81b998f99995c2ab7eb05a45220d414ae414da3
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2021-09-17T07:23:39.000Z
|
2021-09-20T12:34:51.000Z
|
sdk/python/pulumi_commercetools/shipping_method.py
|
unplatform-io/pulumi-commercetools
|
b81b998f99995c2ab7eb05a45220d414ae414da3
|
[
"ECL-2.0",
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from . import _utilities
__all__ = ['ShippingMethodArgs', 'ShippingMethod']
@pulumi.input_type
class ShippingMethodArgs:
    # NOTE: generated Pulumi input type. @pulumi.input_type reflects over the
    # __init__ signature and the @property accessors below, so their structure
    # must stay exactly as emitted by the code generator.
    def __init__(__self__, *,
                 description: Optional[pulumi.Input[str]] = None,
                 is_default: Optional[pulumi.Input[bool]] = None,
                 key: Optional[pulumi.Input[str]] = None,
                 localized_description: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 predicate: Optional[pulumi.Input[str]] = None,
                 tax_category_id: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a ShippingMethod resource.
        :param pulumi.Input[bool] is_default: One shipping method in a project can be default
        :param pulumi.Input[str] key: User-specific unique identifier for the shipping method
        :param pulumi.Input[Mapping[str, Any]] localized_description: [LocalizedString](https://docs.commercetools.com/api/types#localizedstring)
        :param pulumi.Input[str] predicate: A Cart predicate which can be used to more precisely select a shipping method for a cart
        :param pulumi.Input[str] tax_category_id: ID of a [Tax Category](https://docs.commercetools.com/api/projects/taxCategories#taxcategory)
        """
        # Only record explicitly supplied values; omitted inputs stay absent so
        # the provider can apply its own defaults.
        if description is not None:
            pulumi.set(__self__, "description", description)
        if is_default is not None:
            pulumi.set(__self__, "is_default", is_default)
        if key is not None:
            pulumi.set(__self__, "key", key)
        if localized_description is not None:
            pulumi.set(__self__, "localized_description", localized_description)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if predicate is not None:
            pulumi.set(__self__, "predicate", predicate)
        if tax_category_id is not None:
            pulumi.set(__self__, "tax_category_id", tax_category_id)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """Free-form description of the shipping method."""
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="isDefault")
    def is_default(self) -> Optional[pulumi.Input[bool]]:
        """
        One shipping method in a project can be default
        """
        return pulumi.get(self, "is_default")

    @is_default.setter
    def is_default(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "is_default", value)

    @property
    @pulumi.getter
    def key(self) -> Optional[pulumi.Input[str]]:
        """
        User-specific unique identifier for the shipping method
        """
        return pulumi.get(self, "key")

    @key.setter
    def key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "key", value)

    @property
    @pulumi.getter(name="localizedDescription")
    def localized_description(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        [LocalizedString](https://docs.commercetools.com/api/types#localizedstring)
        """
        return pulumi.get(self, "localized_description")

    @localized_description.setter
    def localized_description(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "localized_description", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """Human-readable name of the shipping method."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def predicate(self) -> Optional[pulumi.Input[str]]:
        """
        A Cart predicate which can be used to more precisely select a shipping method for a cart
        """
        return pulumi.get(self, "predicate")

    @predicate.setter
    def predicate(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "predicate", value)

    @property
    @pulumi.getter(name="taxCategoryId")
    def tax_category_id(self) -> Optional[pulumi.Input[str]]:
        """
        ID of a [Tax Category](https://docs.commercetools.com/api/projects/taxCategories#taxcategory)
        """
        return pulumi.get(self, "tax_category_id")

    @tax_category_id.setter
    def tax_category_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "tax_category_id", value)
@pulumi.input_type
class _ShippingMethodState:
    # NOTE: generated Pulumi state type — mirrors ShippingMethodArgs plus the
    # provider-managed `version` output. @pulumi.input_type reflects over this
    # class, so its structure must stay exactly as generated.
    def __init__(__self__, *,
                 description: Optional[pulumi.Input[str]] = None,
                 is_default: Optional[pulumi.Input[bool]] = None,
                 key: Optional[pulumi.Input[str]] = None,
                 localized_description: Optional[pulumi.Input[Mapping[str, Any]]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 predicate: Optional[pulumi.Input[str]] = None,
                 tax_category_id: Optional[pulumi.Input[str]] = None,
                 version: Optional[pulumi.Input[int]] = None):
        """
        Input properties used for looking up and filtering ShippingMethod resources.
        :param pulumi.Input[bool] is_default: One shipping method in a project can be default
        :param pulumi.Input[str] key: User-specific unique identifier for the shipping method
        :param pulumi.Input[Mapping[str, Any]] localized_description: [LocalizedString](https://docs.commercetools.com/api/types#localizedstring)
        :param pulumi.Input[str] predicate: A Cart predicate which can be used to more precisely select a shipping method for a cart
        :param pulumi.Input[str] tax_category_id: ID of a [Tax Category](https://docs.commercetools.com/api/projects/taxCategories#taxcategory)
        """
        # Only record explicitly supplied values; omitted fields stay unset.
        if description is not None:
            pulumi.set(__self__, "description", description)
        if is_default is not None:
            pulumi.set(__self__, "is_default", is_default)
        if key is not None:
            pulumi.set(__self__, "key", key)
        if localized_description is not None:
            pulumi.set(__self__, "localized_description", localized_description)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if predicate is not None:
            pulumi.set(__self__, "predicate", predicate)
        if tax_category_id is not None:
            pulumi.set(__self__, "tax_category_id", tax_category_id)
        if version is not None:
            pulumi.set(__self__, "version", version)

    @property
    @pulumi.getter
    def description(self) -> Optional[pulumi.Input[str]]:
        """Free-form description of the shipping method."""
        return pulumi.get(self, "description")

    @description.setter
    def description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "description", value)

    @property
    @pulumi.getter(name="isDefault")
    def is_default(self) -> Optional[pulumi.Input[bool]]:
        """
        One shipping method in a project can be default
        """
        return pulumi.get(self, "is_default")

    @is_default.setter
    def is_default(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "is_default", value)

    @property
    @pulumi.getter
    def key(self) -> Optional[pulumi.Input[str]]:
        """
        User-specific unique identifier for the shipping method
        """
        return pulumi.get(self, "key")

    @key.setter
    def key(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "key", value)

    @property
    @pulumi.getter(name="localizedDescription")
    def localized_description(self) -> Optional[pulumi.Input[Mapping[str, Any]]]:
        """
        [LocalizedString](https://docs.commercetools.com/api/types#localizedstring)
        """
        return pulumi.get(self, "localized_description")

    @localized_description.setter
    def localized_description(self, value: Optional[pulumi.Input[Mapping[str, Any]]]):
        pulumi.set(self, "localized_description", value)

    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """Human-readable name of the shipping method."""
        return pulumi.get(self, "name")

    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)

    @property
    @pulumi.getter
    def predicate(self) -> Optional[pulumi.Input[str]]:
        """
        A Cart predicate which can be used to more precisely select a shipping method for a cart
        """
        return pulumi.get(self, "predicate")

    @predicate.setter
    def predicate(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "predicate", value)

    @property
    @pulumi.getter(name="taxCategoryId")
    def tax_category_id(self) -> Optional[pulumi.Input[str]]:
        """
        ID of a [Tax Category](https://docs.commercetools.com/api/projects/taxCategories#taxcategory)
        """
        return pulumi.get(self, "tax_category_id")

    @tax_category_id.setter
    def tax_category_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "tax_category_id", value)

    @property
    @pulumi.getter
    def version(self) -> Optional[pulumi.Input[int]]:
        # Provider-managed optimistic-locking version of the resource.
        return pulumi.get(self, "version")

    @version.setter
    def version(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "version", value)
class ShippingMethod(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
is_default: Optional[pulumi.Input[bool]] = None,
key: Optional[pulumi.Input[str]] = None,
localized_description: Optional[pulumi.Input[Mapping[str, Any]]] = None,
name: Optional[pulumi.Input[str]] = None,
predicate: Optional[pulumi.Input[str]] = None,
tax_category_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Create a ShippingMethod resource with the given unique name, props, and options.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] is_default: One shipping method in a project can be default
:param pulumi.Input[str] key: User-specific unique identifier for the shipping method
:param pulumi.Input[Mapping[str, Any]] localized_description: [LocalizedString](https://docs.commercetools.com/api/types#localizedstring)
:param pulumi.Input[str] predicate: A Cart predicate which can be used to more precisely select a shipping method for a cart
:param pulumi.Input[str] tax_category_id: ID of a [Tax Category](https://docs.commercetools.com/api/projects/taxCategories#taxcategory)
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: Optional[ShippingMethodArgs] = None,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Create a ShippingMethod resource with the given unique name, props, and options.
:param str resource_name: The name of the resource.
:param ShippingMethodArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(ShippingMethodArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
is_default: Optional[pulumi.Input[bool]] = None,
key: Optional[pulumi.Input[str]] = None,
localized_description: Optional[pulumi.Input[Mapping[str, Any]]] = None,
name: Optional[pulumi.Input[str]] = None,
predicate: Optional[pulumi.Input[str]] = None,
tax_category_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = ShippingMethodArgs.__new__(ShippingMethodArgs)
__props__.__dict__["description"] = description
__props__.__dict__["is_default"] = is_default
__props__.__dict__["key"] = key
__props__.__dict__["localized_description"] = localized_description
__props__.__dict__["name"] = name
__props__.__dict__["predicate"] = predicate
__props__.__dict__["tax_category_id"] = tax_category_id
__props__.__dict__["version"] = None
super(ShippingMethod, __self__).__init__(
'commercetools:index/shippingMethod:ShippingMethod',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
description: Optional[pulumi.Input[str]] = None,
is_default: Optional[pulumi.Input[bool]] = None,
key: Optional[pulumi.Input[str]] = None,
localized_description: Optional[pulumi.Input[Mapping[str, Any]]] = None,
name: Optional[pulumi.Input[str]] = None,
predicate: Optional[pulumi.Input[str]] = None,
tax_category_id: Optional[pulumi.Input[str]] = None,
version: Optional[pulumi.Input[int]] = None) -> 'ShippingMethod':
"""
Get an existing ShippingMethod resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[bool] is_default: One shipping method in a project can be default
:param pulumi.Input[str] key: User-specific unique identifier for the shipping method
:param pulumi.Input[Mapping[str, Any]] localized_description: [LocalizedString](https://docs.commercetools.com/api/types#localizedstring)
:param pulumi.Input[str] predicate: A Cart predicate which can be used to more precisely select a shipping method for a cart
:param pulumi.Input[str] tax_category_id: ID of a [Tax Category](https://docs.commercetools.com/api/projects/taxCategories#taxcategory)
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _ShippingMethodState.__new__(_ShippingMethodState)
__props__.__dict__["description"] = description
__props__.__dict__["is_default"] = is_default
__props__.__dict__["key"] = key
__props__.__dict__["localized_description"] = localized_description
__props__.__dict__["name"] = name
__props__.__dict__["predicate"] = predicate
__props__.__dict__["tax_category_id"] = tax_category_id
__props__.__dict__["version"] = version
return ShippingMethod(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter
def description(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "description")
@property
@pulumi.getter(name="isDefault")
def is_default(self) -> pulumi.Output[Optional[bool]]:
"""
One shipping method in a project can be default
"""
return pulumi.get(self, "is_default")
@property
@pulumi.getter
def key(self) -> pulumi.Output[Optional[str]]:
"""
User-specific unique identifier for the shipping method
"""
return pulumi.get(self, "key")
@property
@pulumi.getter(name="localizedDescription")
def localized_description(self) -> pulumi.Output[Optional[Mapping[str, Any]]]:
"""
[LocalizedString](https://docs.commercetools.com/api/types#localizedstring)
"""
return pulumi.get(self, "localized_description")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
return pulumi.get(self, "name")
@property
@pulumi.getter
def predicate(self) -> pulumi.Output[Optional[str]]:
"""
A Cart predicate which can be used to more precisely select a shipping method for a cart
"""
return pulumi.get(self, "predicate")
    @property
    @pulumi.getter(name="taxCategoryId")
    def tax_category_id(self) -> pulumi.Output[Optional[str]]:
        """
        ID of a [Tax Category](https://docs.commercetools.com/api/projects/taxCategories#taxcategory)

        Resolved from the ``tax_category_id`` output of this resource.
        """
        return pulumi.get(self, "tax_category_id")
    @property
    @pulumi.getter
    def version(self) -> pulumi.Output[int]:
        """Resolve the ``version`` output property of this resource (required, never None)."""
        return pulumi.get(self, "version")
| 42.881517
| 145
| 0.648596
| 2,079
| 18,096
| 5.425685
| 0.075036
| 0.088741
| 0.112855
| 0.087766
| 0.841046
| 0.81977
| 0.791489
| 0.782447
| 0.777926
| 0.767287
| 0
| 0.000073
| 0.241048
| 18,096
| 421
| 146
| 42.983373
| 0.821247
| 0.249668
| 0
| 0.790614
| 1
| 0
| 0.083605
| 0.018475
| 0
| 0
| 0
| 0
| 0
| 1
| 0.162455
| false
| 0.00361
| 0.018051
| 0.028881
| 0.277978
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0c4f809c8f68a51bcec974458d222334e72ede8c
| 1,965
|
py
|
Python
|
bilstm/callbacks.py
|
alperkesen/bilstm-crf
|
577f93e2ef7e5814dca41aa32e2731d52f496b79
|
[
"MIT"
] | null | null | null |
bilstm/callbacks.py
|
alperkesen/bilstm-crf
|
577f93e2ef7e5814dca41aa32e2731d52f496b79
|
[
"MIT"
] | null | null | null |
bilstm/callbacks.py
|
alperkesen/bilstm-crf
|
577f93e2ef7e5814dca41aa32e2731d52f496b79
|
[
"MIT"
] | null | null | null |
"""
Custom callbacks.
"""
from keras.callbacks import Callback
from seqeval.metrics import f1_score, classification_report
class F1score(Callback):
    """Keras callback that reports an entity-level F1 score on a
    validation generator at the end of every epoch.

    :param steps: number of batches to draw from ``generator`` per epoch.
    :param generator: iterator yielding ``(x_true, y_true)`` batches; the
        last element of ``x_true`` is assumed to hold the sequence
        lengths (TODO confirm against the data generator).
    :param preprocessor: object exposing ``inverse_transform(y, lengths)``
        that maps model output back to label sequences.
    """

    def __init__(self, steps, generator, preprocessor=None):
        super(F1score, self).__init__()
        self.steps = steps
        self.generator = generator
        self.p = preprocessor

    def on_epoch_end(self, epoch, logs=None):
        """Compute and print validation f1/classification report; store 'f1' in logs.

        Bug fix: ``logs`` previously defaulted to a mutable ``{}`` that is
        shared across calls (and across instances); default to ``None`` and
        create a fresh dict instead.
        """
        if logs is None:
            logs = {}
        label_true = []
        label_pred = []
        for _ in range(self.steps):
            x_true, y_true = next(self.generator)
            lengths = x_true[-1]
            y_pred = self.model.predict_on_batch(x_true)
            # Undo padding/one-hot so seqeval sees plain label sequences.
            y_true = self.p.inverse_transform(y_true, lengths)
            y_pred = self.p.inverse_transform(y_pred, lengths)
            label_true.extend(y_true)
            label_pred.extend(y_pred)
        print("Validation scores")
        score = f1_score(label_true, label_pred)
        print(' - f1: {:04.2f}'.format(score * 100))
        print(classification_report(label_true, label_pred))
        logs['f1'] = score
class TestCallback(Callback):
    """Keras callback that reports an entity-level F1 score on a test
    generator at the end of every epoch.

    :param steps: number of batches to draw from ``generator`` per epoch.
    :param generator: iterator yielding ``(x_true, y_true)`` batches; the
        last element of ``x_true`` is assumed to hold the sequence
        lengths (TODO confirm against the data generator).
    :param preprocessor: object exposing ``inverse_transform(y, lengths)``
        that maps model output back to label sequences.
    """

    def __init__(self, steps, generator, preprocessor=None):
        super(TestCallback, self).__init__()
        self.steps = steps
        self.generator = generator
        self.p = preprocessor

    def on_epoch_end(self, epoch, logs=None):
        """Compute and print test f1/classification report; store 'f1' in logs.

        Bug fix: ``logs`` previously defaulted to a mutable ``{}`` that is
        shared across calls (and across instances); default to ``None`` and
        create a fresh dict instead.
        """
        if logs is None:
            logs = {}
        label_true = []
        label_pred = []
        for _ in range(self.steps):
            x_true, y_true = next(self.generator)
            lengths = x_true[-1]
            y_pred = self.model.predict_on_batch(x_true)
            # Undo padding/one-hot so seqeval sees plain label sequences.
            y_true = self.p.inverse_transform(y_true, lengths)
            y_pred = self.p.inverse_transform(y_pred, lengths)
            label_true.extend(y_true)
            label_pred.extend(y_pred)
        print("Test scores")
        score = f1_score(label_true, label_pred)
        print(' - f1: {:04.2f}'.format(score * 100))
        print(classification_report(label_true, label_pred))
        logs['f1'] = score
| 31.693548
| 62
| 0.614758
| 245
| 1,965
| 4.636735
| 0.220408
| 0.06338
| 0.091549
| 0.09507
| 0.859155
| 0.859155
| 0.859155
| 0.859155
| 0.859155
| 0.764085
| 0
| 0.016129
| 0.2743
| 1,965
| 61
| 63
| 32.213115
| 0.780505
| 0.008651
| 0
| 0.826087
| 0
| 0
| 0.031959
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.086957
| false
| 0
| 0.043478
| 0
| 0.173913
| 0.130435
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
a78e37d7573b7267a747f9fc9fdaf7991438238e
| 191
|
py
|
Python
|
andes/models/dc/__init__.py
|
cuihantao/Andes
|
6cdc057986c4a8382194ef440b6e92b8dfb77e25
|
[
"Apache-2.0"
] | 16
|
2017-06-16T14:21:04.000Z
|
2018-08-18T08:52:27.000Z
|
andes/models/dc/__init__.py
|
cuihantao/Andes
|
6cdc057986c4a8382194ef440b6e92b8dfb77e25
|
[
"Apache-2.0"
] | 1
|
2017-12-12T07:51:16.000Z
|
2017-12-12T07:51:16.000Z
|
andes/models/dc/__init__.py
|
cuihantao/Andes
|
6cdc057986c4a8382194ef440b6e92b8dfb77e25
|
[
"Apache-2.0"
] | 7
|
2017-12-10T07:32:36.000Z
|
2018-09-19T16:38:30.000Z
|
"""
DC models.
"""
from andes.models.dc.ground import Ground # NOQA
from andes.models.dc.node import Node # noqa
from andes.models.dc.rlc import R, L, C, RCp, RLCp, RLCs, RCs, RLs # noqa
| 23.875
| 74
| 0.691099
| 33
| 191
| 4
| 0.515152
| 0.204545
| 0.340909
| 0.386364
| 0.318182
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.172775
| 191
| 7
| 75
| 27.285714
| 0.835443
| 0.136126
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ac5c3aaeafd2dd16fd394fcd69d0bda5a2c37c1e
| 71,399
|
py
|
Python
|
voltha/core/global_handler.py
|
jonohart/voltha
|
87314cd53cb4c61e7e62b0ed3fc6da94603cc507
|
[
"Apache-2.0"
] | null | null | null |
voltha/core/global_handler.py
|
jonohart/voltha
|
87314cd53cb4c61e7e62b0ed3fc6da94603cc507
|
[
"Apache-2.0"
] | null | null | null |
voltha/core/global_handler.py
|
jonohart/voltha
|
87314cd53cb4c61e7e62b0ed3fc6da94603cc507
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2017 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
import structlog
from grpc import StatusCode
from twisted.internet.defer import inlineCallbacks
from twisted.internet.defer import returnValue
from common.utils.grpc_utils import twisted_async
from common.utils.id_generation import \
create_cluster_id, create_empty_broadcast_id
from voltha.core.config.config_root import ConfigRoot
from voltha.protos.device_pb2 import PmConfigs, Images, \
ImageDownload, ImageDownloads
from voltha.protos.common_pb2 import OperationResp
from voltha.protos.voltha_pb2 import \
add_VolthaGlobalServiceServicer_to_server, VolthaLocalServiceStub, \
VolthaGlobalServiceServicer, Voltha, VolthaInstances, VolthaInstance, \
LogicalDevice, Ports, Flows, FlowGroups, Device, SelfTestResponse, \
VolthaGlobalServiceStub, Devices, DeviceType, DeviceTypes, DeviceGroup, \
AlarmFilter, AlarmFilters
from voltha.registry import registry
from google.protobuf.empty_pb2 import Empty
from dispatcher import DispatchError
from voltha.protos import bbf_fiber_base_pb2 as fb
from voltha.protos.bbf_fiber_base_pb2 import ChannelgroupConfig, \
ChannelpartitionConfig, ChannelpairConfig, ChannelterminationConfig, \
OntaniConfig, VOntaniConfig, VEnetConfig
from voltha.protos.bbf_fiber_traffic_descriptor_profile_body_pb2 import \
TrafficDescriptorProfileData
from voltha.protos.bbf_fiber_tcont_body_pb2 import TcontsConfigData
from voltha.protos.bbf_fiber_gemport_body_pb2 import GemportsConfigData
from voltha.protos.bbf_fiber_multicast_gemport_body_pb2 import \
MulticastGemportsConfigData
from voltha.protos.bbf_fiber_multicast_distribution_set_body_pb2 import \
MulticastDistributionSetData
# Module-level structlog logger shared by all handlers in this file.
log = structlog.get_logger()
class GlobalHandler(VolthaGlobalServiceServicer):
    """Cluster-wide gRPC servicer: each RPC is forwarded to the owning
    Voltha core (or broadcast to all cores) through ``self.dispatcher``."""
    # Maps xPON RPC method names to the protobuf config/data class used
    # as that RPC's payload type.
    xpon_object_type = {
        'CreateChannelgroup': ChannelgroupConfig,
        'UpdateChannelgroup': ChannelgroupConfig,
        'DeleteChannelgroup': ChannelgroupConfig,
        'CreateChannelpartition': ChannelpartitionConfig,
        'UpdateChannelpartition': ChannelpartitionConfig,
        'DeleteChannelpartition': ChannelpartitionConfig,
        'CreateChannelpair': ChannelpairConfig,
        'UpdateChannelpair': ChannelpairConfig,
        'DeleteChannelpair': ChannelpairConfig,
        'CreateChanneltermination': ChannelterminationConfig,
        'UpdateChanneltermination': ChannelterminationConfig,
        'DeleteChanneltermination': ChannelterminationConfig,
        'CreateVOntani': VOntaniConfig,
        'UpdateVOntani': VOntaniConfig,
        'DeleteVOntani': VOntaniConfig,
        'CreateOntani': OntaniConfig,
        'UpdateOntani': OntaniConfig,
        'DeleteOntani': OntaniConfig,
        'CreateVEnet': VEnetConfig,
        'UpdateVEnet': VEnetConfig,
        'DeleteVEnet': VEnetConfig,
        'CreateTrafficDescriptorProfileData': TrafficDescriptorProfileData,
        'UpdateTrafficDescriptorProfileData': TrafficDescriptorProfileData,
        'DeleteTrafficDescriptorProfileData': TrafficDescriptorProfileData,
        'CreateTcontsConfigData': TcontsConfigData,
        'UpdateTcontsConfigData': TcontsConfigData,
        'DeleteTcontsConfigData': TcontsConfigData,
        'CreateGemportsConfigData': GemportsConfigData,
        'UpdateGemportsConfigData': GemportsConfigData,
        'DeleteGemportsConfigData': GemportsConfigData,
        'CreateMulticastGemportsConfigData': MulticastGemportsConfigData,
        'UpdateMulticastGemportsConfigData': MulticastGemportsConfigData,
        'DeleteMulticastGemportsConfigData': MulticastGemportsConfigData,
        'CreateMulticastDistributionSetData': MulticastDistributionSetData,
        'UpdateMulticastDistributionSetData': MulticastDistributionSetData,
        'DeleteMulticastDistributionSetData': MulticastDistributionSetData
    }
    def __init__(self, dispatcher, instance_id, **init_kw):
        """Record the dispatcher and instance identity; ``init_kw`` seeds
        the Voltha config root created later in :meth:`start`."""
        self.dispatcher = dispatcher
        self.instance_id = instance_id
        self.init_kw = init_kw
        self.root = None  # becomes a ConfigRoot in start()
        self.stopped = False
    def start(self):
        """Build the config root from the stored kwargs; returns self."""
        log.debug('starting')
        self.root = ConfigRoot(Voltha(**self.init_kw))
        log.info('started')
        return self
    def register_grpc_service(self):
        """Register this servicer with the process's gRPC server."""
        log.debug('registering')
        registry('grpc_server').register(
            add_VolthaGlobalServiceServicer_to_server, self)
        log.info('registered')
    def stop(self):
        """Mark the handler stopped (no resources to release)."""
        log.debug('stopping')
        self.stopped = True
        log.info('stopped')
    # gRPC service method implementations. BE CAREFUL; THESE ARE CALLED ON
    # the gRPC threadpool threads.
    @twisted_async
    def GetVoltha(self, request, context):
        """Return the top-level Voltha config object (depth 1)."""
        log.info('grpc-request', request=request)
        return self.root.get('/', depth=1)
@twisted_async
def ListVolthaInstances(self, request, context):
log.info('grpc-request', request=request)
items = self.dispatcher.get_cluster_instances()
return VolthaInstances(items=items)
    @twisted_async
    @inlineCallbacks
    def GetVolthaInstance(self, request, context):
        """Fetch one Voltha instance by id from its owning core; NOT_FOUND
        when the instance id maps to no core."""
        log.info('grpc-request', request=request)
        core_id = self.dispatcher.get_core_id_from_instance_id(request.id)
        if not core_id:
            log.info('invalid-instance-id', instance=request.id)
            context.set_details('Voltha Instance error')
            context.set_code(StatusCode.NOT_FOUND)
            returnValue(VolthaInstance())
        response = yield self.dispatcher.dispatch('GetVolthaInstance',
                                                  Empty(),
                                                  context,
                                                  core_id=core_id)
        log.debug('grpc-response', response=response)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Voltha Instance error')
            context.set_code(response.error_code)
            returnValue(VolthaInstance())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
@twisted_async
@inlineCallbacks
def ListLogicalDevices(self, request, context):
log.info('grpc-request', request=request)
response = yield self.dispatcher.dispatch('ListLogicalDevices',
Empty(),
context,
broadcast=True)
log.debug('grpc-response', response=response)
returnValue(response)
    @twisted_async
    @inlineCallbacks
    def GetLogicalDevice(self, request, context):
        """Fetch a logical device by id from its owning core; dispatch
        errors become the gRPC status with an empty payload."""
        log.info('grpc-request', request=request)
        response = yield self.dispatcher.dispatch('GetLogicalDevice',
                                                  request,
                                                  context,
                                                  id=request.id)
        log.debug('grpc-response', response=response)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details(
                'Logical device \'{}\' error'.format(request.id))
            context.set_code(response.error_code)
            returnValue(LogicalDevice())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def ListLogicalDevicePorts(self, request, context):
        """List a logical device's ports via its owning core."""
        log.info('grpc-request', request=request)
        response = yield self.dispatcher.dispatch('ListLogicalDevicePorts',
                                                  request,
                                                  context,
                                                  id=request.id)
        log.debug('grpc-response', response=response)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details(
                'Logical device ports \'{}\' error'.format(request.id))
            context.set_code(response.error_code)
            returnValue(Ports())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def ListLogicalDeviceFlows(self, request, context):
        """List a logical device's flows via its owning core."""
        log.info('grpc-request', request=request)
        response = yield self.dispatcher.dispatch('ListLogicalDeviceFlows',
                                                  request,
                                                  context,
                                                  id=request.id)
        log.debug('grpc-response', response=response)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details(
                'Logical device \'{}\' error'.format(request.id))
            context.set_code(response.error_code)
            returnValue(Flows())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def UpdateLogicalDeviceFlowTable(self, request, context):
        """Apply a flow-table update on a logical device via its core."""
        log.info('grpc-request', request=request)
        response = yield self.dispatcher.dispatch(
            'UpdateLogicalDeviceFlowTable',
            request,
            context,
            id=request.id)
        log.debug('grpc-response', response=response)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details(
                'Logical device \'{}\' error'.format(request.id))
            context.set_code(response.error_code)
            returnValue(Empty())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def ListLogicalDeviceFlowGroups(self, request, context):
        """List a logical device's flow groups via its owning core."""
        log.info('grpc-request', request=request)
        response = yield self.dispatcher.dispatch(
            'ListLogicalDeviceFlowGroups',
            request,
            context,
            id=request.id)
        log.debug('grpc-response', response=response)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details(
                'Logical device \'{}\' error'.format(request.id))
            context.set_code(response.error_code)
            returnValue(FlowGroups())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def UpdateLogicalDeviceFlowGroupTable(self, request, context):
        """Apply a flow-group-table update on a logical device via its core."""
        log.info('grpc-request', request=request)
        response = yield self.dispatcher.dispatch(
            'UpdateLogicalDeviceFlowGroupTable',
            request,
            context,
            id=request.id)
        log.debug('grpc-response', response=response)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details(
                'Logical device \'{}\' error'.format(request.id))
            context.set_code(response.error_code)
            returnValue(Empty())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
@twisted_async
@inlineCallbacks
def ListDevices(self, request, context):
log.info('grpc-request', request=request)
response = yield self.dispatcher.dispatch('ListDevices',
Empty(),
context,
broadcast=True)
log.debug('grpc-response', response=response)
returnValue(response)
@twisted_async
@inlineCallbacks
def ListAdapters(self, request, context):
log.info('grpc-request', request=request)
response = yield self.dispatcher.dispatch('ListAdapters',
Empty(),
context,
broadcast=True)
log.debug('grpc-response', response=response)
returnValue(response)
    @twisted_async
    @inlineCallbacks
    def GetDevice(self, request, context):
        """Fetch a device by id from its owning core."""
        log.info('grpc-request', request=request)
        response = yield self.dispatcher.dispatch('GetDevice',
                                                  request,
                                                  context,
                                                  id=request.id)
        log.debug('grpc-response', response=response)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Device \'{}\' error'.format(request.id))
            context.set_code(response.error_code)
            returnValue(Device())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def CreateDevice(self, request, context):
        """Create a device; the dispatcher picks the core (no id yet)."""
        log.info('grpc-request', request=request)
        response = yield self.dispatcher.dispatch('CreateDevice',
                                                  request,
                                                  context)
        log.debug('grpc-response', response=response)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Create device error')
            context.set_code(response.error_code)
            returnValue(Device())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def EnableDevice(self, request, context):
        """Enable a device via its owning core."""
        log.info('grpc-request', request=request)
        response = yield self.dispatcher.dispatch('EnableDevice',
                                                  request,
                                                  context,
                                                  id=request.id)
        log.debug('grpc-response', response=response)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Device \'{}\' error'.format(request.id))
            context.set_code(response.error_code)
            returnValue(Device())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def DisableDevice(self, request, context):
        """Disable a device via its owning core."""
        log.info('grpc-request', request=request)
        response = yield self.dispatcher.dispatch('DisableDevice',
                                                  request,
                                                  context,
                                                  id=request.id)
        log.debug('grpc-response', response=response)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Device \'{}\' error'.format(request.id))
            context.set_code(response.error_code)
            returnValue(Device())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def RebootDevice(self, request, context):
        """Reboot a device via its owning core."""
        log.info('grpc-request', request=request)
        response = yield self.dispatcher.dispatch('RebootDevice',
                                                  request,
                                                  context,
                                                  id=request.id)
        log.debug('grpc-response', response=response)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Device \'{}\' error'.format(request.id))
            context.set_code(response.error_code)
            returnValue(Device())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def DeleteDevice(self, request, context):
        """Delete a device via its owning core.

        NOTE(review): unlike the sibling handlers, the success branch
        returns ``Empty()`` rather than ``response`` — presumably because
        delete has no payload; confirm before changing.
        """
        log.info('grpc-request', request=request)
        response = yield self.dispatcher.dispatch('DeleteDevice',
                                                  request,
                                                  context,
                                                  id=request.id)
        log.debug('grpc-response', response=response)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Device \'{}\' error'.format(request.id))
            context.set_code(response.error_code)
            returnValue(Empty())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(Empty())
    @twisted_async
    @inlineCallbacks
    def ListDevicePorts(self, request, context):
        """List a device's ports via its owning core."""
        log.info('grpc-request', request=request)
        response = yield self.dispatcher.dispatch('ListDevicePorts',
                                                  request,
                                                  context,
                                                  id=request.id)
        log.debug('grpc-response', response=response)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Device \'{}\' error'.format(request.id))
            context.set_code(response.error_code)
            returnValue(Ports())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def ListDevicePmConfigs(self, request, context):
        """List a device's PM configs via its owning core."""
        log.info('grpc-request', request=request)
        response = yield self.dispatcher.dispatch('ListDevicePmConfigs',
                                                  request,
                                                  context,
                                                  id=request.id)
        log.debug('grpc-response', response=response)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Device \'{}\' error'.format(request.id))
            context.set_code(response.error_code)
            returnValue(PmConfigs())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def UpdateDevicePmConfigs(self, request, context):
        """Update a device's PM configs via its owning core."""
        log.info('grpc-request', request=request)
        response = yield self.dispatcher.dispatch('UpdateDevicePmConfigs',
                                                  request,
                                                  context,
                                                  id=request.id)
        log.debug('grpc-response', response=response)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Device \'{}\' error'.format(request.id))
            context.set_code(response.error_code)
            returnValue(Empty())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def ListDeviceFlows(self, request, context):
        """List a device's flows via its owning core."""
        log.info('grpc-request', request=request)
        response = yield self.dispatcher.dispatch('ListDeviceFlows',
                                                  request,
                                                  context,
                                                  id=request.id)
        log.debug('grpc-response', response=response)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Device \'{}\' error'.format(request.id))
            context.set_code(response.error_code)
            returnValue(Flows())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def ListDeviceFlowGroups(self, request, context):
        """List a device's flow groups via its owning core."""
        log.info('grpc-request', request=request)
        response = yield self.dispatcher.dispatch('ListDeviceFlowGroups',
                                                  request,
                                                  context,
                                                  id=request.id)
        log.debug('grpc-response', response=response)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Device \'{}\' error'.format(request.id))
            context.set_code(response.error_code)
            returnValue(FlowGroups())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def ListDeviceTypes(self, request, context):
        """List supported device types from the local instance."""
        log.info('grpc-request', request=request)
        # we always deflect this to the local instance, as we assume
        # they all loaded the same adapters, supporting the same device
        # types
        response = yield self.dispatcher.dispatch('ListDeviceTypes',
                                                  request,
                                                  context)
        log.debug('grpc-response', response=response)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Device types error')
            context.set_code(response.error_code)
            returnValue(DeviceTypes())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def GetDeviceType(self, request, context):
        """Fetch one device type from the local instance."""
        log.info('grpc-request', request=request)
        # we always deflect this to the local instance, as we assume
        # they all loaded the same adapters, supporting the same device
        # types
        response = yield self.dispatcher.dispatch('GetDeviceType',
                                                  request,
                                                  context)
        log.debug('grpc-response', response=response)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Device type \'{}\' error'.format(
                request.id))
            context.set_code(response.error_code)
            returnValue(DeviceType())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
@twisted_async
@inlineCallbacks
def ListDeviceGroups(self, request, context):
log.info('grpc-request', request=request)
response = yield self.dispatcher.dispatch('ListDeviceGroups',
Empty(),
context,
broadcast=True)
log.debug('grpc-response', response=response)
returnValue(response)
@twisted_async
@inlineCallbacks
def GetDeviceGroup(self, request, context):
log.info('grpc-request', request=request)
response = yield self.dispatcher.dispatch('GetDeviceGroup',
request,
context,
id=request.id)
log.info('grpc-response', response=response)
if isinstance(response, DispatchError):
log.warn('grpc-error-response', error=response.error_code)
context.set_details('Device group\'{}\' error'.format(request.id))
context.set_code(response.error_code)
returnValue(DeviceGroup())
else:
log.debug('grpc-success-response', response=response)
returnValue(response)
    # bbf_fiber rpcs start
    @twisted_async
    @inlineCallbacks
    def GetAllChannelgroupConfig(self, request, context):
        """Broadcast a channel-group config fetch to all cores."""
        log.info('grpc-request', request=request)
        response = yield self.dispatcher.dispatch(
            'GetAllChannelgroupConfig',
            Empty(),
            context,
            broadcast=True)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Channelgroup error')
            context.set_code(response.error_code)
            returnValue(Empty())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
@twisted_async
@inlineCallbacks
def CreateChannelgroup(self, request, context):
_method_name = sys._getframe().f_code.co_name
return self.manage_global_xpon_object (request, context, _method_name)
    @twisted_async
    @inlineCallbacks
    def UpdateChannelgroup(self, request, context):
        """Broadcast a channel-group update; rejects non-ChannelgroupConfig
        payloads with INVALID_ARGUMENT. (Python 2 ``except X, e`` syntax.)"""
        log.info('grpc-request', request=request)
        try:
            assert isinstance(request, fb.ChannelgroupConfig)
            request.id = create_empty_broadcast_id()
        except AssertionError, e:
            context.set_details(e.message)
            context.set_code(StatusCode.INVALID_ARGUMENT)
            returnValue(fb.ChannelgroupConfig())
        response = yield self.dispatcher.dispatch(
            'UpdateChannelgroup',
            request,
            context,
            id=request.id,
            broadcast=True)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Channelgroup\'{}\' error'.format(request.id))
            context.set_code(response.error_code)
            returnValue(fb.ChannelgroupConfig())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def DeleteChannelgroup(self, request, context):
        """Broadcast a channel-group delete; rejects non-ChannelgroupConfig
        payloads with INVALID_ARGUMENT. (Python 2 ``except X, e`` syntax.)"""
        log.info('grpc-request', request=request)
        try:
            assert isinstance(request, fb.ChannelgroupConfig)
            request.id = create_empty_broadcast_id()
        except AssertionError, e:
            context.set_details(e.message)
            context.set_code(StatusCode.INVALID_ARGUMENT)
            returnValue(fb.ChannelgroupConfig())
        response = yield self.dispatcher.dispatch(
            'DeleteChannelgroup',
            request,
            context,
            id=request.id,
            broadcast=True)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Channelgroup\'{}\' error'.format(request.id))
            context.set_code(response.error_code)
            returnValue(fb.ChannelgroupConfig())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def GetAllChannelpartitionConfig(self, request, context):
        """Broadcast a channel-partition config fetch to all cores."""
        log.info('grpc-request', request=request)
        response = yield self.dispatcher.dispatch(
            'GetAllChannelpartitionConfig',
            Empty(),
            context,
            broadcast=True)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Channelpartition error')
            context.set_code(response.error_code)
            returnValue(Empty())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def CreateChannelpartition(self, request, context):
        """Broadcast a channel-partition create; rejects wrong payload type
        with INVALID_ARGUMENT. (Python 2 ``except X, e`` syntax.)"""
        log.info('grpc-request', request=request)
        try:
            assert isinstance(request, fb.ChannelpartitionConfig)
            request.id = create_empty_broadcast_id()
        except AssertionError, e:
            context.set_details(e.message)
            context.set_code(StatusCode.INVALID_ARGUMENT)
            returnValue(fb.ChannelpartitionConfig())
        response = yield self.dispatcher.dispatch(
            'CreateChannelpartition',
            request,
            context,
            id=request.id,
            broadcast=True)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Channelpartition\'{}\' error'.format(
                request.id))
            context.set_code(response.error_code)
            returnValue(fb.ChannelpartitionConfig())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def UpdateChannelpartition(self, request, context):
        """Broadcast a channel-partition update; rejects wrong payload type
        with INVALID_ARGUMENT. (Python 2 ``except X, e`` syntax.)"""
        log.info('grpc-request', request=request)
        try:
            assert isinstance(request, fb.ChannelpartitionConfig)
            request.id = create_empty_broadcast_id()
        except AssertionError, e:
            context.set_details(e.message)
            context.set_code(StatusCode.INVALID_ARGUMENT)
            returnValue(fb.ChannelpartitionConfig())
        response = yield self.dispatcher.dispatch(
            'UpdateChannelpartition',
            request,
            context,
            id=request.id,
            broadcast=True)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Channelpartition\'{}\' error'.format(
                request.id))
            context.set_code(response.error_code)
            returnValue(fb.ChannelpartitionConfig())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def DeleteChannelpartition(self, request, context):
        """Broadcast a channel-partition delete; rejects wrong payload type
        with INVALID_ARGUMENT. (Python 2 ``except X, e`` syntax.)"""
        log.info('grpc-request', request=request)
        try:
            assert isinstance(request, fb.ChannelpartitionConfig)
            request.id = create_empty_broadcast_id()
        except AssertionError, e:
            context.set_details(e.message)
            context.set_code(StatusCode.INVALID_ARGUMENT)
            returnValue(fb.ChannelpartitionConfig())
        response = yield self.dispatcher.dispatch(
            'DeleteChannelpartition',
            request,
            context,
            id=request.id,
            broadcast=True)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Channelpartition\'{}\' error'.format(
                request.id))
            context.set_code(response.error_code)
            returnValue(fb.ChannelpartitionConfig())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def GetAllChannelpairConfig(self, request, context):
        """Broadcast a channel-pair config fetch to all cores."""
        log.info('grpc-request', request=request)
        response = yield self.dispatcher.dispatch(
            'GetAllChannelpairConfig',
            Empty(),
            context,
            broadcast=True)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Channelpair error')
            context.set_code(response.error_code)
            returnValue(Empty())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def CreateChannelpair(self, request, context):
        """Broadcast a channel-pair create; rejects wrong payload type
        with INVALID_ARGUMENT. (Python 2 ``except X, e`` syntax.)"""
        log.info('grpc-request', request=request)
        try:
            assert isinstance(request, fb.ChannelpairConfig)
            request.id = create_empty_broadcast_id()
        except AssertionError, e:
            context.set_details(e.message)
            context.set_code(StatusCode.INVALID_ARGUMENT)
            returnValue(fb.ChannelpairConfig())
        response = yield self.dispatcher.dispatch(
            'CreateChannelpair',
            request,
            context,
            id=request.id,
            broadcast=True)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Channelpair\'{}\' error'.format(request.id))
            context.set_code(response.error_code)
            returnValue(fb.ChannelpairConfig())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def UpdateChannelpair(self, request, context):
        """Broadcast a channel-pair update; rejects wrong payload type
        with INVALID_ARGUMENT. (Python 2 ``except X, e`` syntax.)"""
        log.info('grpc-request', request=request)
        try:
            assert isinstance(request, fb.ChannelpairConfig)
            request.id = create_empty_broadcast_id()
        except AssertionError, e:
            context.set_details(e.message)
            context.set_code(StatusCode.INVALID_ARGUMENT)
            returnValue(fb.ChannelpairConfig())
        response = yield self.dispatcher.dispatch(
            'UpdateChannelpair',
            request,
            context,
            id=request.id,
            broadcast=True)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Channelpair\'{}\' error'.format(request.id))
            context.set_code(response.error_code)
            returnValue(fb.ChannelpairConfig())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def DeleteChannelpair(self, request, context):
        """Broadcast a channel-pair delete; rejects wrong payload type
        with INVALID_ARGUMENT. (Python 2 ``except X, e`` syntax.)"""
        log.info('grpc-request', request=request)
        try:
            assert isinstance(request, fb.ChannelpairConfig)
            request.id = create_empty_broadcast_id()
        except AssertionError, e:
            context.set_details(e.message)
            context.set_code(StatusCode.INVALID_ARGUMENT)
            returnValue(fb.ChannelpairConfig())
        response = yield self.dispatcher.dispatch(
            'DeleteChannelpair',
            request,
            context,
            id=request.id,
            broadcast=True)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Channelpair\'{}\' error'.format(request.id))
            context.set_code(response.error_code)
            returnValue(fb.ChannelpairConfig())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
    @twisted_async
    @inlineCallbacks
    def GetAllChannelterminationConfig(self, request, context):
        """Fetch channel-termination configs for a device via its core
        (id-routed, not broadcast, unlike the other GetAll* handlers)."""
        log.info('grpc-request', request=request)
        response = yield self.dispatcher.dispatch(
            'GetAllChannelterminationConfig',
            request,
            context,
            id=request.id)
        log.info('grpc-response', response=response)
        if isinstance(response, DispatchError):
            log.warn('grpc-error-response', error=response.error_code)
            context.set_details('Channeltermination \'{}\' error'.format(
                request.id))
            context.set_code(response.error_code)
            returnValue(fb.ChannelterminationConfig())
        else:
            log.debug('grpc-success-response', response=response)
            returnValue(response)
@twisted_async
@inlineCallbacks
def CreateChanneltermination(self, request, context):
    """Create a channel termination on the instance owning request.id."""
    log.info('grpc-request', request=request)
    res = yield self.dispatcher.dispatch(
        'CreateChanneltermination', request, context, id=request.id)
    log.info('grpc-response', response=res)
    if not isinstance(res, DispatchError):
        log.debug('grpc-success-response', response=res)
        returnValue(res)
    # Dispatch failed: surface the error code on the grpc context.
    log.warn('grpc-error-response', error=res.error_code)
    context.set_details('Channeltermination \'{}\' error'.format(
        request.id))
    context.set_code(res.error_code)
    returnValue(fb.ChannelterminationConfig())
@twisted_async
@inlineCallbacks
def UpdateChanneltermination(self, request, context):
    """Update a channel termination on the instance owning request.id."""
    log.info('grpc-request', request=request)
    res = yield self.dispatcher.dispatch(
        'UpdateChanneltermination', request, context, id=request.id)
    log.info('grpc-response', response=res)
    if not isinstance(res, DispatchError):
        log.debug('grpc-success-response', response=res)
        returnValue(res)
    # Dispatch failed: surface the error code on the grpc context.
    log.warn('grpc-error-response', error=res.error_code)
    context.set_details('Channeltermination \'{}\' error'.format(
        request.id))
    context.set_code(res.error_code)
    returnValue(fb.ChannelterminationConfig())
@twisted_async
@inlineCallbacks
def DeleteChanneltermination(self, request, context):
    """Delete a channel termination on the instance owning request.id."""
    log.info('grpc-request', request=request)
    res = yield self.dispatcher.dispatch(
        'DeleteChanneltermination', request, context, id=request.id)
    log.info('grpc-response', response=res)
    if not isinstance(res, DispatchError):
        log.debug('grpc-success-response', response=res)
        returnValue(res)
    # Dispatch failed: surface the error code on the grpc context.
    log.warn('grpc-error-response', error=res.error_code)
    context.set_details('Channeltermination \'{}\' error'.format(
        request.id))
    context.set_code(res.error_code)
    returnValue(fb.ChannelterminationConfig())
@twisted_async
@inlineCallbacks
def GetAllOntaniConfig(self, request, context):
    """Fetch ONT-ANI config from every Voltha instance (broadcast)."""
    log.info('grpc-request', request=request)
    # The incoming request carries no payload the backends need;
    # an Empty() is broadcast instead.
    res = yield self.dispatcher.dispatch(
        'GetAllOntaniConfig', Empty(), context, broadcast=True)
    if not isinstance(res, DispatchError):
        log.debug('grpc-success-response', response=res)
        returnValue(res)
    log.warn('grpc-error-response', error=res.error_code)
    context.set_details('Ontani error')
    context.set_code(res.error_code)
    returnValue(Empty())
@twisted_async
@inlineCallbacks
def CreateOntani(self, request, context):
    """Create an ONT-ANI object across all instances (broadcast).

    On validation or dispatch failure the grpc context gets an error
    status and an empty OntaniConfig is returned.
    """
    log.info('grpc-request', request=request)
    try:
        assert isinstance(request, fb.OntaniConfig)
        # Cluster-wide object: use an empty broadcast id.
        request.id = create_empty_broadcast_id()
    except AssertionError as e:
        # Fixed py2-only 'except E, e' syntax and deprecated e.message.
        context.set_details(str(e))
        context.set_code(StatusCode.INVALID_ARGUMENT)
        returnValue(fb.OntaniConfig())
    response = yield self.dispatcher.dispatch(
        'CreateOntani',
        request,
        context,
        id=request.id,
        broadcast=True)
    if isinstance(response, DispatchError):
        log.warn('grpc-error-response', error=response.error_code)
        context.set_details('Ontani \'{}\' error'.format(request.id))
        context.set_code(response.error_code)
        returnValue(fb.OntaniConfig())
    else:
        log.debug('grpc-success-response', response=response)
        returnValue(response)
@twisted_async
@inlineCallbacks
def UpdateOntani(self, request, context):
    """Update an ONT-ANI object across all instances (broadcast).

    On validation or dispatch failure the grpc context gets an error
    status and an empty OntaniConfig is returned.
    """
    log.info('grpc-request', request=request)
    try:
        assert isinstance(request, fb.OntaniConfig)
        # Cluster-wide object: use an empty broadcast id.
        request.id = create_empty_broadcast_id()
    except AssertionError as e:
        # Fixed py2-only 'except E, e' syntax and deprecated e.message.
        context.set_details(str(e))
        context.set_code(StatusCode.INVALID_ARGUMENT)
        returnValue(fb.OntaniConfig())
    response = yield self.dispatcher.dispatch(
        'UpdateOntani',
        request,
        context,
        id=request.id,
        broadcast=True)
    if isinstance(response, DispatchError):
        log.warn('grpc-error-response', error=response.error_code)
        context.set_details('Ontani \'{}\' error'.format(request.id))
        context.set_code(response.error_code)
        returnValue(fb.OntaniConfig())
    else:
        log.debug('grpc-success-response', response=response)
        returnValue(response)
@twisted_async
@inlineCallbacks
def DeleteOntani(self, request, context):
    """Delete an ONT-ANI object across all instances (broadcast).

    On validation or dispatch failure the grpc context gets an error
    status and an empty OntaniConfig is returned.
    """
    log.info('grpc-request', request=request)
    try:
        assert isinstance(request, fb.OntaniConfig)
        # Cluster-wide object: use an empty broadcast id.
        request.id = create_empty_broadcast_id()
    except AssertionError as e:
        # Fixed py2-only 'except E, e' syntax and deprecated e.message.
        context.set_details(str(e))
        context.set_code(StatusCode.INVALID_ARGUMENT)
        returnValue(fb.OntaniConfig())
    response = yield self.dispatcher.dispatch(
        'DeleteOntani',
        request,
        context,
        id=request.id,
        broadcast=True)
    if isinstance(response, DispatchError):
        log.warn('grpc-error-response', error=response.error_code)
        context.set_details('Ontani \'{}\' error'.format(request.id))
        context.set_code(response.error_code)
        returnValue(fb.OntaniConfig())
    else:
        log.debug('grpc-success-response', response=response)
        returnValue(response)
@twisted_async
@inlineCallbacks
def GetAllVOntaniConfig(self, request, context):
    """Fetch vONT-ANI config from every Voltha instance (broadcast)."""
    log.info('grpc-request', request=request)
    # Request payload is not forwarded; an Empty() is broadcast.
    res = yield self.dispatcher.dispatch(
        'GetAllVOntaniConfig', Empty(), context, broadcast=True)
    if not isinstance(res, DispatchError):
        log.debug('grpc-success-response', response=res)
        returnValue(res)
    log.warn('grpc-error-response', error=res.error_code)
    context.set_details('VOntani error')
    context.set_code(res.error_code)
    returnValue(Empty())
@twisted_async
@inlineCallbacks
def CreateVOntani(self, request, context):
    """Create a vONT-ANI object across all instances (broadcast).

    On validation or dispatch failure the grpc context gets an error
    status and an empty VOntaniConfig is returned.
    """
    log.info('grpc-request', request=request)
    try:
        assert isinstance(request, fb.VOntaniConfig)
        # Cluster-wide object: use an empty broadcast id.
        request.id = create_empty_broadcast_id()
    except AssertionError as e:
        # Fixed py2-only 'except E, e' syntax and deprecated e.message.
        context.set_details(str(e))
        context.set_code(StatusCode.INVALID_ARGUMENT)
        returnValue(fb.VOntaniConfig())
    response = yield self.dispatcher.dispatch(
        'CreateVOntani',
        request,
        context,
        id=request.id,
        broadcast=True)
    if isinstance(response, DispatchError):
        log.warn('grpc-error-response', error=response.error_code)
        context.set_details('VOntani \'{}\' error'.format(request.id))
        context.set_code(response.error_code)
        returnValue(fb.VOntaniConfig())
    else:
        log.debug('grpc-success-response', response=response)
        returnValue(response)
@twisted_async
@inlineCallbacks
def UpdateVOntani(self, request, context):
    """Update a vONT-ANI object across all instances (broadcast).

    On validation or dispatch failure the grpc context gets an error
    status and an empty VOntaniConfig is returned.
    """
    log.info('grpc-request', request=request)
    try:
        assert isinstance(request, fb.VOntaniConfig)
        # Cluster-wide object: use an empty broadcast id.
        request.id = create_empty_broadcast_id()
    except AssertionError as e:
        # Fixed py2-only 'except E, e' syntax and deprecated e.message.
        context.set_details(str(e))
        context.set_code(StatusCode.INVALID_ARGUMENT)
        returnValue(fb.VOntaniConfig())
    response = yield self.dispatcher.dispatch(
        'UpdateVOntani',
        request,
        context,
        id=request.id,
        broadcast=True)
    if isinstance(response, DispatchError):
        log.warn('grpc-error-response', error=response.error_code)
        context.set_details('VOntani \'{}\' error'.format(request.id))
        context.set_code(response.error_code)
        returnValue(fb.VOntaniConfig())
    else:
        log.debug('grpc-success-response', response=response)
        returnValue(response)
@twisted_async
@inlineCallbacks
def DeleteVOntani(self, request, context):
    """Delete a vONT-ANI object across all instances (broadcast).

    On validation or dispatch failure the grpc context gets an error
    status and an empty VOntaniConfig is returned.
    """
    log.info('grpc-request', request=request)
    try:
        assert isinstance(request, fb.VOntaniConfig)
        # Cluster-wide object: use an empty broadcast id.
        request.id = create_empty_broadcast_id()
    except AssertionError as e:
        # Fixed py2-only 'except E, e' syntax and deprecated e.message.
        context.set_details(str(e))
        context.set_code(StatusCode.INVALID_ARGUMENT)
        returnValue(fb.VOntaniConfig())
    response = yield self.dispatcher.dispatch(
        'DeleteVOntani',
        request,
        context,
        id=request.id,
        broadcast=True)
    if isinstance(response, DispatchError):
        log.warn('grpc-error-response', error=response.error_code)
        context.set_details('VOntani \'{}\' error'.format(request.id))
        context.set_code(response.error_code)
        returnValue(fb.VOntaniConfig())
    else:
        log.debug('grpc-success-response', response=response)
        returnValue(response)
@twisted_async
@inlineCallbacks
def GetAllVEnetConfig(self, request, context):
    """Fetch vEnet config from every Voltha instance (broadcast)."""
    log.info('grpc-request', request=request)
    # NOTE(review): unlike the other GetAll* siblings this forwards the
    # original request rather than Empty() -- presumably intentional;
    # confirm against the dispatcher's handler.
    res = yield self.dispatcher.dispatch(
        'GetAllVEnetConfig', request, context, broadcast=True)
    if not isinstance(res, DispatchError):
        log.debug('grpc-success-response', response=res)
        returnValue(res)
    log.warn('grpc-error-response', error=res.error_code)
    context.set_details('VEnet error')
    context.set_code(res.error_code)
    returnValue(Empty())
@twisted_async
@inlineCallbacks
def CreateVEnet(self, request, context):
    """Create a vEnet object across all instances (broadcast).

    On validation or dispatch failure the grpc context gets an error
    status and an empty VEnetConfig is returned.
    """
    log.info('grpc-request', request=request)
    try:
        assert isinstance(request, fb.VEnetConfig)
        # Cluster-wide object: use an empty broadcast id.
        request.id = create_empty_broadcast_id()
    except AssertionError as e:
        # Fixed py2-only 'except E, e' syntax and deprecated e.message.
        context.set_details(str(e))
        context.set_code(StatusCode.INVALID_ARGUMENT)
        returnValue(fb.VEnetConfig())
    response = yield self.dispatcher.dispatch(
        'CreateVEnet',
        request,
        context,
        id=request.id,
        broadcast=True)
    if isinstance(response, DispatchError):
        log.warn('grpc-error-response', error=response.error_code)
        context.set_details('VEnet \'{}\' error'.format(request.id))
        context.set_code(response.error_code)
        returnValue(fb.VEnetConfig())
    else:
        log.debug('grpc-success-response', response=response)
        returnValue(response)
@twisted_async
@inlineCallbacks
def UpdateVEnet(self, request, context):
    """Update a vEnet object across all instances (broadcast).

    On validation or dispatch failure the grpc context gets an error
    status and an empty VEnetConfig is returned.
    """
    log.info('grpc-request', request=request)
    try:
        assert isinstance(request, fb.VEnetConfig)
        # Cluster-wide object: use an empty broadcast id.
        request.id = create_empty_broadcast_id()
    except AssertionError as e:
        # Fixed py2-only 'except E, e' syntax and deprecated e.message.
        context.set_details(str(e))
        context.set_code(StatusCode.INVALID_ARGUMENT)
        returnValue(fb.VEnetConfig())
    response = yield self.dispatcher.dispatch(
        'UpdateVEnet',
        request,
        context,
        id=request.id,
        broadcast=True)
    if isinstance(response, DispatchError):
        log.warn('grpc-error-response', error=response.error_code)
        context.set_details('VEnet \'{}\' error'.format(request.id))
        context.set_code(response.error_code)
        returnValue(fb.VEnetConfig())
    else:
        log.debug('grpc-success-response', response=response)
        returnValue(response)
@twisted_async
@inlineCallbacks
def DeleteVEnet(self, request, context):
    """Delete a vEnet object across all instances (broadcast).

    On validation or dispatch failure the grpc context gets an error
    status and an empty VEnetConfig is returned.
    """
    log.info('grpc-request', request=request)
    try:
        assert isinstance(request, fb.VEnetConfig)
        # Cluster-wide object: use an empty broadcast id.
        request.id = create_empty_broadcast_id()
    except AssertionError as e:
        # Fixed py2-only 'except E, e' syntax and deprecated e.message.
        context.set_details(str(e))
        context.set_code(StatusCode.INVALID_ARGUMENT)
        returnValue(fb.VEnetConfig())
    response = yield self.dispatcher.dispatch(
        'DeleteVEnet',
        request,
        context,
        id=request.id,
        broadcast=True)
    if isinstance(response, DispatchError):
        log.warn('grpc-error-response', error=response.error_code)
        context.set_details('VEnet \'{}\' error'.format(request.id))
        context.set_code(response.error_code)
        returnValue(fb.VEnetConfig())
    else:
        log.debug('grpc-success-response', response=response)
        returnValue(response)
@twisted_async
@inlineCallbacks
def GetAllTrafficDescriptorProfileData(self, request, context):
    # Delegate to the shared xPON "get all" helper under this RPC's name.
    # NOTE(review): @inlineCallbacks requires a generator function, but
    # this method plainly returns a value, and the helper it calls is an
    # undecorated generator -- this chain looks non-functional at runtime
    # and needs a coordinated decorator fix across both blocks. TODO confirm.
    _method_name = sys._getframe().f_code.co_name
    return self.get_all_global_xpon_object_data (request, context,
                                                 _method_name)
@twisted_async
@inlineCallbacks
def CreateTrafficDescriptorProfileData(self, request, context):
    # Delegate create to the shared xPON mutator (same caveat as above).
    _method_name = sys._getframe().f_code.co_name
    return self.manage_global_xpon_object (request, context,
                                           _method_name)
@twisted_async
@inlineCallbacks
def UpdateTrafficDescriptorProfileData(self, request, context):
    # Delegate update to the shared xPON mutator (same caveat as above).
    _method_name = sys._getframe().f_code.co_name
    return self.manage_global_xpon_object (request, context,
                                           _method_name)
@twisted_async
@inlineCallbacks
def DeleteTrafficDescriptorProfileData(self, request, context):
    # Delegate delete to the shared xPON mutator (same caveat as above).
    _method_name = sys._getframe().f_code.co_name
    return self.manage_global_xpon_object (request, context,
                                           _method_name)
@twisted_async
@inlineCallbacks
def GetAllTcontsConfigData(self, request, context):
    # Delegate to the shared xPON "get all" helper under this RPC's name.
    # NOTE(review): @inlineCallbacks on a non-generator method that
    # returns the value of an undecorated generator helper -- looks
    # broken at runtime; needs a coordinated decorator fix. TODO confirm.
    _method_name = sys._getframe().f_code.co_name
    return self.get_all_global_xpon_object_data (request, context,
                                                 _method_name)
@twisted_async
@inlineCallbacks
def CreateTcontsConfigData(self, request, context):
    # Delegate create to the shared xPON mutator (same caveat as above).
    _method_name = sys._getframe().f_code.co_name
    return self.manage_global_xpon_object (request, context, _method_name)
@twisted_async
@inlineCallbacks
def UpdateTcontsConfigData(self, request, context):
    # Delegate update to the shared xPON mutator (same caveat as above).
    _method_name = sys._getframe().f_code.co_name
    return self.manage_global_xpon_object (request, context, _method_name)
@twisted_async
@inlineCallbacks
def DeleteTcontsConfigData(self, request, context):
    # Delegate delete to the shared xPON mutator (same caveat as above).
    _method_name = sys._getframe().f_code.co_name
    return self.manage_global_xpon_object (request, context, _method_name)
@twisted_async
@inlineCallbacks
def GetAllGemportsConfigData(self, request, context):
    # Delegate to the shared xPON "get all" helper under this RPC's name.
    # NOTE(review): @inlineCallbacks on a non-generator method that
    # returns the value of an undecorated generator helper -- looks
    # broken at runtime; needs a coordinated decorator fix. TODO confirm.
    _method_name = sys._getframe().f_code.co_name
    return self.get_all_global_xpon_object_data (request, context,
                                                 _method_name)
@twisted_async
@inlineCallbacks
def CreateGemportsConfigData(self, request, context):
    # Delegate create to the shared xPON mutator (same caveat as above).
    _method_name = sys._getframe().f_code.co_name
    return self.manage_global_xpon_object (request, context, _method_name)
@twisted_async
@inlineCallbacks
def UpdateGemportsConfigData(self, request, context):
    # Delegate update to the shared xPON mutator (same caveat as above).
    _method_name = sys._getframe().f_code.co_name
    return self.manage_global_xpon_object (request, context, _method_name)
@twisted_async
@inlineCallbacks
def DeleteGemportsConfigData(self, request, context):
    # Delegate delete to the shared xPON mutator (same caveat as above).
    _method_name = sys._getframe().f_code.co_name
    return self.manage_global_xpon_object (request, context, _method_name)
@twisted_async
@inlineCallbacks
def GetAllMulticastGemportsConfigData(self, request, context):
    # Delegate to the shared xPON "get all" helper under this RPC's name.
    # NOTE(review): @inlineCallbacks on a non-generator method that
    # returns the value of an undecorated generator helper -- looks
    # broken at runtime; needs a coordinated decorator fix. TODO confirm.
    _method_name = sys._getframe().f_code.co_name
    return self.get_all_global_xpon_object_data (request, context,
                                                 _method_name)
@twisted_async
@inlineCallbacks
def CreateMulticastGemportsConfigData(self, request, context):
    # Delegate create to the shared xPON mutator (same caveat as above).
    _method_name = sys._getframe().f_code.co_name
    return self.manage_global_xpon_object (request, context, _method_name)
@twisted_async
@inlineCallbacks
def UpdateMulticastGemportsConfigData(self, request, context):
    # Delegate update to the shared xPON mutator (same caveat as above).
    _method_name = sys._getframe().f_code.co_name
    return self.manage_global_xpon_object (request, context, _method_name)
@twisted_async
@inlineCallbacks
def DeleteMulticastGemportsConfigData(self, request, context):
    # Delegate delete to the shared xPON mutator (same caveat as above).
    _method_name = sys._getframe().f_code.co_name
    return self.manage_global_xpon_object (request, context, _method_name)
@twisted_async
@inlineCallbacks
def GetAllMulticastDistributionSetData(self, request, context):
    # Delegate to the shared xPON "get all" helper under this RPC's name.
    # NOTE(review): @inlineCallbacks on a non-generator method that
    # returns the value of an undecorated generator helper -- looks
    # broken at runtime; needs a coordinated decorator fix. TODO confirm.
    _method_name = sys._getframe().f_code.co_name
    return self.get_all_global_xpon_object_data (request, context,
                                                 _method_name)
@twisted_async
@inlineCallbacks
def CreateMulticastDistributionSetData(self, request, context):
    # Delegate create to the shared xPON mutator (same caveat as above).
    _method_name = sys._getframe().f_code.co_name
    return self.manage_global_xpon_object (request, context, _method_name)
@twisted_async
@inlineCallbacks
def UpdateMulticastDistributionSetData(self, request, context):
    # Delegate update to the shared xPON mutator (same caveat as above).
    _method_name = sys._getframe().f_code.co_name
    return self.manage_global_xpon_object (request, context, _method_name)
@twisted_async
@inlineCallbacks
def DeleteMulticastDistributionSetData(self, request, context):
    # Delegate delete to the shared xPON mutator (same caveat as above).
    _method_name = sys._getframe().f_code.co_name
    return self.manage_global_xpon_object (request, context, _method_name)
def get_all_global_xpon_object_data(self, request, context, method_name):
    """Broadcast a "get all" dispatch for the xPON RPC *method_name*.

    Returns the aggregate response, or Empty() with an error status set
    on *context* when the dispatch yields a DispatchError.

    NOTE(review): this function contains ``yield``/``returnValue`` but
    carries no @inlineCallbacks decorator, so calling it returns a bare
    generator object; its callers then just ``return`` that generator.
    The whole delegation chain looks non-functional at runtime and the
    decorators need a coordinated fix -- TODO confirm.
    """
    log.info('grpc-request', request=request)
    response = yield self.dispatcher.dispatch(
        method_name,
        Empty(),
        context,
        broadcast=True)
    if isinstance(response, DispatchError):
        log.warn('grpc-error-response', error=response.error_code)
        # Error detail names the request's message type for the caller.
        context.set_details('{}\' error' .format(type(request).__name__))
        context.set_code(response.error_code)
        returnValue(Empty())
    else:
        log.debug('grpc-success-response', response=response)
        returnValue(response)
def manage_global_xpon_object(self, request, context, method_name):
    """Validate and broadcast a create/update/delete xPON dispatch.

    The expected request type is looked up in ``self.xpon_object_type``
    by *method_name*; on type mismatch the grpc context is marked
    INVALID_ARGUMENT and an empty object of that type is returned.

    NOTE(review): same decorator problem as
    get_all_global_xpon_object_data -- generator body, no
    @inlineCallbacks, callers plainly ``return`` its result. Also uses
    py2-only ``except E, e`` syntax. TODO confirm and fix together with
    the delegating RPC methods.
    """
    log.info('grpc-request', request=request)
    _xpon_object_type = self.xpon_object_type[method_name]
    try:
        assert isinstance(request, _xpon_object_type)
        # Cluster-wide object: use an empty broadcast id.
        request.id = create_empty_broadcast_id()
    except AssertionError, e:
        context.set_details(e.message)
        context.set_code(StatusCode.INVALID_ARGUMENT)
        returnValue(_xpon_object_type())
    response = yield self.dispatcher.dispatch(
        method_name,
        request,
        context,
        id=request.id,
        broadcast=True)
    if isinstance(response, DispatchError):
        log.warn('grpc-error-response', error=response.error_code)
        context.set_details('{}\'{}\' error'.format(type(request).__name__,
                                                    request.id))
        context.set_code(response.error_code)
        returnValue(_xpon_object_type())
    else:
        log.debug('grpc-success-response', response=response)
        returnValue(response)
# bbf_fiber rpcs end
@twisted_async
@inlineCallbacks
def CreateAlarmFilter(self, request, context):
    """Create an AlarmFilter, replicated to every Voltha instance.

    The filter is cluster-wide, so it gets a global cluster id and the
    request is broadcast; on failure the grpc context is marked and an
    empty AlarmFilter is returned.
    """
    log.info('grpc-request', request=request)
    # Since AlarmFilter applies to the entire cluster, it will be assigned
    # a global id (using a global core_id). Every Voltha instance will
    # have the same data. Since the voltha instances are managed by
    # docker swarm mode then whenever an instance goes down it will be
    # brought up right away, hence reducing the chance of two instances
    # having different data. In future phases, we should adopt the
    # strategy of having a unique persistence model for cluster data
    # compare to instance data
    try:
        assert isinstance(request, AlarmFilter)
        request.id = create_cluster_id()
    except AssertionError as e:
        # Fixed py2-only 'except E, e' syntax and deprecated e.message.
        context.set_details(str(e))
        context.set_code(StatusCode.INVALID_ARGUMENT)
        returnValue(AlarmFilter())
    response = yield self.dispatcher.dispatch('CreateAlarmFilter',
                                              request,
                                              context,
                                              id=request.id,
                                              broadcast=True)
    log.debug('grpc-response', response=response)
    if isinstance(response, DispatchError):
        log.warn('grpc-error-response', error=response.error_code)
        context.set_details('Create alarm error')
        context.set_code(response.error_code)
        returnValue(AlarmFilter())
    else:
        log.debug('grpc-success-response', response=response)
        returnValue(response)
@twisted_async
@inlineCallbacks
def GetAlarmFilter(self, request, context):
    """Fetch one alarm filter from the instance owning request.id."""
    log.info('grpc-request', request=request)
    res = yield self.dispatcher.dispatch(
        'GetAlarmFilter', request, context, id=request.id)
    log.debug('grpc-response', response=res)
    if not isinstance(res, DispatchError):
        log.debug('grpc-success-response', response=res)
        returnValue(res)
    log.warn('grpc-error-response', error=res.error_code)
    context.set_details('Alarm filter\'{}\' error'.format(request.id))
    context.set_code(res.error_code)
    returnValue(AlarmFilter())
@twisted_async
@inlineCallbacks
def UpdateAlarmFilter(self, request, context):
    """Update an alarm filter on all instances (broadcast)."""
    log.info('grpc-request', request=request)
    res = yield self.dispatcher.dispatch(
        'UpdateAlarmFilter', request, context, id=request.id,
        broadcast=True)
    log.debug('grpc-response', response=res)
    if not isinstance(res, DispatchError):
        log.debug('grpc-success-response', response=res)
        returnValue(res)
    log.warn('grpc-error-response', error=res.error_code)
    context.set_details('Alarm filter\'{}\' error'.format(request.id))
    context.set_code(res.error_code)
    returnValue(AlarmFilter())
@twisted_async
@inlineCallbacks
def DeleteAlarmFilter(self, request, context):
    """Delete an alarm filter on all instances; always replies Empty."""
    log.info('grpc-request', request=request)
    res = yield self.dispatcher.dispatch(
        'DeleteAlarmFilter', request, context, id=request.id,
        broadcast=True)
    log.debug('grpc-response', response=res)
    if isinstance(res, DispatchError):
        log.warn('grpc-error-response', error=res.error_code)
        context.set_details('Alarm filter\'{}\' error'.format(request.id))
        context.set_code(res.error_code)
    else:
        log.debug('grpc-success-response', response=res)
    # Both branches reply Empty(); only the grpc status differs.
    returnValue(Empty())
@twisted_async
@inlineCallbacks
def ListAlarmFilters(self, request, context):
    """List alarm filters from every Voltha instance (broadcast)."""
    log.info('grpc-request', request=request)
    res = yield self.dispatcher.dispatch(
        'ListAlarmFilters', Empty(), context, broadcast=True)
    log.debug('grpc-response', response=res)
    if not isinstance(res, DispatchError):
        log.debug('grpc-success-response', response=res)
        returnValue(res)
    log.warn('grpc-error-response', error=res.error_code)
    context.set_details('Alarm filters error')
    context.set_code(res.error_code)
    # NOTE(review): error path returns a single AlarmFilter(), not a
    # list message -- matches the original; confirm intended.
    returnValue(AlarmFilter())
@twisted_async
@inlineCallbacks
def GetImages(self, request, context):
    """Fetch image information for the device identified by request.id."""
    log.info('grpc-request', request=request)
    res = yield self.dispatcher.dispatch(
        'GetImages', request, context, id=request.id)
    log.debug('grpc-response', response=res)
    if not isinstance(res, DispatchError):
        log.debug('grpc-success-response', response=res)
        returnValue(res)
    log.warn('grpc-error-response', error=res.error_code)
    context.set_details('Device \'{}\' error'.format(request.id))
    context.set_code(res.error_code)
    returnValue(Images())
@twisted_async
@inlineCallbacks
def SelfTest(self, request, context):
    """Run a self test on the device identified by request.id."""
    log.info('grpc-request', request=request)
    res = yield self.dispatcher.dispatch(
        'SelfTest', request, context, id=request.id)
    log.debug('grpc-response', response=res)
    if not isinstance(res, DispatchError):
        log.debug('grpc-success-response', response=res)
        returnValue(res)
    log.warn('grpc-error-response', error=res.error_code)
    context.set_details('Device \'{}\' error'.format(request.id))
    context.set_code(res.error_code)
    returnValue(SelfTestResponse())
@twisted_async
@inlineCallbacks
def DownloadImage(self, request, context):
    """Start an image download on the device identified by request.id.

    Returns the dispatched OperationResp, or OPERATION_FAILURE with an
    error status on *context* if the dispatch fails or raises.
    """
    log.info('grpc-request', request=request)
    try:
        response = yield self.dispatcher.dispatch('DownloadImage',
                                                  request,
                                                  context,
                                                  id=request.id)
        log.debug('grpc-response', response=response)
    except Exception as e:
        # Bug fix: previously fell through with 'response' unbound,
        # so the isinstance() below raised NameError; fail explicitly.
        log.exception('grpc-exception', e=e)
        context.set_details('Device \'{}\' error'.format(request.id))
        context.set_code(StatusCode.INTERNAL)
        returnValue(OperationResp(code=OperationResp.OPERATION_FAILURE))
    if isinstance(response, DispatchError):
        log.warn('grpc-error-response', error=response.error_code)
        context.set_details('Device \'{}\' error'.format(request.id))
        context.set_code(response.error_code)
        returnValue(OperationResp(code=OperationResp.OPERATION_FAILURE))
    else:
        log.debug('grpc-success-response', response=response)
        returnValue(response)
@twisted_async
@inlineCallbacks
def GetImageDownloadStatus(self, request, context):
    """Query image-download status for the device request.id.

    Returns the dispatched result, or empty ImageDownloads with an
    error status on *context* if the dispatch fails or raises.
    """
    log.info('grpc-request', request=request)
    try:
        response = yield self.dispatcher.dispatch('GetImageDownloadStatus',
                                                  request,
                                                  context,
                                                  id=request.id)
        log.debug('grpc-response', response=response)
    except Exception as e:
        # Bug fix: previously fell through with 'response' unbound,
        # so the isinstance() below raised NameError; fail explicitly.
        log.exception('grpc-exception', e=e)
        context.set_details('Device \'{}\' error'.format(request.id))
        context.set_code(StatusCode.INTERNAL)
        returnValue(ImageDownloads())
    if isinstance(response, DispatchError):
        log.warn('grpc-error-response', error=response.error_code)
        context.set_details('Device \'{}\' error'.format(request.id))
        context.set_code(response.error_code)
        returnValue(ImageDownloads())
    else:
        log.debug('grpc-success-response', response=response)
        returnValue(response)
@twisted_async
@inlineCallbacks
def GetImageDownload(self, request, context):
    """Fetch one image-download record for the device request.id.

    Returns the dispatched result, or empty ImageDownload with an
    error status on *context* if the dispatch fails or raises.
    """
    log.info('grpc-request', request=request)
    try:
        response = yield self.dispatcher.dispatch('GetImageDownload',
                                                  request,
                                                  context,
                                                  id=request.id)
        log.debug('grpc-response', response=response)
    except Exception as e:
        # Bug fix: previously fell through with 'response' unbound,
        # so the isinstance() below raised NameError; fail explicitly.
        log.exception('grpc-exception', e=e)
        context.set_details('Device \'{}\' error'.format(request.id))
        context.set_code(StatusCode.INTERNAL)
        returnValue(ImageDownload())
    if isinstance(response, DispatchError):
        log.warn('grpc-error-response', error=response.error_code)
        context.set_details('Device \'{}\' error'.format(request.id))
        context.set_code(response.error_code)
        returnValue(ImageDownload())
    else:
        log.debug('grpc-success-response', response=response)
        returnValue(response)
@twisted_async
@inlineCallbacks
def ListImageDownloads(self, request, context):
    """List image downloads for the device request.id.

    Returns the dispatched result, or empty ImageDownloads with an
    error status on *context* if the dispatch fails or raises.
    """
    log.info('grpc-request', request=request)
    try:
        response = yield self.dispatcher.dispatch('ListImageDownloads',
                                                  request,
                                                  context,
                                                  id=request.id)
        log.debug('grpc-response', response=response)
    except Exception as e:
        # Bug fix: previously fell through with 'response' unbound,
        # so the isinstance() below raised NameError; fail explicitly.
        log.exception('grpc-exception', e=e)
        context.set_details('Device \'{}\' error'.format(request.id))
        context.set_code(StatusCode.INTERNAL)
        returnValue(ImageDownloads())
    if isinstance(response, DispatchError):
        log.warn('grpc-error-response', error=response.error_code)
        context.set_details('Device \'{}\' error'.format(request.id))
        context.set_code(response.error_code)
        returnValue(ImageDownloads())
    else:
        log.debug('grpc-success-response', response=response)
        returnValue(response)
@twisted_async
@inlineCallbacks
def CancelImageDownload(self, request, context):
    """Cancel an image download on the device request.id.

    Returns the dispatched OperationResp, or OPERATION_FAILURE with an
    error status on *context* if the dispatch fails or raises.
    """
    log.info('grpc-request', request=request)
    try:
        response = yield self.dispatcher.dispatch('CancelImageDownload',
                                                  request,
                                                  context,
                                                  id=request.id)
        log.debug('grpc-response', response=response)
    except Exception as e:
        # Bug fix: previously fell through with 'response' unbound,
        # so the isinstance() below raised NameError; fail explicitly.
        log.exception('grpc-exception', e=e)
        context.set_details('Device \'{}\' error'.format(request.id))
        context.set_code(StatusCode.INTERNAL)
        returnValue(OperationResp(code=OperationResp.OPERATION_FAILURE))
    if isinstance(response, DispatchError):
        log.warn('grpc-error-response', error=response.error_code)
        context.set_details('Device \'{}\' error'.format(request.id))
        context.set_code(response.error_code)
        returnValue(OperationResp(code=OperationResp.OPERATION_FAILURE))
    else:
        log.debug('grpc-success-response', response=response)
        returnValue(response)
@twisted_async
@inlineCallbacks
def ActivateImageUpdate(self, request, context):
    """Activate a downloaded image on the device request.id.

    Returns the dispatched OperationResp, or OPERATION_FAILURE with an
    error status on *context* if the dispatch fails or raises.
    """
    log.info('grpc-request', request=request)
    try:
        response = yield self.dispatcher.dispatch('ActivateImageUpdate',
                                                  request,
                                                  context,
                                                  id=request.id)
        log.debug('grpc-response', response=response)
    except Exception as e:
        # Bug fix: previously fell through with 'response' unbound,
        # so the isinstance() below raised NameError; fail explicitly.
        log.exception('grpc-exception', e=e)
        context.set_details('Device \'{}\' error'.format(request.id))
        context.set_code(StatusCode.INTERNAL)
        returnValue(OperationResp(code=OperationResp.OPERATION_FAILURE))
    if isinstance(response, DispatchError):
        log.warn('grpc-error-response', error=response.error_code)
        context.set_details('Device \'{}\' error'.format(request.id))
        context.set_code(response.error_code)
        returnValue(OperationResp(code=OperationResp.OPERATION_FAILURE))
    else:
        log.debug('grpc-success-response', response=response)
        returnValue(response)
@twisted_async
@inlineCallbacks
def RevertImageUpdate(self, request, context):
    """Revert to the previous image on the device request.id.

    Returns the dispatched OperationResp, or OPERATION_FAILURE with an
    error status on *context* if the dispatch fails or raises.
    """
    log.info('grpc-request', request=request)
    try:
        response = yield self.dispatcher.dispatch('RevertImageUpdate',
                                                  request,
                                                  context,
                                                  id=request.id)
        log.debug('grpc-response', response=response)
    except Exception as e:
        # Bug fix: previously fell through with 'response' unbound,
        # so the isinstance() below raised NameError; fail explicitly.
        log.exception('grpc-exception', e=e)
        context.set_details('Device \'{}\' error'.format(request.id))
        context.set_code(StatusCode.INTERNAL)
        returnValue(OperationResp(code=OperationResp.OPERATION_FAILURE))
    if isinstance(response, DispatchError):
        log.warn('grpc-error-response', error=response.error_code)
        context.set_details('Device \'{}\' error'.format(request.id))
        context.set_code(response.error_code)
        returnValue(OperationResp(code=OperationResp.OPERATION_FAILURE))
    else:
        log.debug('grpc-success-response', response=response)
        returnValue(response)
| 42.348161
| 79
| 0.595779
| 6,378
| 71,399
| 6.543901
| 0.058012
| 0.082038
| 0.055203
| 0.062534
| 0.815919
| 0.809929
| 0.807725
| 0.806407
| 0.80449
| 0.796488
| 0
| 0.000407
| 0.311167
| 71,399
| 1,685
| 80
| 42.373294
| 0.848217
| 0.020182
| 0
| 0.814935
| 0
| 0
| 0.102681
| 0.033479
| 0
| 0
| 0
| 0
| 0.024675
| 0
| null | null | 0
| 0.013636
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ac5ed8ab485bdc6918c6976a1d8b9c40a855d0fb
| 6,726
|
py
|
Python
|
surfstat/tests/test_SurfStatF.py
|
rudimeier/BrainStat
|
a5ef474ffd70300ecf5fa464fff4a41e71f4b7a1
|
[
"BSD-3-Clause"
] | null | null | null |
surfstat/tests/test_SurfStatF.py
|
rudimeier/BrainStat
|
a5ef474ffd70300ecf5fa464fff4a41e71f4b7a1
|
[
"BSD-3-Clause"
] | null | null | null |
surfstat/tests/test_SurfStatF.py
|
rudimeier/BrainStat
|
a5ef474ffd70300ecf5fa464fff4a41e71f4b7a1
|
[
"BSD-3-Clause"
] | null | null | null |
import sys
sys.path.append("python")
from SurfStatF import *
import surfstat_wrap as sw
import numpy as np
import sys
import pytest
sw.matlab_init_surfstat()
def dummy_test(A, B):
    """Compare MATLAB SurfStatF against the Python port on (A, B).

    Skips the test when the wrapped MATLAB implementation rejects the
    inputs; otherwise asserts every output key matches to rtol=1e-05
    (NaNs compare equal).
    """
    try:
        # wrap matlab functions
        Wrapped_slm = sw.matlab_SurfStatF(A, B)
    except Exception:
        # Bug fix: bare 'except:' also swallowed SystemExit and
        # KeyboardInterrupt; only MATLAB-side failures should skip.
        pytest.skip("Original MATLAB code does not work with these inputs.")
    # run python functions
    Python_slm = py_SurfStatF(A, B)
    # compare matlab-python outputs key by key
    mismatched = [
        key for key in Wrapped_slm
        if not np.allclose(Python_slm[key], Wrapped_slm[key],
                           rtol=1e-05, equal_nan=True)
    ]
    assert not mismatched, "outputs differ for keys: {}".format(mismatched)
#### Test 1
def test_slm1_slm2_easy_int():
    """Tiny fixed sizes; slm1/slm2 share one integer design matrix."""
    n, p, k, v = 5, 6, 2, 1
    rng = np.random.default_rng()
    tri = int(k * (k + 1) / 2)  # rows of the packed SSE matrix
    slm1 = {
        'X': rng.integers(100, size=(n, p)),
        'df': n - 1,
        'SSE': rng.integers(1, 100, size=(tri, v)),
        'coef': rng.integers(100, size=(p, v)),
    }
    slm2 = {
        'X': slm1['X'],
        'df': n,
        'SSE': rng.integers(1, 100, size=(tri, v)),
        'coef': rng.integers(100, size=(p, v)),
    }
    dummy_test(slm1, slm2)
#### Test 2
def test_slm1_slm2_middle_int():
    """Random sizes; slm1/slm2 share one integer design matrix."""
    n = np.random.randint(3, 100)
    p = np.random.randint(3, 100)
    k = np.random.randint(3, 100)
    v = np.random.randint(3, 100)
    rng = np.random.default_rng()
    tri = int(k * (k + 1) / 2)  # rows of the packed SSE matrix
    slm1 = {
        'X': rng.integers(100, size=(n, p)),
        'df': n - 1,
        'SSE': rng.integers(1, 100, size=(tri, v)),
        'coef': rng.integers(100, size=(p, v)),
    }
    slm2 = {
        'X': slm1['X'],
        'df': n,
        'SSE': rng.integers(1, 100, size=(tri, v)),
        'coef': rng.integers(100, size=(p, v)),
    }
    dummy_test(slm1, slm2)
#### Test 3
def test_slm1_slm2_easy_random():
    """Random sizes; slm1/slm2 share one uniform-random design matrix."""
    n = np.random.randint(3, 100)
    p = np.random.randint(3, 100)
    k = np.random.randint(3, 100)
    v = np.random.randint(3, 100)
    # (removed an unused np.random.default_rng() local from the original)
    tri = int(k * (k + 1) / 2)  # rows of the packed SSE matrix
    slm1 = {
        'X': np.random.rand(n, p),
        'df': n,
        'SSE': np.random.rand(tri, v),
        'coef': np.random.rand(p, v),
    }
    slm2 = {
        'X': slm1['X'],
        'df': p,
        'SSE': np.random.rand(tri, v),
        'coef': np.random.rand(p, v),
    }
    dummy_test(slm1, slm2)
#### Test 4
def test_slm1_slm2_coef3D_int_k3():
    """k=3; 3D constant-valued coef arrays, shared integer design matrix."""
    rng = np.random.default_rng()
    n = np.random.randint(3, 100)
    p = np.random.randint(3, 100)
    k = 3
    v = np.random.randint(3, 100)
    tri = int(k * (k + 1) / 2)  # rows of the packed SSE matrix
    slm1 = {
        'X': rng.integers(100, size=(n, p)),
        'df': p,
        'SSE': rng.integers(1, 100, size=(tri, v)),
        'coef': np.ones((p, v, k)) + 2,  # constant 3.0 everywhere
    }
    slm2 = {
        'X': slm1['X'],
        'df': p + 1,
        'SSE': rng.integers(1, 100, size=(tri, v)),
        'coef': np.ones((p, v, k)),
    }
    dummy_test(slm1, slm2)
#### Test 5
def test_slm1_slm2_coef3D_int_k2():
    """k=2; 3D constant-valued coef arrays, shared integer design matrix."""
    rng = np.random.default_rng()
    n = np.random.randint(3, 100)
    p = np.random.randint(3, 100)
    k = 2
    v = np.random.randint(3, 100)
    tri = int(k * (k + 1) / 2)  # rows of the packed SSE matrix
    slm1 = {
        'X': rng.integers(100, size=(n, p)),
        'df': p + 1,
        'SSE': rng.integers(1, 100, size=(tri, v)),
        'coef': np.ones((p, v, k)) + 2,  # constant 3.0 everywhere
    }
    slm2 = {
        'X': slm1['X'],
        'df': p,
        'SSE': rng.integers(1, 100, size=(tri, v)),
        'coef': np.ones((p, v, k)),
    }
    dummy_test(slm1, slm2)
#### Test 6
def test_slm1_slm2_coef3D_int_k1():
    """k=1; 3D random-integer coef arrays, shared integer design matrix."""
    rng = np.random.default_rng()
    n = np.random.randint(3, 100)
    p = np.random.randint(3, 100)
    # Bug fix: the function name and original comment say k = 1, but the
    # code set k = 2 (duplicating the _k2 test); use 1 as intended.
    k = 1
    v = np.random.randint(3, 100)
    tri = int(k * (k + 1) / 2)  # rows of the packed SSE matrix
    slm1 = {
        'X': rng.integers(100, size=(n, p)),
        'df': n,
        'SSE': rng.integers(1, 100, size=(tri, v)),
        'coef': rng.integers(1, 100, size=(p, v, k)),
    }
    slm2 = {
        'X': slm1['X'],
        'df': p,
        'SSE': rng.integers(1, 100, size=(tri, v)),
        'coef': rng.integers(1, 100, size=(p, v, k)),
    }
    dummy_test(slm1, slm2)
#### Test 7
def test_slm1_slm2_coef3D_random_k3():
    """k=3; 3D uniform-random coef arrays, shared random design matrix."""
    n = np.random.randint(3, 100)
    p = np.random.randint(3, 100)
    k = 3
    v = np.random.randint(3, 100)
    tri = int(k * (k + 1) / 2)  # rows of the packed SSE matrix
    slm1 = {
        'X': np.random.rand(n, p),
        'df': p,
        'SSE': np.random.rand(tri, v),
        'coef': np.random.rand(p, v, k),
    }
    slm2 = {
        'X': slm1['X'],
        'df': p + 1,
        'SSE': np.random.rand(tri, v),
        'coef': np.random.rand(p, v, k),
    }
    dummy_test(slm1, slm2)
#### Test 8
def test_slm1_slm2_coef3D_random_k2():
    """k=2; 3D uniform-random coef arrays, shared random design matrix."""
    n = np.random.randint(3, 100)
    p = np.random.randint(3, 100)
    k = 2
    v = np.random.randint(3, 100)
    tri = int(k * (k + 1) / 2)  # rows of the packed SSE matrix
    slm1 = {
        'X': np.random.rand(n, p),
        'df': p + 1,
        'SSE': np.random.rand(tri, v),
        'coef': np.random.rand(p, v, k),
    }
    slm2 = {
        'X': slm1['X'],
        'df': p,
        'SSE': np.random.rand(tri, v),
        'coef': np.random.rand(p, v, k),
    }
    dummy_test(slm1, slm2)
#### Test 9
def test_slm1_slm2_coef3D_random_k1():
    """k = 1: both models share one random 2D design matrix and carry
    3D random coefficient arrays; degrees of freedom differ by one."""
    num_obs = np.random.randint(3, 100)
    num_pred = np.random.randint(3, 100)
    num_var = 1
    num_vert = np.random.randint(3, 100)
    sse_rows = int(num_var * (num_var + 1) / 2)
    design = np.random.rand(num_obs, num_pred)
    slm1 = {
        'X': design,
        'df': num_pred + 1,
        'SSE': np.random.rand(sse_rows, num_vert),
        'coef': np.random.rand(num_pred, num_vert, num_var),
    }
    slm2 = {
        'X': design,  # deliberately the SAME design matrix as slm1
        'df': num_pred,
        'SSE': np.random.rand(sse_rows, num_vert),
        'coef': np.random.rand(num_pred, num_vert, num_var),
    }
    dummy_test(slm1, slm2)
| 24.458182
| 75
| 0.550699
| 1,140
| 6,726
| 3.185965
| 0.085965
| 0.114537
| 0.107379
| 0.114537
| 0.852698
| 0.842236
| 0.80011
| 0.787445
| 0.781663
| 0.776432
| 0
| 0.086991
| 0.241154
| 6,726
| 274
| 76
| 24.547445
| 0.624608
| 0.149123
| 0
| 0.798817
| 0
| 0
| 0.043832
| 0
| 0
| 0
| 0
| 0
| 0.005917
| 1
| 0.059172
| false
| 0
| 0.035503
| 0
| 0.094675
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3bd59da93414941620c2a743476383906d0ba526
| 20,136
|
py
|
Python
|
scripts/test_heuristics.py
|
SConsul/FLITE
|
7e3f462e66845a5c05e909d6a21dc1862a58579b
|
[
"MIT"
] | null | null | null |
scripts/test_heuristics.py
|
SConsul/FLITE
|
7e3f462e66845a5c05e909d6a21dc1862a58579b
|
[
"MIT"
] | null | null | null |
scripts/test_heuristics.py
|
SConsul/FLITE
|
7e3f462e66845a5c05e909d6a21dc1862a58579b
|
[
"MIT"
] | null | null | null |
# Inputs
import torch
import numpy as np
from heuristics import *
# Test bounding box heuristic
def test_bbox_heuristic():
    """Exercise the BBox heuristic on a fixed 12x8 batch of ORBIT frame paths.

    The original hand-written grid of ~100 path literals followed one rule:
    each clip contributes 8 frames sampled every 10 frames from a start
    index, with the frame number clamped at 00591 (clips repeat their last
    frame once the video runs out). The grid is rebuilt from that rule
    here, producing byte-identical paths with far less duplication.
    """
    # One collector (P665), three objects, one clutter video id per object.
    video_ids = {
        'bed': 'R8xCSOIpEhfXO5NsSHOPdaYcm0GM_0P4rwImD2WAmP0',
        'door': 'VAVn7HMPfNinMzRJh301zoqxhbVGDrBbcK9mXjs56gc',
        'wallet': '6sN1XsQ76GOCaJOabx-5M3zl4qtDQCp3nryHcTW821Q',
    }

    def _frame_path(obj, frame):
        # Mirrors the ORBIT benchmark layout:
        # train/<user>/<object>/clutter/<video>/<video>-<frame:05d>.jpg
        video = f"P665--{obj}--clutter--{video_ids[obj]}"
        return (f"../dataset/orbit_benchmark_224/train/P665/{obj}/clutter/"
                f"{video}/{video}-{frame:05d}.jpg")

    # (object, first frame number) for each of the 12 clips in the batch,
    # in the same order as the original literal list.
    clips = [
        ('bed', 401), ('door', 481), ('wallet', 401), ('door', 1),
        ('wallet', 561), ('door', 241), ('bed', 1), ('wallet', 81),
        ('wallet', 481), ('door', 81), ('bed', 561), ('bed', 481),
    ]
    test_paths = np.array([
        [_frame_path(obj, min(start + 10 * i, 591)) for i in range(8)]
        for obj, start in clips
    ])
    test_bbox_path = '../dataset/orbit_clutter_bounding_boxes'
    # Compute heuristic
    bbox_filter = BBox(test_paths, test_bbox_path)
    bboxes = bbox_filter.get_batch_bbox()
    ranked_idxs = bbox_filter.get_ranked_bbox_sizes()
    print('BBoxes shape:', bboxes.shape)
    print('Ranked idxs:', ranked_idxs)
# Test blur heuristic
def test_blur_heuristic():
    """Smoke-test the Blur heuristic on a random clip tensor and print
    the indices of the 4 least blurry frames."""
    clips = torch.rand((44, 4, 3, 224, 224))
    heuristic = Blur(clips)
    least_blurry = heuristic.get_least_blurry(4)
    print(least_blurry)
if __name__ == '__main__':
    # NOTE(review): only the bbox heuristic runs here; test_blur_heuristic
    # is defined above but never invoked — confirm whether that is intentional.
    test_bbox_heuristic()
| 157.3125
| 213
| 0.841925
| 1,970
| 20,136
| 8.452792
| 0.052792
| 0.069902
| 0.121067
| 0.138362
| 0.96625
| 0.963728
| 0.963728
| 0.963728
| 0.941809
| 0.941809
| 0
| 0.164527
| 0.021106
| 20,136
| 127
| 214
| 158.551181
| 0.68028
| 0.006158
| 0
| 0.070175
| 0
| 0.842105
| 0.939412
| 0.937762
| 0
| 0
| 0
| 0
| 0
| 1
| 0.017544
| false
| 0
| 0.026316
| 0
| 0.04386
| 0.026316
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
3bf6440e20a829bf346c854418afe3a3758e7806
| 71,513
|
py
|
Python
|
pkgs/ops-pkg/src/genie/libs/ops/nd/iosxr/tests/nd_output.py
|
jbronikowski/genielibs
|
200a34e5fe4838a27b5a80d5973651b2e34ccafb
|
[
"Apache-2.0"
] | 94
|
2018-04-30T20:29:15.000Z
|
2022-03-29T13:40:31.000Z
|
pkgs/ops-pkg/src/genie/libs/ops/nd/iosxr/tests/nd_output.py
|
jbronikowski/genielibs
|
200a34e5fe4838a27b5a80d5973651b2e34ccafb
|
[
"Apache-2.0"
] | 67
|
2018-12-06T21:08:09.000Z
|
2022-03-29T18:00:46.000Z
|
pkgs/ops-pkg/src/genie/libs/ops/nd/iosxr/tests/nd_output.py
|
jbronikowski/genielibs
|
200a34e5fe4838a27b5a80d5973651b2e34ccafb
|
[
"Apache-2.0"
] | 49
|
2018-06-29T18:59:03.000Z
|
2022-03-10T02:07:59.000Z
|
'''
Nd Genie Ops Object Outputs for IOSXR.
'''
class NdOutput(object):
ShowIpv6Neighbors = {
'interfaces': {
'GigabitEthernet0/0/0/0.90': {
'interface': 'Gi0/0/0/0.90',
'neighbors': {
'fe80::f816:3eff:fe26:1224': {
'age': '131',
'ip': 'fe80::f816:3eff:fe26:1224',
'link_layer_address': 'fa16.3e26.1224',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
},
},
'GigabitEthernet0/0/0/1.90': {
'interface': 'Gi0/0/0/1.90',
'neighbors': {
'fe80::5c00:40ff:fe02:7': {
'age': '144',
'ip': 'fe80::5c00:40ff:fe02:7',
'link_layer_address': '5e00.4002.0007',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
},
},
'GigabitEthernet0/0/0/0.110': {
'interface': 'Gi0/0/0/0.110',
'neighbors': {
'fe80::f816:3eff:fe26:1224': {
'age': '99',
'ip': 'fe80::f816:3eff:fe26:1224',
'link_layer_address': 'fa16.3e26.1224',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
},
},
'GigabitEthernet0/0/0/0.115': {
'interface': 'Gi0/0/0/0.115',
'neighbors': {
'fe80::f816:3eff:fe26:1224': {
'age': '46',
'ip': 'fe80::f816:3eff:fe26:1224',
'link_layer_address': 'fa16.3e26.1224',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
},
},
'GigabitEthernet0/0/0/0.120': {
'interface': 'Gi0/0/0/0.120',
'neighbors': {
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
},
},
'GigabitEthernet0/0/0/0.390': {
'interface': 'Gi0/0/0/0.390',
'neighbors': {
'fe80::f816:3eff:fe26:1224': {
'age': '137',
'ip': 'fe80::f816:3eff:fe26:1224',
'link_layer_address': 'fa16.3e26.1224',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
},
},
'GigabitEthernet0/0/0/0.410': {
'interface': 'Gi0/0/0/0.410',
'neighbors': {
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
},
},
'GigabitEthernet0/0/0/0.415': {
'interface': 'Gi0/0/0/0.415',
'neighbors': {
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
},
},
'GigabitEthernet0/0/0/0.420': {
'interface': 'Gi0/0/0/0.420',
'neighbors': {
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
},
},
'GigabitEthernet0/0/0/1.110': {
'interface': 'Gi0/0/0/1.110',
'neighbors': {
'fe80::5c00:40ff:fe02:7': {
'age': '167',
'ip': 'fe80::5c00:40ff:fe02:7',
'link_layer_address': '5e00.4002.0007',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
},
},
'GigabitEthernet0/0/0/1.115': {
'interface': 'Gi0/0/0/1.115',
'neighbors': {
'fe80::5c00:40ff:fe02:7': {
'age': '137',
'ip': 'fe80::5c00:40ff:fe02:7',
'link_layer_address': '5e00.4002.0007',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
},
},
'GigabitEthernet0/0/0/1.120': {
'interface': 'Gi0/0/0/1.120',
'neighbors': {
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
},
},
'GigabitEthernet0/0/0/1.390': {
'interface': 'Gi0/0/0/1.390',
'neighbors': {
'fe80::5c00:40ff:fe02:7': {
'age': '101',
'ip': 'fe80::5c00:40ff:fe02:7',
'link_layer_address': '5e00.4002.0007',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
},
},
'GigabitEthernet0/0/0/1.410': {
'interface': 'Gi0/0/0/1.410',
'neighbors': {
'fe80::5c00:40ff:fe02:7': {
'age': '121',
'ip': 'fe80::5c00:40ff:fe02:7',
'link_layer_address': '5e00.4002.0007',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
},
},
'GigabitEthernet0/0/0/1.415': {
'interface': 'Gi0/0/0/1.415',
'neighbors': {
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
},
},
'GigabitEthernet0/0/0/1.420': {
'interface': 'Gi0/0/0/1.420',
'neighbors': {
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
},
},
},
},
}
ShowIpv6NeighborsDetail = {
'interfaces': {
'GigabitEthernet0/0/0/0.90': {
'interface': 'Gi0/0/0/0.90',
'neighbors': {
'fe80::f816:3eff:fe26:1224': {
'age': '138',
'ip': 'fe80::f816:3eff:fe26:1224',
'link_layer_address': 'fa16.3e26.1224',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': 'Y',
'sync': '-',
'serg_flags': 'ff',
'origin': 'dynamic',
},
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': '-',
'sync': '-',
'serg_flags': 'ff',
'origin': 'other',
},
},
},
'GigabitEthernet0/0/0/1.90': {
'interface': 'Gi0/0/0/1.90',
'neighbors': {
'fe80::5c00:40ff:fe02:7': {
'age': '151',
'ip': 'fe80::5c00:40ff:fe02:7',
'link_layer_address': '5e00.4002.0007',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': 'Y',
'sync': '-',
'serg_flags': 'ff',
'origin': 'dynamic',
},
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': '-',
'sync': '-',
'serg_flags': 'ff',
'origin': 'other',
},
},
},
'GigabitEthernet0/0/0/0.110': {
'interface': 'Gi0/0/0/0.110',
'neighbors': {
'fe80::f816:3eff:fe26:1224': {
'age': '114',
'ip': 'fe80::f816:3eff:fe26:1224',
'link_layer_address': 'fa16.3e26.1224',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': 'Y',
'sync': '-',
'serg_flags': 'ff',
'origin': 'dynamic',
},
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': '-',
'sync': '-',
'serg_flags': 'ff',
'origin': 'other',
},
},
},
'GigabitEthernet0/0/0/0.115': {
'interface': 'Gi0/0/0/0.115',
'neighbors': {
'fe80::f816:3eff:fe26:1224': {
'age': '69',
'ip': 'fe80::f816:3eff:fe26:1224',
'link_layer_address': 'fa16.3e26.1224',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': 'Y',
'sync': '-',
'serg_flags': 'ff',
'origin': 'dynamic',
},
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': '-',
'sync': '-',
'serg_flags': 'ff',
'origin': 'other',
},
},
},
'GigabitEthernet0/0/0/0.120': {
'interface': 'Gi0/0/0/0.120',
'neighbors': {
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': '-',
'sync': '-',
'serg_flags': 'ff',
'origin': 'other',
},
},
},
'GigabitEthernet0/0/0/0.390': {
'interface': 'Gi0/0/0/0.390',
'neighbors': {
'fe80::f816:3eff:fe26:1224': {
'age': '151',
'ip': 'fe80::f816:3eff:fe26:1224',
'link_layer_address': 'fa16.3e26.1224',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': 'Y',
'sync': '-',
'serg_flags': 'ff',
'origin': 'dynamic',
},
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': '-',
'sync': '-',
'serg_flags': 'ff',
'origin': 'other',
},
},
},
'GigabitEthernet0/0/0/0.410': {
'interface': 'Gi0/0/0/0.410',
'neighbors': {
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': '-',
'sync': '-',
'serg_flags': 'ff',
'origin': 'other',
},
},
},
'GigabitEthernet0/0/0/0.415': {
'interface': 'Gi0/0/0/0.415',
'neighbors': {
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': '-',
'sync': '-',
'serg_flags': 'ff',
'origin': 'other',
},
},
},
'GigabitEthernet0/0/0/0.420': {
'interface': 'Gi0/0/0/0.420',
'neighbors': {
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': '-',
'sync': '-',
'serg_flags': 'ff',
'origin': 'other',
},
},
},
'GigabitEthernet0/0/0/1.110': {
'interface': 'Gi0/0/0/1.110',
'neighbors': {
'fe80::5c00:40ff:fe02:7': {
'age': '17',
'ip': 'fe80::5c00:40ff:fe02:7',
'link_layer_address': '5e00.4002.0007',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': 'Y',
'sync': '-',
'serg_flags': 'ff',
'origin': 'dynamic',
},
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': '-',
'sync': '-',
'serg_flags': 'ff',
'origin': 'other',
},
},
},
'GigabitEthernet0/0/0/1.115': {
'interface': 'Gi0/0/0/1.115',
'neighbors': {
'fe80::5c00:40ff:fe02:7': {
'age': '165',
'ip': 'fe80::5c00:40ff:fe02:7',
'link_layer_address': '5e00.4002.0007',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': 'Y',
'sync': '-',
'serg_flags': 'ff',
'origin': 'dynamic',
},
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': '-',
'sync': '-',
'serg_flags': 'ff',
'origin': 'other',
},
},
},
'GigabitEthernet0/0/0/1.120': {
'interface': 'Gi0/0/0/1.120',
'neighbors': {
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': '-',
'sync': '-',
'serg_flags': 'ff',
'origin': 'other',
},
},
},
'GigabitEthernet0/0/0/1.390': {
'interface': 'Gi0/0/0/1.390',
'neighbors': {
'fe80::5c00:40ff:fe02:7': {
'age': '100',
'ip': 'fe80::5c00:40ff:fe02:7',
'link_layer_address': '5e00.4002.0007',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': 'Y',
'sync': '-',
'serg_flags': 'ff',
'origin': 'dynamic',
},
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': '-',
'sync': '-',
'serg_flags': 'ff',
'origin': 'other',
},
},
},
'GigabitEthernet0/0/0/1.410': {
'interface': 'Gi0/0/0/1.410',
'neighbors': {
'fe80::5c00:40ff:fe02:7': {
'age': '125',
'ip': 'fe80::5c00:40ff:fe02:7',
'link_layer_address': '5e00.4002.0007',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': 'Y',
'sync': '-',
'serg_flags': 'ff',
'origin': 'dynamic',
},
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': '-',
'sync': '-',
'serg_flags': 'ff',
'origin': 'other',
},
},
},
'GigabitEthernet0/0/0/1.415': {
'interface': 'Gi0/0/0/1.415',
'neighbors': {
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': '-',
'sync': '-',
'serg_flags': 'ff',
'origin': 'other',
},
},
},
'GigabitEthernet0/0/0/1.420': {
'interface': 'Gi0/0/0/1.420',
'neighbors': {
'Mcast adjacency': {
'age': '-',
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'location': '0/0/CPU0',
'static': '-',
'dynamic': '-',
'sync': '-',
'serg_flags': 'ff',
'origin': 'other',
},
},
},
},
}
ShowIpv6VrfAllInterface = {
'Bundle-Ether12': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'default',
'vrf_id': '0x60000000',
'enabled': False,
},
'Bundle-Ether23': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'default',
'vrf_id': '0x60000000',
'enabled': False,
},
'Loopback0': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'default',
'vrf_id': '0x60000000',
'enabled': True,
'ipv6': {
'2001:2:2:2::2/128': {
'ipv6': '2001:2:2:2::2',
'ipv6_prefix_length': '128',
'ipv6_subnet': '2001:2:2:2::2',
},
'ipv6_link_local': 'fe80::8152:bfff:fed0:fbb5',
'ipv6_groups': ['ff02::1:ff00:2', 'ff02::1:ffd0:fbb5', 'ff02::2', 'ff02::1', 'ff02::16', 'ff02::d'],
'ipv6_mtu': '1500',
'ipv6_mtu_available': '1500',
'icmp_redirects': 'disabled',
'nd_dad': 'disabled',
'dad_attempts': '0',
'nd_reachable_time': '0',
'nd_cache_limit': '0',
'nd_adv_retrans_int': '0',
'stateless_autoconfig': True,
'table_id': '0xe0800000',
'complete_protocol_adj': '0',
'complete_glean_adj': '0',
'incomplete_protocol_adj': '0',
'incomplete_glean_adj': '0',
'dropped_protocol_req': '0',
'dropped_glean_req': '0',
},
},
'Loopback300': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'VRF1',
'vrf_id': '0x60000001',
'enabled': True,
'ipv6': {
'2001:2:2:2::2/128': {
'ipv6': '2001:2:2:2::2',
'ipv6_prefix_length': '128',
'ipv6_subnet': '2001:2:2:2::2',
},
'ipv6_link_local': 'fe80::8152:bfff:fed0:fbb5',
'ipv6_groups': ['ff02::1:ff00:2', 'ff02::1:ffd0:fbb5', 'ff02::2', 'ff02::1', 'ff02::16', 'ff02::d'],
'ipv6_mtu': '1500',
'ipv6_mtu_available': '1500',
'icmp_redirects': 'disabled',
'nd_dad': 'disabled',
'dad_attempts': '0',
'nd_reachable_time': '0',
'nd_cache_limit': '0',
'nd_adv_retrans_int': '0',
'stateless_autoconfig': True,
'table_id': '0xe0800001',
'complete_protocol_adj': '0',
'complete_glean_adj': '0',
'incomplete_protocol_adj': '0',
'incomplete_glean_adj': '0',
'dropped_protocol_req': '0',
'dropped_glean_req': '0',
},
},
'MgmtEth0/RP0/CPU0/0': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'management',
'vrf_id': '0x60000002',
'enabled': False,
},
'GigabitEthernet0/0/0/0': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'default',
'vrf_id': '0x60000000',
'enabled': False,
},
'GigabitEthernet0/0/0/0.90': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'default',
'vrf_id': '0x60000000',
'enabled': True,
'ipv6': {
'2001:10:12:90::2/64': {
'ipv6': '2001:10:12:90::2',
'ipv6_prefix_length': '64',
'ipv6_subnet': '2001:10:12:90::',
},
'ipv6_link_local': 'fe80::f816:3eff:fe0f:b2ec',
'ipv6_groups': ['ff02::1:ff00:2', 'ff02::1:ff0f:b2ec', 'ff02::2', 'ff02::1', 'ff02::a'],
'ipv6_mtu': '1518',
'ipv6_mtu_available': '1500',
'icmp_redirects': 'disabled',
'icmp_unreachables': 'enabled',
'nd_dad': 'enabled',
'dad_attempts': '1',
'nd_reachable_time': '0',
'nd_cache_limit': '1000000000',
'nd_adv_retrans_int': '0',
'stateless_autoconfig': True,
'table_id': '0xe0800000',
'complete_protocol_adj': '0',
'complete_glean_adj': '1',
'incomplete_protocol_adj': '0',
'incomplete_glean_adj': '0',
'dropped_protocol_req': '0',
'dropped_glean_req': '0',
'nd_suppress': True,
},
},
'GigabitEthernet0/0/0/0.110': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'default',
'vrf_id': '0x60000000',
'enabled': True,
'ipv6': {
'2001:10:12:110::2/64': {
'ipv6': '2001:10:12:110::2',
'ipv6_prefix_length': '64',
'ipv6_subnet': '2001:10:12:110::',
},
'ipv6_link_local': 'fe80::f816:3eff:fe0f:b2ec',
'ipv6_groups': ['ff02::1:ff00:2', 'ff02::1:ff0f:b2ec', 'ff02::2', 'ff02::1', 'ff02::16', 'ff02::5', 'ff02::6', 'ff02::d'],
'ipv6_mtu': '1518',
'ipv6_mtu_available': '1500',
'icmp_redirects': 'disabled',
'icmp_unreachables': 'enabled',
'nd_dad': 'enabled',
'dad_attempts': '1',
'nd_reachable_time': '0',
'nd_cache_limit': '1000000000',
'nd_adv_retrans_int': '0',
'nd_adv_duration': '160-240',
'nd_router_adv': '1800',
'stateless_autoconfig': True,
'table_id': '0xe0800000',
'complete_protocol_adj': '1',
'complete_glean_adj': '0',
'incomplete_protocol_adj': '0',
'incomplete_glean_adj': '0',
'dropped_protocol_req': '0',
'dropped_glean_req': '0',
},
},
'GigabitEthernet0/0/0/0.115': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'default',
'vrf_id': '0x60000000',
'enabled': True,
'ipv6': {
'2001:10:12:115::2/64': {
'ipv6': '2001:10:12:115::2',
'ipv6_prefix_length': '64',
'ipv6_subnet': '2001:10:12:115::',
},
'ipv6_link_local': 'fe80::f816:3eff:fe0f:b2ec',
'ipv6_groups': ['ff02::1:ff00:2', 'ff02::1:ff0f:b2ec', 'ff02::2', 'ff02::1', 'ff1e:abcd:def1:2222::1', 'ff1e:abcd:def1:2222::2'],
'ipv6_mtu': '1518',
'ipv6_mtu_available': '1500',
'icmp_redirects': 'disabled',
'icmp_unreachables': 'enabled',
'nd_dad': 'enabled',
'dad_attempts': '1',
'nd_reachable_time': '0',
'nd_cache_limit': '1000000000',
'nd_adv_retrans_int': '0',
'nd_adv_duration': '160-240',
'nd_router_adv': '1800',
'stateless_autoconfig': True,
'table_id': '0xe0800000',
'complete_protocol_adj': '1',
'complete_glean_adj': '0',
'incomplete_protocol_adj': '0',
'incomplete_glean_adj': '0',
'dropped_protocol_req': '0',
'dropped_glean_req': '0',
},
},
'GigabitEthernet0/0/0/0.120': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'default',
'vrf_id': '0x60000000',
'enabled': True,
'ipv6': {
'2001:10:12:120::2/64': {
'ipv6': '2001:10:12:120::2',
'ipv6_prefix_length': '64',
'ipv6_subnet': '2001:10:12:120::',
},
'ipv6_link_local': 'fe80::f816:3eff:fe0f:b2ec',
'ipv6_groups': ['ff02::1:ff00:2', 'ff02::1:ff0f:b2ec', 'ff02::2', 'ff02::1'],
'ipv6_mtu': '1518',
'ipv6_mtu_available': '1500',
'icmp_redirects': 'disabled',
'icmp_unreachables': 'enabled',
'nd_dad': 'enabled',
'dad_attempts': '1',
'nd_reachable_time': '0',
'nd_cache_limit': '1000000000',
'nd_adv_retrans_int': '0',
'nd_adv_duration': '160-240',
'nd_router_adv': '1800',
'stateless_autoconfig': True,
'table_id': '0xe0800000',
'complete_protocol_adj': '0',
'complete_glean_adj': '0',
'incomplete_protocol_adj': '0',
'incomplete_glean_adj': '0',
'dropped_protocol_req': '0',
'dropped_glean_req': '0',
},
},
'GigabitEthernet0/0/0/0.390': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'VRF1',
'vrf_id': '0x60000001',
'enabled': True,
'ipv6': {
'2001:10:12:90::2/64': {
'ipv6': '2001:10:12:90::2',
'ipv6_prefix_length': '64',
'ipv6_subnet': '2001:10:12:90::',
},
'ipv6_link_local': 'fe80::f816:3eff:fe0f:b2ec',
'ipv6_groups': ['ff02::1:ff00:2', 'ff02::1:ff0f:b2ec', 'ff02::2', 'ff02::1', 'ff02::a'],
'ipv6_mtu': '1518',
'ipv6_mtu_available': '1500',
'icmp_redirects': 'disabled',
'icmp_unreachables': 'enabled',
'nd_dad': 'enabled',
'dad_attempts': '1',
'nd_reachable_time': '0',
'nd_cache_limit': '1000000000',
'nd_adv_retrans_int': '0',
'nd_adv_duration': '160-240',
'nd_router_adv': '1800',
'stateless_autoconfig': True,
'table_id': '0xe0800001',
'complete_protocol_adj': '1',
'complete_glean_adj': '0',
'incomplete_protocol_adj': '0',
'incomplete_glean_adj': '0',
'dropped_protocol_req': '0',
'dropped_glean_req': '0',
},
},
'GigabitEthernet0/0/0/0.410': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'VRF1',
'vrf_id': '0x60000001',
'enabled': True,
'ipv6': {
'2001:10:12:110::2/64': {
'ipv6': '2001:10:12:110::2',
'ipv6_prefix_length': '64',
'ipv6_subnet': '2001:10:12:110::',
},
'ipv6_link_local': 'fe80::f816:3eff:fe0f:b2ec',
'ipv6_groups': ['ff02::1:ff00:2', 'ff02::1:ff0f:b2ec', 'ff02::2', 'ff02::1', 'ff02::16', 'ff02::5', 'ff02::6', 'ff02::d'],
'ipv6_mtu': '1518',
'ipv6_mtu_available': '1500',
'icmp_redirects': 'disabled',
'icmp_unreachables': 'enabled',
'nd_dad': 'enabled',
'dad_attempts': '1',
'nd_reachable_time': '0',
'nd_cache_limit': '1000000000',
'nd_adv_retrans_int': '0',
'nd_adv_duration': '160-240',
'nd_router_adv': '1800',
'stateless_autoconfig': True,
'table_id': '0xe0800001',
'complete_protocol_adj': '0',
'complete_glean_adj': '0',
'incomplete_protocol_adj': '0',
'incomplete_glean_adj': '0',
'dropped_protocol_req': '0',
'dropped_glean_req': '0',
},
},
'GigabitEthernet0/0/0/0.415': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'VRF1',
'vrf_id': '0x60000001',
'enabled': True,
'ipv6': {
'2001:10:12:115::2/64': {
'ipv6': '2001:10:12:115::2',
'ipv6_prefix_length': '64',
'ipv6_subnet': '2001:10:12:115::',
},
'ipv6_link_local': 'fe80::f816:3eff:fe0f:b2ec',
'ipv6_groups': ['ff02::1:ff00:2', 'ff02::1:ff0f:b2ec', 'ff02::2', 'ff02::1', 'ff1e:abcd:def1:2222::1', 'ff1e:abcd:def1:2222::2'],
'ipv6_mtu': '1518',
'ipv6_mtu_available': '1500',
'icmp_redirects': 'disabled',
'icmp_unreachables': 'enabled',
'nd_dad': 'enabled',
'dad_attempts': '1',
'nd_reachable_time': '0',
'nd_cache_limit': '1000000000',
'nd_adv_retrans_int': '0',
'nd_adv_duration': '160-240',
'nd_router_adv': '1800',
'stateless_autoconfig': True,
'table_id': '0xe0800001',
'complete_protocol_adj': '0',
'complete_glean_adj': '0',
'incomplete_protocol_adj': '0',
'incomplete_glean_adj': '0',
'dropped_protocol_req': '0',
'dropped_glean_req': '0',
},
},
'GigabitEthernet0/0/0/0.420': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'VRF1',
'vrf_id': '0x60000001',
'enabled': True,
'ipv6': {
'2001:10:12:120::2/64': {
'ipv6': '2001:10:12:120::2',
'ipv6_prefix_length': '64',
'ipv6_subnet': '2001:10:12:120::',
},
'ipv6_link_local': 'fe80::f816:3eff:fe0f:b2ec',
'ipv6_groups': ['ff02::1:ff00:2', 'ff02::1:ff0f:b2ec', 'ff02::2', 'ff02::1'],
'ipv6_mtu': '1518',
'ipv6_mtu_available': '1500',
'icmp_redirects': 'disabled',
'icmp_unreachables': 'enabled',
'nd_dad': 'enabled',
'dad_attempts': '1',
'nd_reachable_time': '0',
'nd_cache_limit': '1000000000',
'nd_adv_retrans_int': '0',
'nd_adv_duration': '160-240',
'nd_router_adv': '1800',
'stateless_autoconfig': True,
'table_id': '0xe0800001',
'complete_protocol_adj': '0',
'complete_glean_adj': '0',
'incomplete_protocol_adj': '0',
'incomplete_glean_adj': '0',
'dropped_protocol_req': '0',
'dropped_glean_req': '0',
},
},
'GigabitEthernet0/0/0/1': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'default',
'vrf_id': '0x60000000',
'enabled': False,
},
'GigabitEthernet0/0/0/1.90': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'default',
'vrf_id': '0x60000000',
'enabled': True,
'ipv6': {
'2001:10:23:90::2/64': {
'ipv6': '2001:10:23:90::2',
'ipv6_prefix_length': '64',
'ipv6_subnet': '2001:10:23:90::',
},
'ipv6_link_local': 'fe80::f816:3eff:fe59:8f2e',
'ipv6_groups': ['ff02::1:ff00:2', 'ff02::1:ff59:8f2e', 'ff02::2', 'ff02::1', 'ff02::a'],
'ipv6_mtu': '1518',
'ipv6_mtu_available': '1500',
'icmp_redirects': 'disabled',
'icmp_unreachables': 'enabled',
'nd_dad': 'enabled',
'dad_attempts': '1',
'nd_reachable_time': '0',
'nd_cache_limit': '1000000000',
'nd_adv_retrans_int': '0',
'nd_adv_duration': '160-240',
'nd_router_adv': '1800',
'stateless_autoconfig': True,
'table_id': '0xe0800000',
'complete_protocol_adj': '1',
'complete_glean_adj': '0',
'incomplete_protocol_adj': '0',
'incomplete_glean_adj': '0',
'dropped_protocol_req': '0',
'dropped_glean_req': '0',
},
},
'GigabitEthernet0/0/0/1.110': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'default',
'vrf_id': '0x60000000',
'enabled': True,
'ipv6': {
'2001:10:23:110::2/64': {
'ipv6': '2001:10:23:110::2',
'ipv6_prefix_length': '64',
'ipv6_subnet': '2001:10:23:110::',
},
'ipv6_link_local': 'fe80::f816:3eff:fe59:8f2e',
'ipv6_groups': ['ff02::1:ff00:2', 'ff02::1:ff59:8f2e', 'ff02::2', 'ff02::1', 'ff02::16', 'ff02::5', 'ff02::6', 'ff02::d'],
'ipv6_mtu': '1518',
'ipv6_mtu_available': '1500',
'icmp_redirects': 'disabled',
'icmp_unreachables': 'enabled',
'nd_dad': 'enabled',
'dad_attempts': '1',
'nd_reachable_time': '0',
'nd_cache_limit': '1000000000',
'nd_adv_retrans_int': '0',
'nd_adv_duration': '160-240',
'nd_router_adv': '1800',
'stateless_autoconfig': True,
'table_id': '0xe0800000',
'complete_protocol_adj': '1',
'complete_glean_adj': '0',
'incomplete_protocol_adj': '0',
'incomplete_glean_adj': '0',
'dropped_protocol_req': '0',
'dropped_glean_req': '0',
},
},
'GigabitEthernet0/0/0/1.115': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'default',
'vrf_id': '0x60000000',
'enabled': True,
'ipv6': {
'2001:10:23:115::2/64': {
'ipv6': '2001:10:23:115::2',
'ipv6_prefix_length': '64',
'ipv6_subnet': '2001:10:23:115::',
},
'ipv6_link_local': 'fe80::f816:3eff:fe59:8f2e',
'ipv6_groups': ['ff02::1:ff00:2', 'ff02::1:ff59:8f2e', 'ff02::2', 'ff02::1'],
'ipv6_mtu': '1518',
'ipv6_mtu_available': '1500',
'icmp_redirects': 'disabled',
'icmp_unreachables': 'enabled',
'nd_dad': 'enabled',
'dad_attempts': '1',
'nd_reachable_time': '0',
'nd_cache_limit': '1000000000',
'nd_adv_retrans_int': '0',
'nd_adv_duration': '160-240',
'nd_router_adv': '1800',
'stateless_autoconfig': True,
'table_id': '0xe0800000',
'complete_protocol_adj': '1',
'complete_glean_adj': '0',
'incomplete_protocol_adj': '0',
'incomplete_glean_adj': '0',
'dropped_protocol_req': '0',
'dropped_glean_req': '0',
},
},
'GigabitEthernet0/0/0/1.120': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'default',
'vrf_id': '0x60000000',
'enabled': True,
'ipv6': {
'2001:10:23:120::2/64': {
'ipv6': '2001:10:23:120::2',
'ipv6_prefix_length': '64',
'ipv6_subnet': '2001:10:23:120::',
},
'ipv6_link_local': 'fe80::f816:3eff:fe59:8f2e',
'ipv6_groups': ['ff02::1:ff00:2', 'ff02::1:ff59:8f2e', 'ff02::2', 'ff02::1'],
'ipv6_mtu': '1518',
'ipv6_mtu_available': '1500',
'icmp_redirects': 'disabled',
'icmp_unreachables': 'enabled',
'nd_dad': 'enabled',
'dad_attempts': '1',
'nd_reachable_time': '0',
'nd_cache_limit': '1000000000',
'nd_adv_retrans_int': '0',
'nd_adv_duration': '160-240',
'nd_router_adv': '1800',
'stateless_autoconfig': True,
'table_id': '0xe0800000',
'complete_protocol_adj': '0',
'complete_glean_adj': '0',
'incomplete_protocol_adj': '0',
'incomplete_glean_adj': '0',
'dropped_protocol_req': '0',
'dropped_glean_req': '0',
},
},
'GigabitEthernet0/0/0/1.390': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'VRF1',
'vrf_id': '0x60000001',
'enabled': True,
'ipv6': {
'2001:10:23:90::2/64': {
'ipv6': '2001:10:23:90::2',
'ipv6_prefix_length': '64',
'ipv6_subnet': '2001:10:23:90::',
},
'ipv6_link_local': 'fe80::f816:3eff:fe59:8f2e',
'ipv6_groups': ['ff02::1:ff00:2', 'ff02::1:ff59:8f2e', 'ff02::2', 'ff02::1', 'ff02::a'],
'ipv6_mtu': '1518',
'ipv6_mtu_available': '1500',
'icmp_redirects': 'disabled',
'icmp_unreachables': 'enabled',
'nd_dad': 'enabled',
'dad_attempts': '1',
'nd_reachable_time': '0',
'nd_cache_limit': '1000000000',
'nd_adv_retrans_int': '0',
'nd_adv_duration': '160-240',
'nd_router_adv': '1800',
'stateless_autoconfig': True,
'table_id': '0xe0800001',
'complete_protocol_adj': '0',
'complete_glean_adj': '1',
'incomplete_protocol_adj': '0',
'incomplete_glean_adj': '0',
'dropped_protocol_req': '0',
'dropped_glean_req': '0',
},
},
'GigabitEthernet0/0/0/1.410': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'VRF1',
'vrf_id': '0x60000001',
'enabled': True,
'ipv6': {
'2001:10:23:110::2/64': {
'ipv6': '2001:10:23:110::2',
'ipv6_prefix_length': '64',
'ipv6_subnet': '2001:10:23:110::',
},
'ipv6_link_local': 'fe80::f816:3eff:fe59:8f2e',
'ipv6_groups': ['ff02::1:ff00:2', 'ff02::1:ff59:8f2e', 'ff02::2', 'ff02::1', 'ff02::16', 'ff02::5', 'ff02::6', 'ff02::d'],
'ipv6_mtu': '1518',
'ipv6_mtu_available': '1500',
'icmp_redirects': 'disabled',
'icmp_unreachables': 'enabled',
'nd_dad': 'enabled',
'dad_attempts': '1',
'nd_reachable_time': '0',
'nd_cache_limit': '1000000000',
'nd_adv_retrans_int': '0',
'nd_adv_duration': '160-240',
'nd_router_adv': '1800',
'stateless_autoconfig': True,
'table_id': '0xe0800001',
'complete_protocol_adj': '0',
'complete_glean_adj': '1',
'incomplete_protocol_adj': '0',
'incomplete_glean_adj': '0',
'dropped_protocol_req': '0',
'dropped_glean_req': '0',
},
},
'GigabitEthernet0/0/0/1.415': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'VRF1',
'vrf_id': '0x60000001',
'enabled': True,
'ipv6': {
'2001:10:23:115::2/64': {
'ipv6': '2001:10:23:115::2',
'ipv6_prefix_length': '64',
'ipv6_subnet': '2001:10:23:115::',
},
'ipv6_link_local': 'fe80::f816:3eff:fe59:8f2e',
'ipv6_groups': ['ff02::1:ff00:2', 'ff02::1:ff59:8f2e', 'ff02::2', 'ff02::1'],
'ipv6_mtu': '1518',
'ipv6_mtu_available': '1500',
'icmp_redirects': 'disabled',
'icmp_unreachables': 'enabled',
'nd_dad': 'enabled',
'dad_attempts': '1',
'nd_reachable_time': '0',
'nd_cache_limit': '1000000000',
'nd_adv_retrans_int': '0',
'nd_adv_duration': '160-240',
'nd_router_adv': '1800',
'stateless_autoconfig': True,
'table_id': '0xe0800001',
'complete_protocol_adj': '0',
'complete_glean_adj': '0',
'incomplete_protocol_adj': '0',
'incomplete_glean_adj': '0',
'dropped_protocol_req': '0',
'dropped_glean_req': '0',
},
},
'GigabitEthernet0/0/0/1.420': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'VRF1',
'vrf_id': '0x60000001',
'enabled': True,
'ipv6': {
'2001:10:23:120::2/64': {
'ipv6': '2001:10:23:120::2',
'ipv6_prefix_length': '64',
'ipv6_subnet': '2001:10:23:120::',
},
'ipv6_link_local': 'fe80::f816:3eff:fe59:8f2e',
'ipv6_groups': ['ff02::1:ff00:2', 'ff02::1:ff59:8f2e', 'ff02::2', 'ff02::1'],
'ipv6_mtu': '1518',
'ipv6_mtu_available': '1500',
'icmp_redirects': 'disabled',
'icmp_unreachables': 'enabled',
'nd_dad': 'enabled',
'dad_attempts': '1',
'nd_reachable_time': '0',
'nd_cache_limit': '1000000000',
'nd_adv_retrans_int': '0',
'nd_adv_duration': '160-240',
'nd_router_adv': '1800',
'stateless_autoconfig': True,
'table_id': '0xe0800001',
'complete_protocol_adj': '0',
'complete_glean_adj': '0',
'incomplete_protocol_adj': '0',
'incomplete_glean_adj': '0',
'dropped_protocol_req': '0',
'dropped_glean_req': '0',
},
},
'GigabitEthernet0/0/0/2': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'default',
'vrf_id': '0x60000000',
'enabled': False,
},
'GigabitEthernet0/0/0/3': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'default',
'vrf_id': '0x60000000',
'enabled': False,
},
'GigabitEthernet0/0/0/4': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'default',
'vrf_id': '0x60000000',
'enabled': False,
},
'GigabitEthernet0/0/0/5': {
'ipv6_enabled': True,
'int_status': 'up',
'oper_status': 'up',
'vrf': 'default',
'vrf_id': '0x60000000',
'enabled': False,
},
}
ShowRunInterface = '''\
interface Bundle-Ether12
!
interface Loopback0
ipv4 address 10.16.2.2 255.255.255.255
ipv6 address 2001:2:2:2::2/128
!
interface MgmtEth0/RP0/CPU0/0
vrf management
ipv4 address 172.16.1.52 255.255.255.0
!
interface GigabitEthernet0/0/0/0
cdp
!
interface GigabitEthernet0/0/0/0.90
ipv4 address 10.12.90.2 255.255.255.0
ipv6 nd ra-lifetime 2000
ipv6 nd suppress-ra
ipv6 address 2001:10:12:90::2/64
encapsulation dot1q 90
!
interface GigabitEthernet0/0/0/0.110
ipv4 address 10.12.110.2 255.255.255.0
ipv6 address 2001:10:12:110::2/64
encapsulation dot1q 110
!
'''
ndOpsOutput = {
'interfaces': {
'GigabitEthernet0/0/0/1.420': {
'interface': 'Gi0/0/0/1.420',
'router_advertisement': {
'interval': '160-240',
'lifetime': '1800',
},
'neighbors': {
'Mcast adjacency': {
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'age': '-',
'origin': 'other',
},
},
},
'GigabitEthernet0/0/0/1.415': {
'interface': 'Gi0/0/0/1.415',
'router_advertisement': {
'interval': '160-240',
'lifetime': '1800',
},
'neighbors': {
'Mcast adjacency': {
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'age': '-',
'origin': 'other',
},
},
},
'GigabitEthernet0/0/0/1.410': {
'interface': 'Gi0/0/0/1.410',
'router_advertisement': {
'interval': '160-240',
'lifetime': '1800',
},
'neighbors': {
'Mcast adjacency': {
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'age': '-',
'origin': 'other',
},
'fe80::5c00:40ff:fe02:7': {
'ip': 'fe80::5c00:40ff:fe02:7',
'link_layer_address': '5e00.4002.0007',
'neighbor_state': 'REACH',
'age': '125',
'origin': 'dynamic',
},
},
},
'GigabitEthernet0/0/0/1.390': {
'interface': 'Gi0/0/0/1.390',
'router_advertisement': {
'interval': '160-240',
'lifetime': '1800',
},
'neighbors': {
'Mcast adjacency': {
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'age': '-',
'origin': 'other',
},
'fe80::5c00:40ff:fe02:7': {
'ip': 'fe80::5c00:40ff:fe02:7',
'link_layer_address': '5e00.4002.0007',
'neighbor_state': 'REACH',
'age': '100',
'origin': 'dynamic',
},
},
},
'GigabitEthernet0/0/0/1.120': {
'interface': 'Gi0/0/0/1.120',
'router_advertisement': {
'interval': '160-240',
'lifetime': '1800',
},
'neighbors': {
'Mcast adjacency': {
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'age': '-',
'origin': 'other',
},
},
},
'GigabitEthernet0/0/0/1.115': {
'interface': 'Gi0/0/0/1.115',
'router_advertisement': {
'interval': '160-240',
'lifetime': '1800',
},
'neighbors': {
'Mcast adjacency': {
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'age': '-',
'origin': 'other',
},
'fe80::5c00:40ff:fe02:7': {
'ip': 'fe80::5c00:40ff:fe02:7',
'link_layer_address': '5e00.4002.0007',
'neighbor_state': 'REACH',
'age': '165',
'origin': 'dynamic',
},
},
},
'GigabitEthernet0/0/0/1.110': {
'interface': 'Gi0/0/0/1.110',
'router_advertisement': {
'interval': '160-240',
'lifetime': '1800',
},
'neighbors': {
'Mcast adjacency': {
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'age': '-',
'origin': 'other',
},
'fe80::5c00:40ff:fe02:7': {
'ip': 'fe80::5c00:40ff:fe02:7',
'link_layer_address': '5e00.4002.0007',
'neighbor_state': 'REACH',
'age': '17',
'origin': 'dynamic',
},
},
},
'GigabitEthernet0/0/0/0.420': {
'interface': 'Gi0/0/0/0.420',
'router_advertisement': {
'interval': '160-240',
'lifetime': '1800',
},
'neighbors': {
'Mcast adjacency': {
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'age': '-',
'origin': 'other',
},
},
},
'GigabitEthernet0/0/0/0.415': {
'interface': 'Gi0/0/0/0.415',
'router_advertisement': {
'interval': '160-240',
'lifetime': '1800',
},
'neighbors': {
'Mcast adjacency': {
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'age': '-',
'origin': 'other',
},
},
},
'GigabitEthernet0/0/0/0.410': {
'interface': 'Gi0/0/0/0.410',
'router_advertisement': {
'interval': '160-240',
'lifetime': '1800',
},
'neighbors': {
'Mcast adjacency': {
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'age': '-',
'origin': 'other',
},
},
},
'GigabitEthernet0/0/0/0.390': {
'interface': 'Gi0/0/0/0.390',
'router_advertisement': {
'interval': '160-240',
'lifetime': '1800',
},
'neighbors': {
'Mcast adjacency': {
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'age': '-',
'origin': 'other',
},
'fe80::f816:3eff:fe26:1224': {
'ip': 'fe80::f816:3eff:fe26:1224',
'link_layer_address': 'fa16.3e26.1224',
'neighbor_state': 'REACH',
'age': '151',
'origin': 'dynamic',
},
},
},
'GigabitEthernet0/0/0/0.120': {
'interface': 'Gi0/0/0/0.120',
'router_advertisement': {
'interval': '160-240',
'lifetime': '1800',
},
'neighbors': {
'Mcast adjacency': {
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'age': '-',
'origin': 'other',
},
},
},
'GigabitEthernet0/0/0/0.115': {
'interface': 'Gi0/0/0/0.115',
'router_advertisement': {
'interval': '160-240',
'lifetime': '1800',
},
'neighbors': {
'Mcast adjacency': {
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'age': '-',
'origin': 'other',
},
'fe80::f816:3eff:fe26:1224': {
'ip': 'fe80::f816:3eff:fe26:1224',
'link_layer_address': 'fa16.3e26.1224',
'neighbor_state': 'REACH',
'age': '69',
'origin': 'dynamic',
},
},
},
'GigabitEthernet0/0/0/0.110': {
'interface': 'Gi0/0/0/0.110',
'router_advertisement': {
'interval': '160-240',
'lifetime': '1800',
},
'neighbors': {
'Mcast adjacency': {
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'age': '-',
'origin': 'other',
},
'fe80::f816:3eff:fe26:1224': {
'ip': 'fe80::f816:3eff:fe26:1224',
'link_layer_address': 'fa16.3e26.1224',
'neighbor_state': 'REACH',
'age': '114',
'origin': 'dynamic',
},
},
},
'GigabitEthernet0/0/0/1.90': {
'interface': 'Gi0/0/0/1.90',
'router_advertisement': {
'interval': '160-240',
'lifetime': '1800',
},
'neighbors': {
'Mcast adjacency': {
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'age': '-',
'origin': 'other',
},
'fe80::5c00:40ff:fe02:7': {
'ip': 'fe80::5c00:40ff:fe02:7',
'link_layer_address': '5e00.4002.0007',
'neighbor_state': 'REACH',
'age': '151',
'origin': 'dynamic',
},
},
},
'GigabitEthernet0/0/0/0.90': {
'interface': 'Gi0/0/0/0.90',
'router_advertisement': {
'suppress': True,
},
'neighbors': {
'Mcast adjacency': {
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'age': '-',
'origin': 'other',
},
'fe80::f816:3eff:fe26:1224': {
'ip': 'fe80::f816:3eff:fe26:1224',
'link_layer_address': 'fa16.3e26.1224',
'neighbor_state': 'REACH',
'age': '138',
'origin': 'dynamic',
},
},
},
},
}
ShowRunInterface_custom = '''\
interface GigabitEthernet0/0/0/0.390
vrf VRF1
ipv4 address 10.12.90.2 255.255.255.0
ipv6 address 2001:10:12:90::2/64
encapsulation dot1q 390
!
'''
ndOpsOutput_custom = {
'interfaces': {
'GigabitEthernet0/0/0/0.390': {
'interface': 'Gi0/0/0/0.390',
'router_advertisement': {
'interval': '160-240',
'lifetime': '1800',
},
'neighbors': {
'Mcast adjacency': {
'ip': 'Mcast adjacency',
'link_layer_address': '0000.0000.0000',
'neighbor_state': 'REACH',
'age': '-',
'origin': 'other',
},
'fe80::f816:3eff:fe26:1224': {
'ip': 'fe80::f816:3eff:fe26:1224',
'link_layer_address': 'fa16.3e26.1224',
'neighbor_state': 'REACH',
'age': '151',
'origin': 'dynamic',
},
},
},
},
}
| 40.748148
| 146
| 0.345504
| 5,353
| 71,513
| 4.418831
| 0.03512
| 0.020039
| 0.052084
| 0.054959
| 0.972986
| 0.966644
| 0.961317
| 0.961317
| 0.961317
| 0.961317
| 0
| 0.150364
| 0.504789
| 71,513
| 1,754
| 147
| 40.77138
| 0.517564
| 0.000531
| 0
| 0.815835
| 0
| 0
| 0.382043
| 0.065312
| 0
| 0
| 0.006455
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0.00459
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
3bfe9dbb3c98307d9dcea9e478ee458d57b9fd43
| 194
|
py
|
Python
|
src/manage_switches/All.py
|
greenfructose/net-switch-tools
|
bb84fd103f556e4b29d553e6093237927b4907cb
|
[
"MIT"
] | null | null | null |
src/manage_switches/All.py
|
greenfructose/net-switch-tools
|
bb84fd103f556e4b29d553e6093237927b4907cb
|
[
"MIT"
] | null | null | null |
src/manage_switches/All.py
|
greenfructose/net-switch-tools
|
bb84fd103f556e4b29d553e6093237927b4907cb
|
[
"MIT"
] | null | null | null |
from src.manage_switches.SwitchFunctions import *
from src.manage_switches.DocFunctions import *
from src.manage_switches.DoConcurrent import *
from src.manage_switches.NetworkFunctions import *
| 48.5
| 50
| 0.860825
| 24
| 194
| 6.791667
| 0.375
| 0.171779
| 0.319018
| 0.515337
| 0.496933
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07732
| 194
| 4
| 50
| 48.5
| 0.910615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ce1143ce8b08e87185112658e1aac38700bb0550
| 131
|
py
|
Python
|
Cell-200/Cell-200_64x64/cGAN-concat/models/__init__.py
|
asatk/improved_CcGAN
|
29a58e6e2a03e56c2ad80ae1a2ebbd0710e026f3
|
[
"MIT"
] | 1
|
2022-02-26T00:07:37.000Z
|
2022-02-26T00:07:37.000Z
|
Cell-200/Cell-200_64x64/cGAN-concat/models/__init__.py
|
asatk/improved_CcGAN
|
29a58e6e2a03e56c2ad80ae1a2ebbd0710e026f3
|
[
"MIT"
] | null | null | null |
Cell-200/Cell-200_64x64/cGAN-concat/models/__init__.py
|
asatk/improved_CcGAN
|
29a58e6e2a03e56c2ad80ae1a2ebbd0710e026f3
|
[
"MIT"
] | null | null | null |
from .cond_cnn_generator_discriminator import *
from .ResNet_regre import *
from .ResNet_class import *
from .autoencoder import *
| 26.2
| 47
| 0.816794
| 17
| 131
| 6
| 0.588235
| 0.294118
| 0.313725
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.122137
| 131
| 4
| 48
| 32.75
| 0.886957
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ce1fa662502256a7fce61959cefcb28cdf2dbf32
| 237
|
py
|
Python
|
ardent/presets/__init__.py
|
DevinCrowley/ardent
|
5e731947e58b8670463521abe6a52d4962d4221f
|
[
"Apache-2.0"
] | 15
|
2019-07-16T19:33:14.000Z
|
2022-01-30T13:50:55.000Z
|
ardent/presets/__init__.py
|
DevinCrowley/ardent
|
5e731947e58b8670463521abe6a52d4962d4221f
|
[
"Apache-2.0"
] | 20
|
2019-08-13T21:59:06.000Z
|
2022-03-15T13:40:57.000Z
|
ardent/presets/__init__.py
|
DevinCrowley/ardent
|
5e731947e58b8670463521abe6a52d4962d4221f
|
[
"Apache-2.0"
] | 6
|
2019-08-22T03:42:46.000Z
|
2020-07-15T23:15:49.000Z
|
from .batch_preprocessing import basic_preprocessing
from .batch_preprocessing import basic_preprocessing_with_pad
from .registration_parameters import get_registration_preset
from .registration_parameters import get_registration_presets
| 59.25
| 61
| 0.919831
| 28
| 237
| 7.357143
| 0.428571
| 0.087379
| 0.213592
| 0.271845
| 0.902913
| 0.902913
| 0
| 0
| 0
| 0
| 0
| 0
| 0.063291
| 237
| 4
| 62
| 59.25
| 0.927928
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
ce24a7d849d7190dc9192ad1e24782aa05ea608f
| 1,034
|
py
|
Python
|
esppy/helper/helpers.py
|
Mentos05/SAS_AskTheExpert
|
623d10f1822e2fbf398dc7c2af1c6abb5d832668
|
[
"Apache-2.0"
] | null | null | null |
esppy/helper/helpers.py
|
Mentos05/SAS_AskTheExpert
|
623d10f1822e2fbf398dc7c2af1c6abb5d832668
|
[
"Apache-2.0"
] | null | null | null |
esppy/helper/helpers.py
|
Mentos05/SAS_AskTheExpert
|
623d10f1822e2fbf398dc7c2af1c6abb5d832668
|
[
"Apache-2.0"
] | null | null | null |
#Helpfer Function:
def create_scoring_schema(number_objects):
_field = "id*:int64,image:blob,_image_:blob,_nObjects_:double,"
for obj in range(0,number_objects):
_field += "_Object" + str(obj) + "_:string,"
_field += "_P_Object" + str(obj) + "_:double,"
_field += "_Object" + str(obj) + "_x:double,"
_field += "_Object" + str(obj) + "_y:double,"
_field += "_Object" + str(obj) + "_width:double,"
_field += "_Object" + str(obj) + "_height:double,"
return _field[:-1]
def create_scoring_schema(number_objects):
_field = "id*:int64,image:blob,_image_:blob,_nObjects_:double,"
for obj in range(0,number_objects):
_field += "_Object" + str(obj) + "_:string,"
_field += "_P_Object" + str(obj) + "_:double,"
_field += "_Object" + str(obj) + "_x:double,"
_field += "_Object" + str(obj) + "_y:double,"
_field += "_Object" + str(obj) + "_width:double,"
_field += "_Object" + str(obj) + "_height:double,"
return _field[:-1]
| 44.956522
| 67
| 0.592843
| 122
| 1,034
| 4.54918
| 0.245902
| 0.194595
| 0.259459
| 0.306306
| 0.972973
| 0.972973
| 0.972973
| 0.972973
| 0.972973
| 0.972973
| 0
| 0.009938
| 0.22147
| 1,034
| 22
| 68
| 47
| 0.679503
| 0.016441
| 0
| 1
| 0
| 0
| 0.320866
| 0.102362
| 0
| 0
| 0
| 0
| 0
| 1
| 0.1
| false
| 0
| 0
| 0
| 0.2
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
ce34deab8543e9b9a75ff123a82ec1fe4089fb41
| 15,510
|
py
|
Python
|
src/model_ops/lenet.py
|
hwang595/Draco
|
8472912cce82e6d74087a402fd417e7a837517ab
|
[
"MIT"
] | 21
|
2018-09-19T06:30:57.000Z
|
2022-03-25T22:44:39.000Z
|
src/model_ops/lenet.py
|
hwang595/Draco
|
8472912cce82e6d74087a402fd417e7a837517ab
|
[
"MIT"
] | 3
|
2018-12-31T05:44:22.000Z
|
2021-09-09T15:59:46.000Z
|
src/model_ops/lenet.py
|
hwang595/Draco
|
8472912cce82e6d74087a402fd417e7a837517ab
|
[
"MIT"
] | 12
|
2018-09-19T06:30:59.000Z
|
2021-12-13T09:53:54.000Z
|
import torch
from torch import nn
import torch.nn.functional as F
import pandas as pd
import numpy as np
from torch.autograd import Variable
from mpi4py import MPI
import sys
from utils import err_simulation
sys.path.insert(0, '../compress_gradient')
from compress_gradient import compress
#SEED_=428
#torch.manual_seed(SEED_)
# we use LeNet here for our simple case
class LeNet(nn.Module):
def __init__(self):
super(LeNet, self).__init__()
self.conv1 = nn.Conv2d(1, 20, 5, 1)
self.conv2 = nn.Conv2d(20, 50, 5, 1)
self.fc1 = nn.Linear(4*4*50, 500)
self.fc2 = nn.Linear(500, 10)
self.ceriation = nn.CrossEntropyLoss()
def forward(self, x):
x = self.conv1(x)
x = F.max_pool2d(x, 2, 2)
x = F.relu(x)
x = self.conv2(x)
x = F.max_pool2d(x, 2, 2)
x = F.relu(x)
x = x.view(-1, 4*4*50)
x = self.fc1(x)
x = self.fc2(x)
#loss = self.ceriation(x, target)
return x
def name(self):
return 'lenet'
class LeNetSplit(nn.Module):
'''
this is a module that we split the module and do backward process layer by layer
please don't call this module for normal uses, this is a hack and run slower than
the automatic chain rule version
'''
def __init__(self):
super(LeNetSplit, self).__init__()
self.conv1 = nn.Conv2d(1, 20, 5, 1)
self.conv2 = nn.Conv2d(20, 50, 5, 1)
self.fc1 = nn.Linear(4*4*50, 500)
self.fc2 = nn.Linear(500, 10)
self.maxpool2d = nn.MaxPool2d(2, stride=2)
self.relu = nn.ReLU()
self.full_modules = [self.conv1, self.conv2, self.fc1, self.fc2]
self._init_channel_index = len(self.full_modules)*2
self.criterion = nn.CrossEntropyLoss()
def forward(self, x):
self.output = []
self.input = []
x = Variable(x.data, requires_grad=True)
self.input.append(x)
x = self.conv1(x)
self.output.append(x)
x = Variable(x.data, requires_grad=True)
self.input.append(x)
x = self.maxpool2d(x)
self.output.append(x)
x = Variable(x.data, requires_grad=True)
self.input.append(x)
x = self.relu(x)
self.output.append(x)
x = Variable(x.data, requires_grad=True)
self.input.append(x)
x = self.conv2(x)
self.output.append(x)
x = Variable(x.data, requires_grad=True)
self.input.append(x)
x = self.maxpool2d(x)
self.output.append(x)
x = Variable(x.data, requires_grad=True)
self.input.append(x)
x = self.relu(x)
self.output.append(x)
x = x.view(-1, 4*4*50)
x = Variable(x.data, requires_grad=True)
self.input.append(x)
x = self.fc1(x)
self.output.append(x)
x = Variable(x.data, requires_grad=True)
self.input.append(x)
x = self.fc2(x)
self.output.append(x)
return x
@property
def fetch_init_channel_index(self):
return self._init_channel_index
def backward_normal(self, g, communicator, req_send_check, cur_step, fail_workers, err_mode, compress_grad):
mod_avail_index = len(self.full_modules)-1
#channel_index = len(self.full_modules)*2-2
channel_index = self._init_channel_index - 2
mod_counters_ = [0]*len(self.full_modules)
for i, output in reversed(list(enumerate(self.output))):
req_send_check[-1].wait()
if i == (len(self.output) - 1):
# for last node, use g
output.backward(g)
# get gradient here after some sanity checks:
tmp_grad = self.full_modules[mod_avail_index].weight.grad
if not pd.isnull(tmp_grad):
grads = tmp_grad.data.numpy().astype(np.float64)
############################### simulation here #########################################
if communicator.Get_rank() in fail_workers:
simulation_grad = err_simulation(grad=grads, mode=err_mode)
if compress_grad == 'compress':
_compressed_grad = compress(simulation_grad)
req_isend = communicator.isend(_compressed_grad, dest=0, tag=88+channel_index)
else:
req_isend = communicator.Isend([simulation_grad, MPI.DOUBLE], dest=0, tag=88+channel_index)
else:
if compress_grad == 'compress':
_compressed_grad = compress(grads)
req_isend = communicator.isend(_compressed_grad, dest=0, tag=88+channel_index)
else:
req_isend = communicator.Isend([grads, MPI.DOUBLE], dest=0, tag=88+channel_index)
#########################################################################################
req_send_check.append(req_isend)
# update counters
mod_avail_index-=1
channel_index-=1
else:
continue
else:
output.backward(self.input[i+1].grad.data)
tmp_grad_weight = self.full_modules[mod_avail_index].weight.grad
tmp_grad_bias = self.full_modules[mod_avail_index].bias.grad
if not pd.isnull(tmp_grad_weight) and not pd.isnull(tmp_grad_bias):
# we always send bias first
if mod_counters_[mod_avail_index] == 0:
grads = tmp_grad_bias.data.numpy().astype(np.float64)
############################### simulation here #########################################
if communicator.Get_rank() in fail_workers:
simulation_grad = err_simulation(grad=grads, mode=err_mode)
if compress_grad == 'compress':
_compressed_grad = compress(simulation_grad)
req_isend = communicator.isend(_compressed_grad, dest=0, tag=88+channel_index)
else:
req_isend = communicator.Isend([simulation_grad, MPI.DOUBLE], dest=0, tag=88+channel_index)
else:
if compress_grad == 'compress':
_compressed_grad = compress(grads)
req_isend = communicator.isend(_compressed_grad, dest=0, tag=88+channel_index)
else:
req_isend = communicator.Isend([grads, MPI.DOUBLE], dest=0, tag=88+channel_index)
#########################################################################################
req_send_check.append(req_isend)
channel_index-=1
mod_counters_[mod_avail_index]+=1
elif mod_counters_[mod_avail_index] == 1:
grads = tmp_grad_weight.data.numpy().astype(np.float64)
############################### simulation here #########################################
if communicator.Get_rank() in fail_workers:
simulation_grad = err_simulation(grad=grads, mode=err_mode)
if compress_grad == 'compress':
_compressed_grad = compress(simulation_grad)
req_isend = communicator.isend(_compressed_grad, dest=0, tag=88+channel_index)
else:
req_isend = communicator.Isend([simulation_grad, MPI.DOUBLE], dest=0, tag=88+channel_index)
else:
if compress_grad == 'compress':
_compressed_grad = compress(grads)
req_isend = communicator.isend(_compressed_grad, dest=0, tag=88+channel_index)
else:
req_isend = communicator.Isend([grads, MPI.DOUBLE], dest=0, tag=88+channel_index)
#########################################################################################
req_send_check.append(req_isend)
channel_index-=1
mod_counters_[mod_avail_index]+=1
# update counters
mod_avail_index-=1
else:
continue
if mod_counters_[0] == 1:
req_send_check[-1].wait()
grads = tmp_grad_weight.data.numpy().astype(np.float64)
############################### simulation here #########################################
if communicator.Get_rank() in fail_workers:
simulation_grad = err_simulation(grad=grads, mode=err_mode)
if compress_grad == 'compress':
_compressed_grad = compress(simulation_grad)
req_isend = communicator.isend(_compressed_grad, dest=0, tag=88+channel_index)
else:
req_isend = communicator.Isend([simulation_grad, MPI.DOUBLE], dest=0, tag=88+channel_index)
else:
if compress_grad == 'compress':
_compressed_grad = compress(grads)
req_isend = communicator.isend(_compressed_grad, dest=0, tag=88+channel_index)
else:
req_isend = communicator.Isend([grads, MPI.DOUBLE], dest=0, tag=88+channel_index)
#########################################################################################
req_send_check.append(req_isend)
return req_send_check
def backward_signal_kill(self, g, communicator, req_send_check, cur_step):
'''
This killer is triggered by signals bcasting from master, channel of
signal is kept checking by each worker to determine if they're the
straggler
'''
mod_avail_index = len(self.full_modules)-1
channel_index = self._init_channel_index - 2
mod_counters_ = [0]*len(self.full_modules)
# should kill flag
should_kill = False
for i, output in reversed(list(enumerate(self.output))):
############################ killing process on workers #####################################
for _ in range(10000):
status = MPI.Status()
communicator.Iprobe(0, 77, status)
if status.Get_source() == 0:
print("Worker {}, Cur Step: {} I'm the straggler, killing myself!".format(communicator.Get_rank(), cur_step))
tmp = communicator.recv(source=0, tag=77)
should_kill = True
break
if should_kill:
break
############################################################################################
if i == (len(self.output) - 1):
# for last node, use g
output.backward(g)
# get gradient here after some sanity checks:
tmp_grad = self.full_modules[mod_avail_index].weight.grad
if not pd.isnull(tmp_grad):
grads = tmp_grad.data.numpy().astype(np.float64)
req_isend = communicator.Isend([grads, MPI.DOUBLE], dest=0, tag=88+channel_index)
req_send_check.append(req_isend)
# update counters
mod_avail_index-=1
channel_index-=1
else:
continue
else:
output.backward(self.input[i+1].grad.data)
tmp_grad_weight = self.full_modules[mod_avail_index].weight.grad
tmp_grad_bias = self.full_modules[mod_avail_index].bias.grad
if not pd.isnull(tmp_grad_weight) and not pd.isnull(tmp_grad_bias):
# we always send bias first
if mod_counters_[mod_avail_index] == 0:
grads = tmp_grad_bias.data.numpy().astype(np.float64)
req_isend = communicator.Isend([grads, MPI.DOUBLE], dest=0, tag=88+channel_index)
req_send_check.append(req_isend)
channel_index-=1
mod_counters_[mod_avail_index]+=1
elif mod_counters_[mod_avail_index] == 1:
grads = tmp_grad_weight.data.numpy().astype(np.float64)
req_isend = communicator.Isend([grads, MPI.DOUBLE], dest=0, tag=88+channel_index)
req_send_check.append(req_isend)
channel_index-=1
mod_counters_[mod_avail_index]+=1
# update counters
mod_avail_index-=1
else:
continue
if mod_counters_[0] == 1:
grads = tmp_grad_weight.data.numpy().astype(np.float64)
req_isend = communicator.Isend([grads, MPI.DOUBLE], dest=0, tag=88+channel_index)
req_send_check.append(req_isend)
return req_send_check
def backward_timeout_kill(self, g, communicator, req_send_check):
"""do we even need this?"""
pass
def backward_coded(self, g, cur_step):
grad_aggregate_list = []
mod_avail_index = len(self.full_modules)-1
#channel_index = len(self.full_modules)*2-2
channel_index = self._init_channel_index - 2
mod_counters_ = [0]*len(self.full_modules)
for i, output in reversed(list(enumerate(self.output))):
if i == (len(self.output) - 1):
# for last node, use g
output.backward(g)
else:
output.backward(self.input[i+1].grad.data)
tmp_grad_weight = self.full_modules[mod_avail_index].weight.grad
tmp_grad_bias = self.full_modules[mod_avail_index].bias.grad
# specific for this fc nn setting
if not pd.isnull(tmp_grad_weight) and not pd.isnull(tmp_grad_bias):
# we always send bias first
if mod_counters_[mod_avail_index] == 0:
grads = tmp_grad_bias.data.numpy().astype(np.float64)
grad_aggregate_list.append(grads)
channel_index-=1
mod_counters_[mod_avail_index]+=1
elif mod_counters_[mod_avail_index] == 1:
grads = tmp_grad_weight.data.numpy().astype(np.float64)
grad_aggregate_list.append(grads)
channel_index-=1
mod_counters_[mod_avail_index]+=1
# update counters
mod_avail_index-=1
else:
continue
if mod_counters_[0] == 1:
grads = tmp_grad_weight.data.numpy().astype(np.float64)
grad_aggregate_list.append(grads)
return grad_aggregate_list
| 47.286585
| 129
| 0.509929
| 1,700
| 15,510
| 4.419412
| 0.118824
| 0.062292
| 0.048449
| 0.066551
| 0.817383
| 0.804872
| 0.796087
| 0.787036
| 0.777452
| 0.768934
| 0
| 0.024346
| 0.353836
| 15,510
| 328
| 130
| 47.286585
| 0.725304
| 0.064926
| 0
| 0.830116
| 0
| 0
| 0.010797
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.03861
| false
| 0.003861
| 0.03861
| 0.007722
| 0.111969
| 0.003861
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ce3dafbf90810bb67ab314bd2ae201743235dcc8
| 10,838
|
py
|
Python
|
adafruit_is31fl3741/led_glasses_map.py
|
dhalbert/Adafruit_CircuitPython_IS31FL3741
|
d1b5dc416c5cd44ac59574aaba4797ebdc445d75
|
[
"MIT"
] | null | null | null |
adafruit_is31fl3741/led_glasses_map.py
|
dhalbert/Adafruit_CircuitPython_IS31FL3741
|
d1b5dc416c5cd44ac59574aaba4797ebdc445d75
|
[
"MIT"
] | null | null | null |
adafruit_is31fl3741/led_glasses_map.py
|
dhalbert/Adafruit_CircuitPython_IS31FL3741
|
d1b5dc416c5cd44ac59574aaba4797ebdc445d75
|
[
"MIT"
] | null | null | null |
# SPDX-FileCopyrightText: 2021 Mark Komus
# SPDX-License-Identifier: MIT
"""
LED glasses mappings
"""
# Maps to link IS31FL3741 LEDs to pixels
# Full LED glasses 18 x 5 matrix
glassesmatrix_ledmap = (
65535,
65535,
65535, # (0,0) (clipped, corner)
10,
8,
9, # (0,1) / right ring pixel 20
13,
11,
12, # (0,2) / 19
16,
14,
15, # (0,3) / 18
4,
2,
3, # (0,4) / 17
217,
215,
216, # (1,0) / right ring pixel #21
220,
218,
219, # (1,1)
223,
221,
222, # (1,2)
226,
224,
225, # (1,3)
214,
212,
213, # (1,4)
187,
185,
186, # (2,0)
190,
188,
189, # (2,1)
193,
191,
192, # (2,2)
196,
194,
195, # (2,3)
184,
182,
183, # (2,4)
37,
35,
36, # (3,0)
40,
38,
39, # (3,1)
43,
41,
42, # (3,2)
46,
44,
45, # (3,3)
34,
32,
33, # (3,4)
67,
65,
66, # (4,0)
70,
68,
69, # (4,1)
73,
71,
72, # (4,2)
76,
74,
75, # (4,3)
64,
62,
63, # (4,4)
97,
95,
96, # (5,0)
100,
98,
99, # (5,1)
103,
101,
102, # (5,2)
106,
104,
105, # (5,3)
94,
92,
93, # (5,4)
127,
125,
126, # (6,0) / right ring pixel 3
130,
128,
129, # (6,1)
133,
131,
132, # (6,2)
136,
134,
135, # (6,3)
124,
122,
123, # (6,4)
157,
155,
156, # (7,0)
160,
158,
159, # (7,1)
163,
161,
162, # (7,2) / right ring pixel 5
166,
164,
165, # (7,3) / 6
244,
242,
243, # (7,4) / 7
247,
245,
246, # (8,0)
250,
248,
249, # (8,1)
253,
251,
252, # (8,2)
256,
254,
255, # (8,3)
65535,
65535,
65535, # (8,4) (clipped, nose bridge)
345,
347,
346, # (9,0)
342,
344,
343, # (9,1)
267,
269,
268, # (9,2)
263,
265,
264, # (9,3)
65535,
65535,
65535, # (9,4) (clipped, nose bridge)
336,
338,
337, # (10,0)
333,
335,
334, # (10,1)
237,
239,
238, # (10,2) / left ring pixel 19
233,
235,
234, # (10,3) / 18
348,
262,
349, # (10,4) / 17
327,
329,
328, # (11,0) / left ring pixel 21
324,
326,
325, # (11,1)
207,
209,
208, # (11,2)
203,
205,
204, # (11,3)
330,
202,
331, # (11,4)
318,
320,
319, # (12,0)
315,
317,
316, # (12,1)
177,
179,
178, # (12,2)
173,
175,
174, # (12,3)
321,
172,
322, # (12,4)
309,
311,
310, # (13,0)
306,
308,
307, # (13,1)
147,
149,
148, # (13,2)
143,
145,
144, # (13,3)
312,
142,
313, # (13,4)
300,
302,
301, # (14,0)
297,
299,
298, # (14,1)
117,
119,
118, # (14,2)
113,
115,
114, # (14,3)
303,
112,
304, # (14,4)
291,
293,
292, # (15,0)
288,
290,
289, # (15,1)
87,
89,
88, # (15,2)
83,
85,
84, # (15,3)
294,
82,
295, # (15,4)
282,
284,
283, # (16,0) / left ring pixel 3
279,
281,
280, # (16,1)
57,
59,
58, # (16,2)
53,
55,
54, # (16,3)
285,
52,
286, # (16,4)
65535,
65535,
65535, # (17,0) (clipped, corner)
270,
272,
271, # (17,1) / left ring pixel 4
27,
29,
28, # (17,2) / 5
23,
25,
24, # (17,3) / 6
276,
22,
277, # (17,4) / 7
)
# LED glasses 18 x 5 matrix but excluding LEDs shared with the eye rings
glassesmatrix_ledmap_no_ring = (
65535,
65535,
65535, # (0,0) (clipped, corner)
65535,
65535,
65535, # (0,1) / right ring pixel 20
65535,
65535,
65535, # (0,2) / 19
65535,
65535,
65535, # (0,3) / 18
65535,
65535,
65535, # (0,4) / 17
65535,
65535,
65535, # (1,0) / right ring pixel #21
220,
218,
219, # (1,1)
223,
221,
222, # (1,2)
226,
224,
225, # (1,3)
214,
212,
213, # (1,4)
187,
185,
186, # (2,0)
190,
188,
189, # (2,1)
193,
191,
192, # (2,2)
196,
194,
195, # (2,3)
184,
182,
183, # (2,4)
37,
35,
36, # (3,0)
40,
38,
39, # (3,1)
43,
41,
42, # (3,2)
46,
44,
45, # (3,3)
34,
32,
33, # (3,4)
67,
65,
66, # (4,0)
70,
68,
69, # (4,1)
73,
71,
72, # (4,2)
76,
74,
75, # (4,3)
64,
62,
63, # (4,4)
97,
95,
96, # (5,0)
100,
98,
99, # (5,1)
103,
101,
102, # (5,2)
106,
104,
105, # (5,3)
94,
92,
93, # (5,4)
127,
125,
126, # (6,0) / right ring pixel 3
130,
128,
129, # (6,1)
133,
131,
132, # (6,2)
136,
134,
135, # (6,3)
124,
122,
123, # (6,4)
157,
155,
156, # (7,0)
160,
158,
159, # (7,1)
163,
161,
162, # (7,2) / right ring pixel 5
166,
164,
165, # (7,3) / 6
244,
242,
243, # (7,4) / 7
247,
245,
246, # (8,0)
250,
248,
249, # (8,1)
253,
251,
252, # (8,2)
256,
254,
255, # (8,3)
65535,
65535,
65535, # (8,4) (clipped, nose bridge)
345,
347,
346, # (9,0)
342,
344,
343, # (9,1)
267,
269,
268, # (9,2)
263,
265,
264, # (9,3)
65535,
65535,
65535, # (9,4) (clipped, nose bridge)
336,
338,
337, # (10,0)
333,
335,
334, # (10,1)
237,
239,
238, # (10,2) / left ring pixel 19
233,
235,
234, # (10,3) / 18
348,
262,
349, # (10,4) / 17
327,
329,
328, # (11,0) / left ring pixel 21
324,
326,
325, # (11,1)
207,
209,
208, # (11,2)
203,
205,
204, # (11,3)
330,
202,
331, # (11,4)
318,
320,
319, # (12,0)
315,
317,
316, # (12,1)
177,
179,
178, # (12,2)
173,
175,
174, # (12,3)
321,
172,
322, # (12,4)
309,
311,
310, # (13,0)
306,
308,
307, # (13,1)
147,
149,
148, # (13,2)
143,
145,
144, # (13,3)
312,
142,
313, # (13,4)
300,
302,
301, # (14,0)
297,
299,
298, # (14,1)
117,
119,
118, # (14,2)
113,
115,
114, # (14,3)
303,
112,
304, # (14,4)
291,
293,
292, # (15,0)
288,
290,
289, # (15,1)
87,
89,
88, # (15,2)
83,
85,
84, # (15,3)
294,
82,
295, # (15,4)
65535,
65535,
65535, # (16,0) / left ring pixel 3
279,
281,
280, # (16,1)
57,
59,
58, # (16,2)
53,
55,
54, # (16,3)
285,
52,
286, # (16,4)
65535,
65535,
65535, # (17,0) (clipped, corner)
65535,
65535,
65535, # (17,1) / left ring pixel 4
65535,
65535,
65535, # (17,2) / 5
65535,
65535,
65535, # (17,3) / 6
65535,
65535,
65535, # (17,4) / 7
)
# Left LED glasses eye ring
left_ring_map = (
341,
210,
211, # 0
332,
180,
181, # 1
323,
150,
151, # 2
127,
125,
126, # 3
154,
152,
153, # 4
163,
161,
162, # 5
166,
164,
165, # 6
244,
242,
243, # 7
259,
257,
258, # 8
169,
167,
168, # 9
139,
137,
138, # 10
109,
107,
108, # 11
79,
77,
78, # 12
49,
47,
48, # 13
199,
197,
198, # 14
229,
227,
228, # 15
19,
17,
18, # 16
4,
2,
3, # 17
16,
14,
15, # 18
13,
11,
12, # 19
10,
8,
9, # 20
217,
215,
216, # 21
7,
5,
6, # 22
350,
240,
241, # 23
)
# Left LED glasses eye ring excluding inner LEDs shared with the 18 x 5 matrix
left_ring_map_no_inner = (
341,
210,
211, # 0
332,
180,
181, # 1
323,
150,
151, # 2
65535,
65535,
65535, # 3
65535,
65535,
65535, # 4
65535,
65535,
65535, # 5
65535,
65535,
65535, # 6
65535,
65535,
65535, # 7
259,
257,
258, # 8
169,
167,
168, # 9
139,
137,
138, # 10
109,
107,
108, # 11
79,
77,
78, # 12
49,
47,
48, # 13
199,
197,
198, # 14
229,
227,
228, # 15
19,
17,
18, # 16
4,
2,
3, # 17
16,
14,
15, # 18
13,
11,
12, # 19
10,
8,
9, # 20
217,
215,
216, # 21
7,
5,
6, # 22
350,
240,
241, # 23
)
# Right LED glasses eye ring
right_ring_map = (
287,
30,
31, # 0
278,
0,
1, # 1
273,
275,
274, # 2
282,
284,
283, # 3
270,
272,
271, # 4
27,
29,
28, # 5
23,
25,
24, # 6
276,
22,
277, # 7
20,
26,
21, # 8
50,
56,
51, # 9
80,
86,
81, # 10
110,
116,
111, # 11
140,
146,
141, # 12
170,
176,
171, # 13
200,
206,
201, # 14
230,
236,
231, # 15
260,
266,
261, # 16
348,
262,
349, # 17
233,
235,
234, # 18
237,
239,
238, # 19
339,
232,
340, # 20
327,
329,
328, # 21
305,
90,
91, # 22
296,
60,
61, # 23
)
# Right LED glasses eye ring excluding inner LEDs shared with the 18 x 5 matrix
right_ring_map_no_inner = (
287,
30,
31, # 0
278,
0,
1, # 1
273,
275,
274, # 2
282,
284,
283, # 3
270,
272,
271, # 4
27,
29,
28, # 5
23,
25,
24, # 6
276,
22,
277, # 7
20,
26,
21, # 8
50,
56,
51, # 9
80,
86,
81, # 10
110,
116,
111, # 11
140,
146,
141, # 12
170,
176,
171, # 13
200,
206,
201, # 14
230,
236,
231, # 15
260,
266,
261, # 16
65535,
65535,
65535, # 17
65535,
65535,
65535, # 18
65535,
65535,
65535, # 19
65535,
65535,
65535, # 20
65535,
65535,
65535, # 21
305,
90,
91, # 22
296,
60,
61, # 23
)
| 12.602326
| 79
| 0.3568
| 1,490
| 10,838
| 2.584564
| 0.261074
| 0.145417
| 0.109063
| 0.030901
| 0.811997
| 0.798494
| 0.755129
| 0.739548
| 0.739548
| 0.729161
| 0
| 0.548209
| 0.482285
| 10,838
| 859
| 80
| 12.616997
| 0.138122
| 0.224857
| 0
| 0.978571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
ce3e7a5ec109f83bc9163f17c559ca61832f2f3b
| 162
|
py
|
Python
|
.utils/unescape_name.py
|
Symbian9/3d-models
|
8e2638a47c04bc90fc33e09ddd43567755dac0e9
|
[
"CC-BY-4.0"
] | 4
|
2020-05-18T09:58:15.000Z
|
2022-01-16T03:51:44.000Z
|
.utils/unescape_name.py
|
Symbian9/3d-models
|
8e2638a47c04bc90fc33e09ddd43567755dac0e9
|
[
"CC-BY-4.0"
] | 1
|
2022-01-17T00:00:23.000Z
|
2022-01-17T00:00:23.000Z
|
.utils/unescape_name.py
|
Symbian9/3d-models
|
8e2638a47c04bc90fc33e09ddd43567755dac0e9
|
[
"CC-BY-4.0"
] | 1
|
2021-11-18T06:50:42.000Z
|
2021-11-18T06:50:42.000Z
|
#!/usr/bin/python
import sys
import re
print(sys.argv[1].replace('\\', ''))
#print(re.sub("(!|\$|#|&|\"|\'|\(|\)|\||<|>|`|\\\|;| )", r"\\\1", sys.argv[1])+'\n')
| 23.142857
| 84
| 0.432099
| 21
| 162
| 3.333333
| 0.619048
| 0.2
| 0.228571
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02
| 0.074074
| 162
| 6
| 85
| 27
| 0.446667
| 0.604938
| 0
| 0
| 0
| 0
| 0.032787
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.666667
| 0
| 0.666667
| 0.333333
| 1
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cbfe183234a4657df540778d8c7a8bbdd8ca3adc
| 107
|
py
|
Python
|
montepython/likelihoods/Planck_lowl_TT/__init__.py
|
archaeo-pteryx/montepython_public
|
6fbcaa3266fd3a10a8e3ed4190dc65e6f29f1a37
|
[
"MIT"
] | 69
|
2018-04-20T07:38:33.000Z
|
2022-03-11T06:55:36.000Z
|
montepython/likelihoods/Planck_lowl_TT/__init__.py
|
archaeo-pteryx/montepython_public
|
6fbcaa3266fd3a10a8e3ed4190dc65e6f29f1a37
|
[
"MIT"
] | 263
|
2018-05-20T21:58:11.000Z
|
2022-03-30T21:45:48.000Z
|
montepython/likelihoods/Planck_lowl_TT/__init__.py
|
archaeo-pteryx/montepython_public
|
6fbcaa3266fd3a10a8e3ed4190dc65e6f29f1a37
|
[
"MIT"
] | 78
|
2018-04-21T13:11:54.000Z
|
2022-02-01T01:57:31.000Z
|
from montepython.likelihood_class import Likelihood_clik
class Planck_lowl_TT(Likelihood_clik):
pass
| 17.833333
| 56
| 0.841121
| 14
| 107
| 6.071429
| 0.714286
| 0.329412
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121495
| 107
| 5
| 57
| 21.4
| 0.904255
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.333333
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
022890b846fd86bfb791624452a74c07336aa3e5
| 2,016
|
py
|
Python
|
runs.py
|
bentrevett/pytorch-for-code
|
1dd35e4b80eba8b1eae687d4f9c4a1c4d8122d0a
|
[
"MIT"
] | 9
|
2020-01-20T12:59:45.000Z
|
2021-03-05T02:46:44.000Z
|
runs.py
|
bentrevett/pytorch-for-code
|
1dd35e4b80eba8b1eae687d4f9c4a1c4d8122d0a
|
[
"MIT"
] | null | null | null |
runs.py
|
bentrevett/pytorch-for-code
|
1dd35e4b80eba8b1eae687d4f9c4a1c4d8122d0a
|
[
"MIT"
] | null | null | null |
import subprocess
for seed in [1,2,3,4,5]:
train_data = 'data/codesearchnet/java_train_bpe_10000_0.5.jsonl'
valid_data = 'data/codesearchnet/java_valid_bpe_10000_0.5.jsonl'
test_data = 'data/codesearchnet/java_test_bpe_10000_0.5.jsonl'
code_vocab = 'data/codesearchnet/java-bpe-10000-0.5_code_vocab.jsonl'
desc_vocab = 'data/codesearchnet/java-bpe-10000-0.5_desc_vocab.jsonl'
for model in ['nbow', 'rnn', 'cnn', 'transformer']:
command = f'python code_retrieval_{model}.py --train_data {train_data} --valid_data {valid_data} --test_data {test_data} --code_vocab {code_vocab} --desc_vocab {desc_vocab} --seed {seed}'
process = subprocess.Popen(command, shell=True)
process.wait()
train_data = 'data/codesearchnet/6L_train.jsonl'
valid_data = 'data/codesearchnet/6L_valid.jsonl'
test_data = 'data/codesearchnet/6L_test.jsonl'
code_vocab = 'data/codesearchnet/6L_code_vocab.jsonl'
desc_vocab = 'data/codesearchnet/6L_desc_vocab.jsonl'
for model in ['nbow', 'rnn', 'cnn', 'transformer']:
command = f'python code_retrieval_{model}.py --train_data {train_data} --valid_data {valid_data} --test_data {test_data} --code_vocab {code_vocab} --desc_vocab {desc_vocab} --seed {seed}'
process = subprocess.Popen(command, shell=True)
process.wait()
train_data = 'data/codesearchnet/6L_train_bpe_10000_0.5.jsonl'
valid_data = 'data/codesearchnet/6L_valid_bpe_10000_0.5.jsonl'
test_data = 'data/codesearchnet/6L_test_bpe_10000_0.5.jsonl'
code_vocab = 'data/codesearchnet/6L-bpe-10000-0.5_code_vocab.jsonl'
desc_vocab = 'data/codesearchnet/6L-bpe-10000-0.5_desc_vocab.jsonl'
for model in ['nbow', 'rnn', 'cnn', 'transformer']:
command = f'python code_retrieval_{model}.py --train_data {train_data} --valid_data {valid_data} --test_data {test_data} --code_vocab {code_vocab} --desc_vocab {desc_vocab} --seed {seed}'
process = subprocess.Popen(command, shell=True)
process.wait()
| 49.170732
| 195
| 0.718254
| 293
| 2,016
| 4.634812
| 0.133106
| 0.187776
| 0.066274
| 0.073638
| 0.949926
| 0.949926
| 0.925626
| 0.868189
| 0.828424
| 0.828424
| 0
| 0.049247
| 0.143849
| 2,016
| 40
| 196
| 50.4
| 0.737543
| 0
| 0
| 0.413793
| 0
| 0.103448
| 0.623821
| 0.37072
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.034483
| 0
| 0.034483
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
022af7542fb14ec5ee60dc3bea71ff05e6b23ea5
| 99
|
py
|
Python
|
utils/__init__.py
|
pedrocg42/awesome-cv-projects
|
928c48aa305d1cd0cd67412659a87ecc6fb6f8b0
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
pedrocg42/awesome-cv-projects
|
928c48aa305d1cd0cd67412659a87ecc6fb6f8b0
|
[
"MIT"
] | null | null | null |
utils/__init__.py
|
pedrocg42/awesome-cv-projects
|
928c48aa305d1cd0cd67412659a87ecc6fb6f8b0
|
[
"MIT"
] | null | null | null |
from utils.object_detection import *
from utils.pose_estimation import *
from utils.utils import *
| 24.75
| 36
| 0.818182
| 14
| 99
| 5.642857
| 0.5
| 0.341772
| 0.379747
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 99
| 3
| 37
| 33
| 0.908046
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0284975082cdb2dc0c6ef3c5f34204a7ad83bd14
| 31,228
|
py
|
Python
|
backend/tests/baserow/contrib/database/ws/public/test_public_ws_rows_signals.py
|
ashishdhngr/baserow
|
b098678d2165eb7c42930ee24dc6753a3cb520c3
|
[
"MIT"
] | 1
|
2022-01-24T15:12:02.000Z
|
2022-01-24T15:12:02.000Z
|
backend/tests/baserow/contrib/database/ws/public/test_public_ws_rows_signals.py
|
rasata/baserow
|
c6e1d7842c53f801e1c96b49f1377da2a06afaa9
|
[
"MIT"
] | null | null | null |
backend/tests/baserow/contrib/database/ws/public/test_public_ws_rows_signals.py
|
rasata/baserow
|
c6e1d7842c53f801e1c96b49f1377da2a06afaa9
|
[
"MIT"
] | null | null | null |
from unittest.mock import patch, call, ANY
import pytest
from django.db import transaction
from baserow.contrib.database.api.constants import PUBLIC_PLACEHOLDER_ENTITY_ID
from baserow.contrib.database.rows.handler import RowHandler
from baserow.contrib.database.views.handler import ViewHandler
from baserow.core.trash.handler import TrashHandler
@pytest.mark.django_db(transaction=True)
@patch("baserow.ws.registries.broadcast_to_channel_group")
def test_when_row_created_public_views_receive_restricted_row_created_ws_event(
    mock_broadcast_to_channel_group, data_fixture
):
    """
    Creating a row must broadcast a `row_created` event to every public grid
    view's channel group, with the payload restricted per view: fields hidden
    in a view are stripped from that view's payload, and the table id is
    replaced by PUBLIC_PLACEHOLDER_ENTITY_ID. Public form views receive no
    event at all.
    """
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    visible_field = data_fixture.create_text_field(table=table)
    hidden_field = data_fixture.create_text_field(table=table)
    public_view_only_showing_one_field = data_fixture.create_grid_view(
        user, create_options=False, table=table, public=True, order=0
    )
    public_view_showing_all_fields = data_fixture.create_grid_view(
        user, table=table, public=True, order=1
    )
    # No public events should be sent to this form view
    data_fixture.create_form_view(user, table=table, public=True)
    data_fixture.create_grid_view_field_option(
        public_view_only_showing_one_field, hidden_field, hidden=True
    )
    # Creating via the handler triggers the row-created signals under test.
    row = RowHandler().create_row(
        user=user,
        table=table,
        values={
            f"field_{visible_field.id}": "Visible",
            f"field_{hidden_field.id}": "Hidden",
        },
    )
    # One broadcast to the private table channel plus one per public grid view.
    assert mock_broadcast_to_channel_group.delay.mock_calls == (
        [
            call(f"table-{table.id}", ANY, ANY),
            call(
                f"view-{public_view_only_showing_one_field.slug}",
                {
                    "type": "row_created",
                    "table_id": PUBLIC_PLACEHOLDER_ENTITY_ID,
                    "row": {
                        "id": row.id,
                        "order": "1.00000000000000000000",
                        # Only the visible field should be sent
                        f"field_{visible_field.id}": "Visible",
                    },
                    "metadata": {},
                    "before_row_id": None,
                },
                None,
            ),
            call(
                f"view-{public_view_showing_all_fields.slug}",
                {
                    "type": "row_created",
                    "table_id": PUBLIC_PLACEHOLDER_ENTITY_ID,
                    "row": {
                        "id": row.id,
                        "order": "1.00000000000000000000",
                        f"field_{visible_field.id}": "Visible",
                        # This field is not hidden for this public view and so should be
                        # included
                        f"field_{hidden_field.id}": "Hidden",
                    },
                    "metadata": {},
                    "before_row_id": None,
                },
                None,
            ),
        ]
    )
@pytest.mark.django_db(transaction=True)
@patch("baserow.ws.registries.broadcast_to_channel_group")
def test_when_row_created_public_views_receive_row_created_only_when_filters_match(
    mock_broadcast_to_channel_group, data_fixture
):
    """
    A public view only receives `row_created` when the new row matches all of
    the view's filters. Filters on hidden fields still apply even though the
    hidden field is never included in the broadcast payload.
    """
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    visible_field = data_fixture.create_text_field(table=table)
    hidden_field = data_fixture.create_text_field(table=table)
    public_view_showing_row = data_fixture.create_grid_view(
        user, create_options=False, table=table, public=True, order=0
    )
    public_view_hiding_row = data_fixture.create_grid_view(
        user, table=table, public=True, order=1
    )
    # Should not appear in any results
    data_fixture.create_form_view(user, table=table, public=True)
    data_fixture.create_grid_view_field_option(
        public_view_showing_row, hidden_field, hidden=True
    )
    data_fixture.create_grid_view_field_option(
        public_view_hiding_row, hidden_field, hidden=True
    )
    # Match the visible field
    data_fixture.create_view_filter(
        view=public_view_hiding_row, field=visible_field, type="equal", value="Visible"
    )
    # But filter out based on the hidden field
    data_fixture.create_view_filter(
        view=public_view_hiding_row, field=hidden_field, type="equal", value="Not Match"
    )
    # Match
    data_fixture.create_view_filter(
        view=public_view_showing_row, field=visible_field, type="equal", value="Visible"
    )
    # Match
    data_fixture.create_view_filter(
        view=public_view_showing_row, field=hidden_field, type="equal", value="Hidden"
    )
    row = RowHandler().create_row(
        user=user,
        table=table,
        values={
            f"field_{visible_field.id}": "Visible",
            f"field_{hidden_field.id}": "Hidden",
        },
    )
    # Only the matching view gets a broadcast; the filtered-out view gets none.
    assert mock_broadcast_to_channel_group.delay.mock_calls == (
        [
            call(f"table-{table.id}", ANY, ANY),
            call(
                f"view-{public_view_showing_row.slug}",
                {
                    "type": "row_created",
                    "table_id": PUBLIC_PLACEHOLDER_ENTITY_ID,
                    "row": {
                        "id": row.id,
                        "order": "1.00000000000000000000",
                        # Only the visible field should be sent
                        f"field_{visible_field.id}": "Visible",
                    },
                    "metadata": {},
                    "before_row_id": None,
                },
                None,
            ),
        ]
    )
@pytest.mark.django_db(transaction=True)
@patch("baserow.ws.registries.broadcast_to_channel_group")
def test_when_row_deleted_public_views_receive_restricted_row_deleted_ws_event(
    mock_broadcast_to_channel_group, data_fixture
):
    """
    Deleting a row must broadcast a `row_deleted` event to every public grid
    view, with the row payload restricted to that view's visible fields and
    the real table id replaced by PUBLIC_PLACEHOLDER_ENTITY_ID.
    """
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    visible_field = data_fixture.create_text_field(table=table)
    hidden_field = data_fixture.create_text_field(table=table)
    public_view_only_showing_one_field = data_fixture.create_grid_view(
        user, create_options=False, table=table, public=True, order=0
    )
    public_view_showing_all_fields = data_fixture.create_grid_view(
        user, table=table, public=True, order=1
    )
    # Should not appear in any results
    data_fixture.create_form_view(user, table=table, public=True)
    data_fixture.create_grid_view_field_option(
        public_view_only_showing_one_field, hidden_field, hidden=True
    )
    # Create the row directly via the model so no create events are recorded.
    model = table.get_model()
    row = model.objects.create(
        **{
            f"field_{visible_field.id}": "Visible",
            f"field_{hidden_field.id}": "Hidden",
        },
    )
    RowHandler().delete_row(user, table, row.id, model)
    assert mock_broadcast_to_channel_group.delay.mock_calls == (
        [
            call(f"table-{table.id}", ANY, ANY),
            call(
                f"view-{public_view_only_showing_one_field.slug}",
                {
                    "type": "row_deleted",
                    "row_id": row.id,
                    "table_id": PUBLIC_PLACEHOLDER_ENTITY_ID,
                    "row": {
                        "id": row.id,
                        "order": "1.00000000000000000000",
                        # Only the visible field should be sent
                        f"field_{visible_field.id}": "Visible",
                    },
                },
                None,
            ),
            call(
                f"view-{public_view_showing_all_fields.slug}",
                {
                    "type": "row_deleted",
                    "row_id": row.id,
                    "table_id": PUBLIC_PLACEHOLDER_ENTITY_ID,
                    "row": {
                        "id": row.id,
                        "order": "1.00000000000000000000",
                        f"field_{visible_field.id}": "Visible",
                        # This field is not hidden for this public view and so should be
                        # included
                        f"field_{hidden_field.id}": "Hidden",
                    },
                },
                None,
            ),
        ]
    )
@pytest.mark.django_db(transaction=True)
@patch("baserow.ws.registries.broadcast_to_channel_group")
def test_when_row_deleted_public_views_receive_row_deleted_only_when_filters_match(
    mock_broadcast_to_channel_group, data_fixture
):
    """
    A public view only receives `row_deleted` when the deleted row was
    actually visible in it, i.e. matched all of the view's filters —
    including filters on hidden fields.
    """
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    visible_field = data_fixture.create_text_field(table=table)
    hidden_field = data_fixture.create_text_field(table=table)
    public_view_showing_row = data_fixture.create_grid_view(
        user, create_options=False, table=table, public=True, order=0
    )
    public_view_hiding_row = data_fixture.create_grid_view(
        user, table=table, public=True, order=1
    )
    # Should not appear in any results
    data_fixture.create_form_view(user, table=table, public=True)
    data_fixture.create_grid_view_field_option(
        public_view_showing_row, hidden_field, hidden=True
    )
    data_fixture.create_grid_view_field_option(
        public_view_hiding_row, hidden_field, hidden=True
    )
    # Match the visible field
    data_fixture.create_view_filter(
        view=public_view_hiding_row, field=visible_field, type="equal", value="Visible"
    )
    # But filter out based on the hidden field
    data_fixture.create_view_filter(
        view=public_view_hiding_row, field=hidden_field, type="equal", value="Not Match"
    )
    # Match
    data_fixture.create_view_filter(
        view=public_view_showing_row, field=visible_field, type="equal", value="Visible"
    )
    # Match
    data_fixture.create_view_filter(
        view=public_view_showing_row, field=hidden_field, type="equal", value="Hidden"
    )
    # Create the row directly via the model so no create events are recorded.
    model = table.get_model()
    row = model.objects.create(
        **{
            f"field_{visible_field.id}": "Visible",
            f"field_{hidden_field.id}": "Hidden",
        },
    )
    RowHandler().delete_row(user, table, row.id, model)
    assert mock_broadcast_to_channel_group.delay.mock_calls == (
        [
            call(f"table-{table.id}", ANY, ANY),
            call(
                f"view-{public_view_showing_row.slug}",
                {
                    "type": "row_deleted",
                    "table_id": PUBLIC_PLACEHOLDER_ENTITY_ID,
                    "row_id": row.id,
                    "row": {
                        "id": row.id,
                        "order": "1.00000000000000000000",
                        # Only the visible field should be sent
                        f"field_{visible_field.id}": "Visible",
                    },
                },
                None,
            ),
        ]
    )
@pytest.mark.django_db(transaction=True)
@patch("baserow.ws.registries.broadcast_to_channel_group")
def test_given_row_not_visible_in_public_view_when_updated_to_be_visible_event_sent(
    mock_broadcast_to_channel_group, data_fixture
):
    """
    When an update makes a previously filtered-out row match a public view's
    filters, that view must receive a `row_created` event (not `row_updated`),
    because from the public view's perspective the row is newly visible.
    """
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    visible_field = data_fixture.create_text_field(table=table)
    hidden_field = data_fixture.create_text_field(table=table)
    public_view_with_filters_initially_hiding_all_rows = data_fixture.create_grid_view(
        user, create_options=False, table=table, public=True, order=0
    )
    # Should not appear in any results
    data_fixture.create_form_view(user, table=table, public=True)
    data_fixture.create_grid_view_field_option(
        public_view_with_filters_initially_hiding_all_rows, hidden_field, hidden=True
    )
    # Match the visible field
    data_fixture.create_view_filter(
        view=public_view_with_filters_initially_hiding_all_rows,
        field=visible_field,
        type="equal",
        value="Visible",
    )
    # But filter out based on the hidden field
    data_fixture.create_view_filter(
        view=public_view_with_filters_initially_hiding_all_rows,
        field=hidden_field,
        type="equal",
        value="ValueWhichMatchesFilter",
    )
    model = table.get_model()
    initially_hidden_row = model.objects.create(
        **{
            f"field_{visible_field.id}": "Visible",
            f"field_{hidden_field.id}": "ValueWhichDoesntMatchFilter",
        },
    )
    # Double check the row isn't visible in any views to begin with
    row_checker = ViewHandler().get_public_views_row_checker(
        table, model, only_include_views_which_want_realtime_events=True
    )
    assert row_checker.get_public_views_where_row_is_visible(initially_hidden_row) == []
    # Update the hidden field so the row now passes the second filter.
    RowHandler().update_row(
        user,
        table,
        initially_hidden_row.id,
        values={f"field_{hidden_field.id}": "ValueWhichMatchesFilter"},
    )
    assert mock_broadcast_to_channel_group.delay.mock_calls == (
        [
            call(f"table-{table.id}", ANY, ANY),
            call(
                f"view-{public_view_with_filters_initially_hiding_all_rows.slug}",
                {
                    # The row should appear as a created event as for the public view
                    # it effectively has been created as it didn't exist before.
                    "type": "row_created",
                    "table_id": PUBLIC_PLACEHOLDER_ENTITY_ID,
                    "row": {
                        "id": initially_hidden_row.id,
                        "order": "1.00000000000000000000",
                        # Only the visible field should be sent
                        f"field_{visible_field.id}": "Visible",
                    },
                    "metadata": {},
                    "before_row_id": None,
                },
                None,
            ),
        ]
    )
@pytest.mark.django_db(transaction=True)
@patch("baserow.ws.registries.broadcast_to_channel_group")
def test_given_row_visible_in_public_view_when_updated_to_be_not_visible_event_sent(
    mock_broadcast_to_channel_group, data_fixture
):
    """
    When an update makes a previously visible row stop matching a public
    view's filters, that view must receive a `row_deleted` event carrying the
    row's pre-update values restricted to the view's visible fields.
    """
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    visible_field = data_fixture.create_text_field(table=table)
    hidden_field = data_fixture.create_text_field(table=table)
    public_view_with_row_showing = data_fixture.create_grid_view(
        user, create_options=False, table=table, public=True, order=0
    )
    # Should not appear in any results
    data_fixture.create_form_view(user, table=table, public=True)
    data_fixture.create_grid_view_field_option(
        public_view_with_row_showing, hidden_field, hidden=True
    )
    # Match the visible field
    data_fixture.create_view_filter(
        view=public_view_with_row_showing,
        field=visible_field,
        type="contains",
        value="Visible",
    )
    # But filter out based on the hidden field
    data_fixture.create_view_filter(
        view=public_view_with_row_showing,
        field=hidden_field,
        type="equal",
        value="ValueWhichMatchesFilter",
    )
    model = table.get_model()
    initially_visible_row = model.objects.create(
        **{
            f"field_{visible_field.id}": "Visible",
            f"field_{hidden_field.id}": "ValueWhichMatchesFilter",
        },
    )
    # Double check the row is visible in the view to start with
    row_checker = ViewHandler().get_public_views_row_checker(
        table, model, only_include_views_which_want_realtime_events=True
    )
    assert row_checker.get_public_views_where_row_is_visible(initially_visible_row) == [
        public_view_with_row_showing.view_ptr
    ]
    # Update the row so it is no longer visible
    RowHandler().update_row(
        user,
        table,
        initially_visible_row.id,
        values={
            f"field_{hidden_field.id}": "ValueWhichDoesNotMatchFilter",
            f"field_{visible_field.id}": "StillVisibleButNew",
        },
    )
    assert mock_broadcast_to_channel_group.delay.mock_calls == (
        [
            call(f"table-{table.id}", ANY, ANY),
            call(
                f"view-{public_view_with_row_showing.slug}",
                {
                    # The row should appear as a deleted event as for the public view
                    # it effectively has been.
                    "type": "row_deleted",
                    "table_id": PUBLIC_PLACEHOLDER_ENTITY_ID,
                    "row_id": initially_visible_row.id,
                    "row": {
                        "id": initially_visible_row.id,
                        "order": "1.00000000000000000000",
                        # Only the visible field should be sent in its state before it
                        # was updated
                        f"field_{visible_field.id}": "Visible",
                    },
                },
                None,
            ),
        ]
    )
@pytest.mark.django_db(transaction=True)
@patch("baserow.ws.registries.broadcast_to_channel_group")
def test_given_row_visible_in_public_view_when_updated_to_still_be_visible_event_sent(
    mock_broadcast_to_channel_group, data_fixture
):
    """
    When a row visible in a public view is updated and still matches the
    view's filters afterwards, the view must receive a `row_updated` event
    containing both the before and after states, each restricted to the
    view's visible fields.
    """
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    visible_field = data_fixture.create_text_field(table=table)
    hidden_field = data_fixture.create_text_field(table=table)
    public_view_with_row_showing = data_fixture.create_grid_view(
        user, create_options=False, table=table, public=True, order=0
    )
    # Should not appear in any results
    data_fixture.create_form_view(user, table=table, public=True)
    data_fixture.create_grid_view_field_option(
        public_view_with_row_showing, hidden_field, hidden=True
    )
    # Match the visible field
    data_fixture.create_view_filter(
        view=public_view_with_row_showing,
        field=visible_field,
        type="contains",
        value="Visible",
    )
    # But filter out based on the hidden field
    data_fixture.create_view_filter(
        view=public_view_with_row_showing,
        field=hidden_field,
        type="contains",
        value="e",
    )
    model = table.get_model()
    initially_visible_row = model.objects.create(
        **{
            f"field_{visible_field.id}": "Visible",
            f"field_{hidden_field.id}": "e",
        },
    )
    # Double check the row is visible in the view to start with
    row_checker = ViewHandler().get_public_views_row_checker(
        table, model, only_include_views_which_want_realtime_events=True
    )
    assert row_checker.get_public_views_where_row_is_visible(initially_visible_row) == [
        public_view_with_row_showing.view_ptr
    ]
    # Update the row so it is still visible but changed
    RowHandler().update_row(
        user,
        table,
        initially_visible_row.id,
        values={
            f"field_{hidden_field.id}": "eee",
            f"field_{visible_field.id}": "StillVisibleButUpdated",
        },
    )
    assert mock_broadcast_to_channel_group.delay.mock_calls == (
        [
            call(f"table-{table.id}", ANY, ANY),
            call(
                f"view-{public_view_with_row_showing.slug}",
                {
                    "type": "row_updated",
                    "table_id": PUBLIC_PLACEHOLDER_ENTITY_ID,
                    "row_before_update": {
                        "id": initially_visible_row.id,
                        "order": "1.00000000000000000000",
                        # Only the visible field should be sent
                        f"field_{visible_field.id}": "Visible",
                    },
                    "row": {
                        "id": initially_visible_row.id,
                        "order": "1.00000000000000000000",
                        # Only the visible field should be sent
                        f"field_{visible_field.id}": "StillVisibleButUpdated",
                    },
                    "metadata": {},
                },
                None,
            ),
        ]
    )
@pytest.mark.django_db(transaction=True)
@patch("baserow.ws.registries.broadcast_to_channel_group")
def test_when_row_restored_public_views_receive_restricted_row_created_ws_event(
    mock_broadcast_to_channel_group, data_fixture
):
    """
    Restoring a trashed row must broadcast `row_created` to every public grid
    view, restricted per view exactly like a fresh creation: hidden fields
    stripped and the table id replaced by PUBLIC_PLACEHOLDER_ENTITY_ID.
    """
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    visible_field = data_fixture.create_text_field(table=table)
    hidden_field = data_fixture.create_text_field(table=table)
    public_view_only_showing_one_field = data_fixture.create_grid_view(
        user, create_options=False, table=table, public=True, order=0
    )
    public_view_showing_all_fields = data_fixture.create_grid_view(
        user, table=table, public=True, order=1
    )
    # Should not appear in any results
    data_fixture.create_form_view(user, table=table, public=True)
    data_fixture.create_grid_view_field_option(
        public_view_only_showing_one_field, hidden_field, hidden=True
    )
    model = table.get_model()
    row = model.objects.create(
        **{
            f"field_{visible_field.id}": "Visible",
            f"field_{hidden_field.id}": "Hidden",
        },
    )
    # Trash then restore the row; only the restore should broadcast events.
    TrashHandler.trash(
        user, table.database.group, table.database, row, parent_id=table.id
    )
    TrashHandler.restore_item(user, "row", row.id, parent_trash_item_id=table.id)
    assert mock_broadcast_to_channel_group.delay.mock_calls == (
        [
            call(f"table-{table.id}", ANY, ANY),
            call(
                f"view-{public_view_only_showing_one_field.slug}",
                {
                    "type": "row_created",
                    "table_id": PUBLIC_PLACEHOLDER_ENTITY_ID,
                    "row": {
                        "id": row.id,
                        "order": "1.00000000000000000000",
                        # Only the visible field should be sent
                        f"field_{visible_field.id}": "Visible",
                    },
                    "metadata": {},
                    "before_row_id": None,
                },
                None,
            ),
            call(
                f"view-{public_view_showing_all_fields.slug}",
                {
                    "type": "row_created",
                    "table_id": PUBLIC_PLACEHOLDER_ENTITY_ID,
                    "row": {
                        "id": row.id,
                        "order": "1.00000000000000000000",
                        f"field_{visible_field.id}": "Visible",
                        # This field is not hidden for this public view and so should be
                        # included
                        f"field_{hidden_field.id}": "Hidden",
                    },
                    "metadata": {},
                    "before_row_id": None,
                },
                None,
            ),
        ]
    )
@pytest.mark.django_db(transaction=True)
@patch("baserow.ws.registries.broadcast_to_channel_group")
def test_when_row_restored_public_views_receive_row_created_only_when_filters_match(
    mock_broadcast_to_channel_group, data_fixture
):
    """
    A restored row only triggers `row_created` for public views whose filters
    it matches — including filters on hidden fields — mirroring the
    behaviour of a fresh creation.
    """
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    visible_field = data_fixture.create_text_field(table=table)
    hidden_field = data_fixture.create_text_field(table=table)
    public_view_showing_row = data_fixture.create_grid_view(
        user, create_options=False, table=table, public=True, order=0
    )
    public_view_hiding_row = data_fixture.create_grid_view(
        user, table=table, public=True, order=1
    )
    # Should not appear in any results
    data_fixture.create_form_view(user, table=table, public=True)
    data_fixture.create_grid_view_field_option(
        public_view_showing_row, hidden_field, hidden=True
    )
    data_fixture.create_grid_view_field_option(
        public_view_hiding_row, hidden_field, hidden=True
    )
    # Match the visible field
    data_fixture.create_view_filter(
        view=public_view_hiding_row, field=visible_field, type="equal", value="Visible"
    )
    # But filter out based on the hidden field
    data_fixture.create_view_filter(
        view=public_view_hiding_row, field=hidden_field, type="equal", value="Not Match"
    )
    # Match
    data_fixture.create_view_filter(
        view=public_view_showing_row, field=visible_field, type="equal", value="Visible"
    )
    # Match
    data_fixture.create_view_filter(
        view=public_view_showing_row, field=hidden_field, type="equal", value="Hidden"
    )
    model = table.get_model()
    row = model.objects.create(
        **{
            f"field_{visible_field.id}": "Visible",
            f"field_{hidden_field.id}": "Hidden",
        },
    )
    # Trash then restore the row; only the restore should broadcast events.
    TrashHandler.trash(
        user, table.database.group, table.database, row, parent_id=table.id
    )
    TrashHandler.restore_item(user, "row", row.id, parent_trash_item_id=table.id)
    assert mock_broadcast_to_channel_group.delay.mock_calls == (
        [
            call(f"table-{table.id}", ANY, ANY),
            call(
                f"view-{public_view_showing_row.slug}",
                {
                    "type": "row_created",
                    "table_id": PUBLIC_PLACEHOLDER_ENTITY_ID,
                    "row": {
                        "id": row.id,
                        "order": "1.00000000000000000000",
                        # Only the visible field should be sent
                        f"field_{visible_field.id}": "Visible",
                    },
                    "metadata": {},
                    "before_row_id": None,
                },
                None,
            ),
        ]
    )
@pytest.mark.django_db(transaction=True)
@patch("baserow.ws.registries.broadcast_to_channel_group")
def test_given_row_visible_in_public_view_when_moved_row_updated_sent(
    mock_broadcast_to_channel_group, data_fixture
):
    """
    Moving a row that is visible in a public view must broadcast a
    `row_updated` event to that view whose before/after payloads differ only
    in the row's `order` value (the move itself).
    """
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    visible_field = data_fixture.create_text_field(table=table)
    hidden_field = data_fixture.create_text_field(table=table)
    public_view = data_fixture.create_grid_view(
        user, create_options=False, table=table, public=True, order=0
    )
    # Should not appear in any results
    data_fixture.create_form_view(user, table=table, public=True)
    data_fixture.create_grid_view_field_option(public_view, hidden_field, hidden=True)
    # Match the visible field
    data_fixture.create_view_filter(
        view=public_view,
        field=visible_field,
        type="contains",
        value="Visible",
    )
    # But filter out based on the hidden field
    data_fixture.create_view_filter(
        view=public_view,
        field=hidden_field,
        type="equal",
        value="ValueWhichMatchesFilter",
    )
    model = table.get_model()
    visible_moving_row = model.objects.create(
        **{
            f"field_{visible_field.id}": "Visible",
            f"field_{hidden_field.id}": "ValueWhichMatchesFilter",
        },
    )
    invisible_row = model.objects.create(
        **{
            f"field_{visible_field.id}": "Visible",
            f"field_{hidden_field.id}": "ValueWhichDoesNotMatchesFilter",
        },
    )
    # Double check the row is visible in the view to start with
    row_checker = ViewHandler().get_public_views_row_checker(
        table, model, only_include_views_which_want_realtime_events=True
    )
    assert row_checker.get_public_views_where_row_is_visible(visible_moving_row) == [
        public_view.view_ptr
    ]
    # Move the visible row behind the invisible one
    with transaction.atomic():
        RowHandler().move_row(
            user, table, visible_moving_row.id, before=invisible_row, model=model
        )
    assert mock_broadcast_to_channel_group.delay.mock_calls == (
        [
            call(f"table-{table.id}", ANY, ANY),
            call(
                f"view-{public_view.slug}",
                {
                    # The row should appear as a deleted event as for the public view
                    # it effectively has been.
                    "type": "row_updated",
                    "table_id": PUBLIC_PLACEHOLDER_ENTITY_ID,
                    "row_before_update": {
                        "id": visible_moving_row.id,
                        "order": "1.00000000000000000000",
                        # Only the visible field should be sent
                        f"field_{visible_field.id}": "Visible",
                    },
                    "row": {
                        "id": visible_moving_row.id,
                        "order": "0.99999999999999999999",
                        # Only the visible field should be sent
                        f"field_{visible_field.id}": "Visible",
                    },
                    "metadata": {},
                },
                None,
            ),
        ]
    )
@pytest.mark.django_db(transaction=True)
@patch("baserow.ws.registries.broadcast_to_channel_group")
def test_given_row_invisible_in_public_view_when_moved_no_update_sent(
    mock_broadcast_to_channel_group, data_fixture
):
    """
    Moving a row that is NOT visible in any public view must not broadcast
    anything to public view channels — only the private table channel
    receives a broadcast.
    """
    user = data_fixture.create_user()
    table = data_fixture.create_database_table(user=user)
    visible_field = data_fixture.create_text_field(table=table)
    hidden_field = data_fixture.create_text_field(table=table)
    public_view = data_fixture.create_grid_view(
        user, create_options=False, table=table, public=True, order=0
    )
    # Should not appear in any results
    data_fixture.create_form_view(user, table=table, public=True)
    data_fixture.create_grid_view_field_option(public_view, hidden_field, hidden=True)
    # Match the visible field
    data_fixture.create_view_filter(
        view=public_view,
        field=visible_field,
        type="contains",
        value="Visible",
    )
    # But filter out based on the hidden field
    data_fixture.create_view_filter(
        view=public_view,
        field=hidden_field,
        type="equal",
        value="ValueWhichMatchesFilter",
    )
    model = table.get_model()
    visible_row = model.objects.create(
        **{
            f"field_{visible_field.id}": "Visible",
            f"field_{hidden_field.id}": "ValueWhichMatchesFilter",
        },
    )
    invisible_moving_row = model.objects.create(
        **{
            f"field_{visible_field.id}": "Visible",
            f"field_{hidden_field.id}": "ValueWhichDoesNotMatchesFilter",
        },
    )
    # Double check the row is visible in the view to start with
    row_checker = ViewHandler().get_public_views_row_checker(
        table, model, only_include_views_which_want_realtime_events=True
    )
    assert row_checker.get_public_views_where_row_is_visible(invisible_moving_row) == []
    # Move the invisible row
    with transaction.atomic():
        RowHandler().move_row(
            user, table, invisible_moving_row.id, before=visible_row, model=model
        )
    # Only the private table channel broadcast — no public view events.
    assert mock_broadcast_to_channel_group.delay.mock_calls == (
        [
            call(f"table-{table.id}", ANY, ANY),
        ]
    )
| 36.523977
| 88
| 0.608396
| 3,563
| 31,228
| 4.968285
| 0.045187
| 0.073946
| 0.103717
| 0.050955
| 0.952039
| 0.948706
| 0.939668
| 0.937578
| 0.925376
| 0.922721
| 0
| 0.015165
| 0.29893
| 31,228
| 854
| 89
| 36.566745
| 0.793404
| 0.07679
| 0
| 0.704735
| 0
| 0
| 0.147799
| 0.100549
| 0
| 0
| 0
| 0
| 0.022284
| 1
| 0.01532
| false
| 0
| 0.009749
| 0
| 0.02507
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5a14dc24fffc51f42daf42ee1564bd30c0740fd4
| 81
|
py
|
Python
|
interactions/ext/voice/__init__.py
|
interactions-py/voice
|
03826fd9ea319217b287aa66c5d67f26b815c6c1
|
[
"MIT"
] | null | null | null |
interactions/ext/voice/__init__.py
|
interactions-py/voice
|
03826fd9ea319217b287aa66c5d67f26b815c6c1
|
[
"MIT"
] | null | null | null |
interactions/ext/voice/__init__.py
|
interactions-py/voice
|
03826fd9ea319217b287aa66c5d67f26b815c6c1
|
[
"MIT"
] | 1
|
2022-03-12T20:41:47.000Z
|
2022-03-12T20:41:47.000Z
|
from .client import * # noqa: F401 F403
from .state import * # noqa: F401 F403
| 27
| 40
| 0.679012
| 12
| 81
| 4.583333
| 0.583333
| 0.363636
| 0.509091
| 0.654545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.190476
| 0.222222
| 81
| 2
| 41
| 40.5
| 0.68254
| 0.382716
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
5a3e5c58abbc8801bb6306c37e56ef2420cdd53a
| 184
|
py
|
Python
|
django/mysite/learn/models.py
|
tochenwei/mypython.github.io
|
10e7917b15dee219751e76ed49d9f72c31bc6a27
|
[
"Apache-2.0"
] | null | null | null |
django/mysite/learn/models.py
|
tochenwei/mypython.github.io
|
10e7917b15dee219751e76ed49d9f72c31bc6a27
|
[
"Apache-2.0"
] | null | null | null |
django/mysite/learn/models.py
|
tochenwei/mypython.github.io
|
10e7917b15dee219751e76ed49d9f72c31bc6a27
|
[
"Apache-2.0"
] | null | null | null |
from __future__ import unicode_literals
from django.db import models
class Person(models.Model):
    """A minimal person record with a name and an age."""

    # Person's display name, capped at 20 characters.
    name = models.CharField(max_length=20)
    # NOTE(review): age is stored as a 2-character string rather than an
    # integer field, so numeric comparison/ordering will be lexicographic —
    # confirm this is intentional before relying on it.
    age = models.CharField(max_length=2)
| 23
| 42
| 0.771739
| 26
| 184
| 5.192308
| 0.692308
| 0.222222
| 0.266667
| 0.355556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.019108
| 0.146739
| 184
| 7
| 43
| 26.285714
| 0.840764
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.4
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
5a551a9722f23ba6444fdcd1e47b004a2eecc745
| 43,141
|
py
|
Python
|
Application/Jobs/edge_edline.py
|
cristi161/eecvf
|
519c488bd47f697ef51e88823f7a751a52677b88
|
[
"MIT"
] | 1
|
2021-04-02T15:33:12.000Z
|
2021-04-02T15:33:12.000Z
|
Application/Jobs/edge_edline.py
|
cristi161/eecvf
|
519c488bd47f697ef51e88823f7a751a52677b88
|
[
"MIT"
] | null | null | null |
Application/Jobs/edge_edline.py
|
cristi161/eecvf
|
519c488bd47f697ef51e88823f7a751a52677b88
|
[
"MIT"
] | 1
|
2021-08-14T09:07:22.000Z
|
2021-08-14T09:07:22.000Z
|
# noinspection PyPackageRequirements
from typing import Tuple
import thinning
# Do not delete used indirectly
# noinspection PyUnresolvedReferences
from Application.Frame import transferJobPorts
from Application.Frame.global_variables import JobInitStateReturn
from Application.Frame.transferJobPorts import get_port_from_wave
from Utils.log_handler import log_error_to_console
from Application.Config.create_config import jobs_dict, create_dictionary_element
from config_main import PYRAMID_LEVEL, FILTERS
from Application.Config.util import transform_port_name_lvl, transform_port_size_lvl, job_name_create, get_module_name_from_file
import Application.Jobs.kernels
from Application.Jobs.external.EDLine.EdgeDrawing import EdgeDrawing
from Application.Jobs.ed_lines_modified import EdgeDrawing_modified
from Application.Jobs.external.EDLine.LineDetector import EDLine
import numpy as np
import cv2
############################################################################################################################################
# Init functions
############################################################################################################################################
def init_func_edge_drawing() -> JobInitStateReturn:
    """
    Init function for the draw edge algorithm.
    Takes no parameters and performs no setup work; it unconditionally
    reports a successful initialisation.
    :return: INIT or NOT_INIT state for the job
    """
    return JobInitStateReturn(True)
def init_func_ed_lines() -> JobInitStateReturn:
    """
    Init function for the ED lines algorithm; performs no setup work and
    unconditionally reports a successful initialisation.
    :return: INIT or NOT_INIT state for the job
    """
    return JobInitStateReturn(True)
############################################################################################################################################
# Main functions
############################################################################################################################################
def main_edge_drawing_func(port_list: list = None) -> bool:
    """
    Edge segment detection algorithm that runs real-time and produces high quality edge segments, each of which is a linear pixel chain.
    Unlike traditional edge detectors, which work on the thresholded gradient magnitude cluster to determine edge elements, our method first
    spots sparse points along rows and columns called anchors, and then joins these anchors via a smart, heuristic edge tracing procedure,
    hence the name Edge Drawing (ED). ED produces edge maps that always consist of clean, perfectly contiguous, well-localized, one-pixel
    wide edges.
    :param port_list: Param needed list of 10 port names/values, in order:
                      [input, wave_of_input, do_smoothing, gauss_kernel_size, gauss_sigma,
                       gradient_thr, anchor_thr, scan_interval, edges_output, edge_map_output]
    :return: True if the job executed OK.
    """
    # Positional indices of each expected element of port_list.
    # noinspection PyPep8Naming
    PORT_IN_POS = 0
    # noinspection PyPep8Naming
    PORT_IN_WAVE = 1
    # noinspection PyPep8Naming
    PORT_IN_SMOOTHING = 2
    # noinspection PyPep8Naming
    PORT_GAUSS_KERNEL_SIZE = 3
    # noinspection PyPep8Naming
    PORT_GAUSS_SIGMA = 4
    # noinspection PyPep8Naming
    PORT_GRAD_THR = 5
    # noinspection PyPep8Naming
    PORT_ANCHOR_THR = 6
    # noinspection PyPep8Naming
    PORT_SCAN_INTERVAL = 7
    # noinspection PyPep8Naming
    PORT_OUT_EDGES_POS = 8
    # noinspection PyPep8Naming
    PORT_OUT_EDGE_MAP_POS = 9
    # check if param OK
    if len(port_list) != 10:
        log_error_to_console("EDGE DRAWING JOB MAIN FUNCTION PARAM NOK", str(len(port_list)))
        return False
    else:
        p_in = get_port_from_wave(name=port_list[PORT_IN_POS], wave_offset=port_list[PORT_IN_WAVE])
        p_out_edges = get_port_from_wave(name=port_list[PORT_OUT_EDGES_POS])
        p_out_edge_map = get_port_from_wave(name=port_list[PORT_OUT_EDGE_MAP_POS])
        if p_in.is_valid():
            # Placeholder so the ValueError handler below can always report a shape.
            tmp_edge = np.array((1, 1))
            try:
                # parameters for Edge Drawing
                EDParam = {
                    # gaussian Smooth filter size if smoothed = False
                    'ksize': port_list[PORT_GAUSS_KERNEL_SIZE],
                    # gaussian smooth sigma if smoothed = False
                    'sigma': port_list[PORT_GAUSS_SIGMA],
                    # threshold on gradient image
                    'gradientThreshold': port_list[PORT_GRAD_THR],
                    # threshold to determine the anchor
                    'anchorThreshold': port_list[PORT_ANCHOR_THR],
                    # scan interval, the smaller, the more detail
                    'scanIntervals': port_list[PORT_SCAN_INTERVAL]}
                ED_edge_map = EdgeDrawing(EDParam)
                # The smoothing flag is inverted: `smoothed=True` tells ED the
                # input is already smoothed, so do_smoothing=True maps to False.
                edges, edges_map = ED_edge_map.EdgeDrawing(image=p_in.arr.copy(), smoothed=not (port_list[PORT_IN_SMOOTHING]))
                p_out_edge_map.arr[:] = edges_map
                p_out_edge_map.set_valid()
                # Copy each detected edge (a pixel chain) into the pre-allocated
                # output array; rows beyond len(tmp_edge) keep their old content.
                for edge_id in range(len(edges)):
                    tmp_edge = np.array(edges[edge_id])
                    p_out_edges.arr[edge_id][:len(tmp_edge)] = tmp_edge
                p_out_edges.set_valid()
            except IndexError:
                # More edges were found than rows allocated in the output port.
                log_error_to_console("ED_LINE JOB NOK! PLEASE ADJUST NUMBER OF EDGES: {e_a} < {e_m}".format(
                    e_a=len(edges), e_m=p_out_edges.arr.shape[0]))
            except ValueError:
                # One edge had more pixels than columns allocated in the output port.
                log_error_to_console("ED_LINE JOB NOK! PLEASE ADJUST NUMBER OF PIXELS OF EDGES: {e_a} < {e_m}".format(
                    e_a=tmp_edge.shape[0], e_m=p_out_edges.arr.shape[1]))
            except BaseException as error:
                log_error_to_console("ED_LINE JOB NOK: ", str(error))
        else:
            return False
    return True
def main_edge_drawing_mod_func(port_list: list = None) -> bool:
    """
    Modified Edge Drawing (ED) edge segment detection.

    Edge segment detection algorithm that runs real-time and produces high quality edge segments, each of which is a linear pixel chain.
    Sparse anchor points are spotted along rows and columns and joined via a heuristic edge tracing procedure, producing edge maps that
    consist of clean, perfectly contiguous, well-localized, one-pixel wide edges. Unlike the plain ED job, the gradient is computed with
    a caller-selected kernel pair and no smoothing is done here (smooth the input in a separate job if needed).

    :param port_list: expected 9 entries:
        [input, input_wave, kernel_x, kernel_y, gradient_thr, anchor_thr, scan_interval, out_edges, out_edge_map]
        gradient_thr / anchor_thr may each be a port name (str) whose first array element supplies the
        threshold at runtime, or a plain numeric value.
    :return: True if the job executed OK.
    """
    # Positions of each parameter inside port_list.
    # noinspection PyPep8Naming
    PORT_IN_POS = 0
    # noinspection PyPep8Naming
    PORT_IN_WAVE = 1
    # noinspection PyPep8Naming
    KERNEL_X_POS = 2
    # noinspection PyPep8Naming
    KERNEL_Y_POS = 3
    # noinspection PyPep8Naming
    PORT_GRAD_THR = 4
    # noinspection PyPep8Naming
    PORT_ANCHOR_THR = 5
    # noinspection PyPep8Naming
    PORT_SCAN_INTERVAL = 6
    # noinspection PyPep8Naming
    PORT_OUT_EDGES_POS = 7
    # noinspection PyPep8Naming
    PORT_OUT_EDGE_MAP_POS = 8

    # check if param OK
    if len(port_list) != 9:
        log_error_to_console("EDGE DRAWING JOB MAIN FUNCTION PARAM NOK", str(len(port_list)))
        return False
    else:
        p_in = get_port_from_wave(name=port_list[PORT_IN_POS], wave_offset=port_list[PORT_IN_WAVE])
        p_out_edges = get_port_from_wave(name=port_list[PORT_OUT_EDGES_POS])
        p_out_edge_map = get_port_from_wave(name=port_list[PORT_OUT_EDGE_MAP_POS])

        if p_in.is_valid() is True:
            # fallback so the ValueError log below has data even if the failure happens before the copy loop
            tmp_edge = np.array((1, 1))
            try:
                # thresholds may come from a port (str name) or be given directly as numbers
                if isinstance(port_list[PORT_GRAD_THR], str):
                    param_grad = get_port_from_wave(name=port_list[PORT_GRAD_THR], wave_offset=port_list[PORT_IN_WAVE]).arr[0]
                else:
                    param_grad = port_list[PORT_GRAD_THR]

                if isinstance(port_list[PORT_ANCHOR_THR], str):
                    anchor_param = get_port_from_wave(name=port_list[PORT_ANCHOR_THR], wave_offset=port_list[PORT_IN_WAVE]).arr[0]
                else:
                    anchor_param = port_list[PORT_ANCHOR_THR]

                # NOTE(review): eval on job-config strings — acceptable only because job parameters
                # are authored in-project; never feed untrusted input through port_list.
                if 'x' in port_list[KERNEL_X_POS] or 'y' in port_list[KERNEL_Y_POS]:
                    # named kernels resolved from the Application.Jobs.kernels module
                    kernel_x = eval('Application.Jobs.kernels.' + port_list[KERNEL_X_POS])
                    kernel_y = eval('Application.Jobs.kernels.' + port_list[KERNEL_Y_POS])
                else:
                    # literal kernels passed as python-list strings
                    kernel_x = np.array(eval(port_list[KERNEL_X_POS]))
                    kernel_y = np.array(eval(port_list[KERNEL_Y_POS]))

                # parameters for Edge Drawing
                EDParam = {
                    # threshold on gradient image
                    'gradientThreshold': param_grad,
                    # threshold to determine the anchor
                    'anchorThreshold': anchor_param,
                    # scan interval, the smaller, the more detail
                    'scanIntervals': port_list[PORT_SCAN_INTERVAL],
                    'kernel_x': kernel_x,
                    'kernel_y': kernel_y,
                }

                ED_edge_map = EdgeDrawing_modified(EDParam)
                edges, edges_map = ED_edge_map.EdgeDrawing(image=p_in.arr.copy())

                p_out_edge_map.arr[:] = edges_map
                p_out_edge_map.set_valid()

                for edge_id in range(len(edges)):
                    tmp_edge = np.array(edges[edge_id])
                    p_out_edges.arr[edge_id][:len(tmp_edge)] = tmp_edge
                p_out_edges.set_valid()
            except IndexError:
                # more edges found than the output port can hold
                # (message fixed: previously mislabeled as ED_LINE job)
                log_error_to_console("EDGE DRAWING MOD JOB NOK! PLEASE ADJUST NUMBER OF EDGES: {e_a} < {e_m}".format(
                    e_a=len(edges), e_m=p_out_edges.arr.shape[0]))
            except ValueError:
                # an edge holds more points than an output row can hold
                log_error_to_console("EDGE DRAWING MOD JOB NOK! PLEASE ADJUST NUMBER OF PIXELS OF EDGES: {e_a} < {e_m}".format(
                    e_a=tmp_edge.shape[0], e_m=p_out_edges.arr.shape[1]))
            except BaseException as error:
                # catch-all so one bad frame does not kill the pipeline
                log_error_to_console("EDGE DRAWING MOD JOB NOK: ", str(error))
        else:
            return False

    return True
def main_ed_line_func(port_list: list = None) -> bool:
    """
    EDLine job: run the Edge Drawing (ED) detector to obtain edge segments, then
    extract straight line segments from those segments with EDLine (least-squares
    line fitting), writing four outputs: the edge point list, the edge map image,
    the line point list and the line map image.

    :param port_list: expected 14 entries:
        [input, input_wave, do_smoothing, gauss_kernel_size, gauss_sigma,
         gradient_thr, anchor_thr, scan_interval, min_line_len, fit_err_thr,
         out_edges, out_edge_map, out_lines, out_line_map]
    :return: True if the job executed OK.
    """
    # Positions of each parameter inside port_list.
    # noinspection PyPep8Naming
    PORT_IN_POS = 0
    # noinspection PyPep8Naming
    PORT_IN_WAVE_POS = 1
    # noinspection PyPep8Naming
    PORT_IN_SMOOTHING_POS = 2
    # noinspection PyPep8Naming
    PORT_GAUSS_KERNEL_SIZE_POS = 3
    # noinspection PyPep8Naming
    PORT_GAUSS_SIGMA_POS = 4
    # noinspection PyPep8Naming
    PORT_GRAD_THR_POS = 5
    # noinspection PyPep8Naming
    PORT_ANCHOR_THR_POS = 6
    # noinspection PyPep8Naming
    PORT_SCAN_INTERVAL_POS = 7
    # noinspection PyPep8Naming
    PORT_MIN_LINE_LEN_POS = 8
    # noinspection PyPep8Naming
    PORT_IN_FIT_ERR_THR_POS = 9
    # noinspection PyPep8Naming
    PORT_OUT_EDGES_POS = 10
    # noinspection PyPep8Naming
    PORT_OUT_EDGE_MAP_POS = 11
    # noinspection PyPep8Naming
    PORT_OUT_LINE_POS = 12
    # noinspection PyPep8Naming
    PORT_OUT_LINE_MAP_POS = 13

    # check if param OK
    if len(port_list) != 14:
        log_error_to_console("ED_LINE JOB MAIN FUNCTION PARAM NOK", str(len(port_list)))
        return False
    else:
        p_in = get_port_from_wave(name=port_list[PORT_IN_POS], wave_offset=port_list[PORT_IN_WAVE_POS])
        p_out_edges = get_port_from_wave(name=port_list[PORT_OUT_EDGES_POS])
        p_out_edge_map = get_port_from_wave(name=port_list[PORT_OUT_EDGE_MAP_POS])
        p_out_lines = get_port_from_wave(name=port_list[PORT_OUT_LINE_POS])
        p_out_map_lines = get_port_from_wave(name=port_list[PORT_OUT_LINE_MAP_POS])

        if p_in.is_valid() is True:
            try:
                # parameters for Edge Drawing
                EDParam = {
                    # gaussian smooth filter size if smoothed = False
                    'ksize': port_list[PORT_GAUSS_KERNEL_SIZE_POS],
                    # gaussian smooth sigma if smoothed = False
                    'sigma': port_list[PORT_GAUSS_SIGMA_POS],
                    # threshold on gradient image
                    'gradientThreshold': port_list[PORT_GRAD_THR_POS],
                    # threshold to determine the anchor
                    'anchorThreshold': port_list[PORT_ANCHOR_THR_POS],
                    # scan interval, the smaller, the more detail
                    'scanIntervals': port_list[PORT_SCAN_INTERVAL_POS]}

                ED_edge_map = EdgeDrawing(EDParam)
                edges, edges_map = ED_edge_map.EdgeDrawing(image=p_in.arr.copy(), smoothed=not (port_list[PORT_IN_SMOOTHING_POS]))
                # fit straight line segments to the traced edge segments
                lines = EDLine(edges=edges, minLineLen=port_list[PORT_MIN_LINE_LEN_POS],
                               lineFitErrThreshold=port_list[PORT_IN_FIT_ERR_THR_POS])
                # fallbacks so the ValueError log below has data even on early failure
                tmp_line = np.array((1, 1))
                tmp_edge = np.array((1, 1))

                p_out_edge_map.arr[:] = edges_map
                p_out_edge_map.set_valid()

                for edge_id in range(len(edges)):
                    tmp_edge = np.array(edges[edge_id])
                    p_out_edges.arr[edge_id][:len(tmp_edge)] = tmp_edge
                p_out_edges.set_valid()

                # tmp_img = np.zeros((p_in.arr.shape[0], p_in.arr.shape[1], 3), dtype=np.uint8)
                for line_id in range(len(lines)):
                    tmp_line = np.array(lines[line_id])
                    p_out_lines.arr[line_id][:len(tmp_line)] = tmp_line
                    for el in tmp_line:
                        # mark every pixel of the line in the line-map image
                        p_out_map_lines.arr[el[0], el[1]] = 255
                    # to activate if needed RGB lines.
                    # label_hue = np.uint8((line_id + 1) % 179)
                    # blank_ch = np.uint8(255 * label_hue)
                    # for el in tmp_line:
                    #     tmp_img[el[0], el[1], :] = (label_hue, blank_ch, blank_ch)
                    # Converting cvt to BGR
                    # p_out_map_lines.arr[:] = cv2.cvtColor(tmp_img, cv2.COLOR_HSV2BGR)
                p_out_lines.set_valid()
                p_out_map_lines.set_valid()
            except IndexError as error:
                # more edges/lines found than the output ports can hold
                log_error_to_console("ED_LINE JOB NOK! PLEASE ADJUST NUMBER OF EDGES: {e_a} < {e_m} OR LINES: {l_a} < {l_m}".format(
                    e_a=len(edges), e_m=p_out_edges.arr.shape[0], l_a=len(lines), l_m=p_out_lines.arr.shape[0]))
                pass
            except ValueError as error:
                # an edge/line holds more points than an output row can hold
                log_error_to_console(
                    "ED_LINE JOB NOK! PLEASE ADJUST NUMBER OF PIXELS OF EDGES: {e_a} < {e_m} OR LINES: {l_a} < {l_m}".format(
                        e_a=tmp_edge.shape[0], e_m=p_out_edges.arr.shape[1], l_a=tmp_line.shape[0], l_m=p_out_lines.arr.shape[1]))
                pass
            except BaseException as error:
                # catch-all so one bad frame does not kill the pipeline
                log_error_to_console("ED_LINE JOB NOK: ", str(error))
                pass
        else:
            return False

    return True
def main_ed_line_mod_func(port_list: list = None) -> bool:
    """
    Modified EDLine job: run the modified Edge Drawing detector (caller-chosen
    gradient kernel pair, no internal smoothing) and then extract straight line
    segments from the resulting edge segments with EDLine.

    :param port_list: expected 13 entries:
        [input, input_wave, kernel_x, kernel_y, gradient_thr, anchor_thr,
         scan_interval, min_line_len, fit_err_thr,
         out_edges, out_edge_map, out_lines, out_line_map]
        gradient_thr / anchor_thr may each be a port name (str) whose first
        array element supplies the threshold, or a plain numeric value.
    :return: True if the job executed OK.
    """
    # Positions of each parameter inside port_list.
    # noinspection PyPep8Naming
    PORT_IN_POS = 0
    # noinspection PyPep8Naming
    PORT_IN_WAVE_POS = 1
    # noinspection PyPep8Naming
    KERNEL_X_POS = 2
    # noinspection PyPep8Naming
    KERNEL_Y_POS = 3
    # noinspection PyPep8Naming
    PORT_GRAD_THR_POS = 4
    # noinspection PyPep8Naming
    PORT_ANCHOR_THR_POS = 5
    # noinspection PyPep8Naming
    PORT_SCAN_INTERVAL_POS = 6
    # noinspection PyPep8Naming
    PORT_MIN_LINE_LEN_POS = 7
    # noinspection PyPep8Naming
    PORT_IN_FIT_ERR_THR_POS = 8
    # noinspection PyPep8Naming
    PORT_OUT_EDGES_POS = 9
    # noinspection PyPep8Naming
    PORT_OUT_EDGE_MAP_POS = 10
    # noinspection PyPep8Naming
    PORT_OUT_LINE_POS = 11
    # noinspection PyPep8Naming
    PORT_OUT_LINE_MAP_POS = 12

    # check if param OK
    if len(port_list) != 13:
        log_error_to_console("ED_LINE_MOD JOB MAIN FUNCTION PARAM NOK", str(len(port_list)))
        return False
    else:
        p_in = get_port_from_wave(name=port_list[PORT_IN_POS], wave_offset=port_list[PORT_IN_WAVE_POS])
        p_out_edges = get_port_from_wave(name=port_list[PORT_OUT_EDGES_POS])
        p_out_edge_map = get_port_from_wave(name=port_list[PORT_OUT_EDGE_MAP_POS])
        p_out_lines = get_port_from_wave(name=port_list[PORT_OUT_LINE_POS])
        p_out_map_lines = get_port_from_wave(name=port_list[PORT_OUT_LINE_MAP_POS])

        if p_in.is_valid() is True:
            try:
                # thresholds may come from a port (str name) or be given directly as numbers
                param_grad = 0
                if isinstance(port_list[PORT_GRAD_THR_POS], str):
                    param_grad = get_port_from_wave(name=port_list[PORT_GRAD_THR_POS], wave_offset=port_list[PORT_IN_WAVE_POS]).arr[0]
                else:
                    param_grad = port_list[PORT_GRAD_THR_POS]

                anchor_param = 0
                if isinstance(port_list[PORT_ANCHOR_THR_POS], str):
                    anchor_param = get_port_from_wave(name=port_list[PORT_ANCHOR_THR_POS], wave_offset=port_list[PORT_IN_WAVE_POS]).arr[0]
                else:
                    anchor_param = port_list[PORT_ANCHOR_THR_POS]

                # NOTE(review): eval on job-config strings — acceptable only because job
                # parameters are authored in-project, never untrusted input.
                if 'x' in port_list[KERNEL_X_POS] or 'y' in port_list[KERNEL_Y_POS]:
                    # named kernels resolved from the Application.Jobs.kernels module
                    kernel_x = eval('Application.Jobs.kernels.' + port_list[KERNEL_X_POS])
                    kernel_y = eval('Application.Jobs.kernels.' + port_list[KERNEL_Y_POS])
                else:
                    # literal kernels passed as python-list strings
                    kernel_x = np.array(eval(port_list[KERNEL_X_POS]))
                    kernel_y = np.array(eval(port_list[KERNEL_Y_POS]))

                # parameters for Edge Drawing
                EDParam = \
                    {
                        # threshold on gradient image
                        'gradientThreshold': param_grad,
                        # threshold to determine the anchor
                        'anchorThreshold': anchor_param,
                        # scan interval, the smaller, the more detail
                        'scanIntervals': port_list[PORT_SCAN_INTERVAL_POS],
                        'kernel_x': kernel_x,
                        'kernel_y': kernel_y,
                    }

                ED_edge_map = EdgeDrawing_modified(EDParam)
                edges, edges_map = ED_edge_map.EdgeDrawing(image=p_in.arr.copy())
                # fit straight line segments to the traced edge segments
                lines = EDLine(edges=edges, minLineLen=port_list[PORT_MIN_LINE_LEN_POS],
                               lineFitErrThreshold=port_list[PORT_IN_FIT_ERR_THR_POS])
                # fallbacks so the ValueError log below has data even on early failure
                tmp_line = np.array((1, 1))
                tmp_edge = np.array((1, 1))

                p_out_edge_map.arr[:] = edges_map
                p_out_edge_map.set_valid()

                for edge_id in range(len(edges)):
                    tmp_edge = np.array(edges[edge_id])
                    p_out_edges.arr[edge_id][:len(tmp_edge),:] = tmp_edge
                p_out_edges.set_valid()

                # tmp_img = np.zeros((p_in.arr.shape[0], p_in.arr.shape[1], 3), dtype=np.uint8)
                for line_id in range(len(lines)):
                    tmp_line = np.array(lines[line_id])
                    p_out_lines.arr[line_id][:len(tmp_line), :] = tmp_line
                    for el in tmp_line:
                        # mark every pixel of the line in the line-map image
                        p_out_map_lines.arr[el[0], el[1]] = 255
                    # to activate if needed RGB lines.
                    # label_hue = np.uint8((line_id + 1) % 179)
                    # blank_ch = np.uint8(255 * label_hue)
                    # for el in tmp_line:
                    #     tmp_img[el[0], el[1], :] = (label_hue, blank_ch, blank_ch)
                    # Converting cvt to BGR
                    # p_out_map_lines.arr[:] = cv2.cvtColor(tmp_img, cv2.COLOR_HSV2BGR)
                p_out_lines.set_valid()
                p_out_map_lines.set_valid()
            except IndexError as error:
                # more edges/lines found than the output ports can hold
                log_error_to_console("ED_LINE JOB NOK! PLEASE ADJUST NUMBER OF EDGES: {e_a} < {e_m} OR LINES: {l_a} < {l_m}".format(
                    e_a=len(edges), e_m=p_out_edges.arr.shape[0], l_a=len(lines), l_m=p_out_lines.arr.shape[0]))
                pass
            except ValueError as error:
                # an edge/line holds more points than an output row can hold;
                # the line map is cleared and validated so downstream consumers
                # still see a usable (empty) image for this frame
                log_error_to_console(
                    "ED_LINE JOB NOK! PLEASE ADJUST NUMBER OF PIXELS OF EDGES: {e_a} < {e_m} OR LINES: {l_a} < {l_m}".format(
                        e_a=tmp_edge.shape[0], e_m=p_out_edges.arr.shape[1], l_a=tmp_line.shape[0], l_m=p_out_lines.arr.shape[1]))
                p_out_map_lines.arr[:]=np.zeros(shape=p_out_map_lines.arr.shape, dtype=p_out_map_lines.arr.dtype)
                p_out_map_lines.set_valid()
                pass
            except BaseException as error:
                # catch-all so one bad frame does not kill the pipeline
                log_error_to_console("ED_LINE JOB NOK: ", str(error))
                pass
        else:
            return False

    return True
############################################################################################################################################
# Job create functions
############################################################################################################################################
def do_edge_drawing_job(port_input_name: str,
                        max_edges: int = 5000, max_points_edge: int = 500,
                        gradient_thr: int = 36, anchor_thr: int = 8, scan_interval: int = 1,
                        do_smoothing: bool = True, gaussian_kernel_size: int = 3, gaussian_sigma: float = 1,
                        port_edge_map_name_output: str = None, port_edges_name_output: str = None,
                        level: PYRAMID_LEVEL = PYRAMID_LEVEL.LEVEL_0, wave_offset: int = 0) -> Tuple[str, str]:
    """
    Register an Edge Drawing (ED) job in the application pipeline.

    ED is a real-time edge segment detector: it spots sparse anchor points along
    rows and columns and joins them by a heuristic tracing procedure, yielding
    clean, contiguous, one-pixel wide edge maps plus the edge segments themselves.

    :param port_input_name: name of input port
    :param max_edges: max number of edges to hold in port
    :param max_points_edge: max number of points per edge
    :param gradient_thr: threshold on gradient image
    :param anchor_thr: threshold to determine the anchor
    :param scan_interval: scan interval, the smaller, the more detail
    :param do_smoothing: if we want to smooth the image
    :param gaussian_kernel_size: gaussian smooth filter size if smoothed = False
    :param gaussian_sigma: gaussian smooth sigma if smoothed = False
    :param port_edge_map_name_output: name of output port for edge map
    :param port_edges_name_output: name of output port for list of edge points
    :param level: pyramid level to calculate at
    :param wave_offset: port wave offset. If 0 it is in current wave.
    :return: (edge map port name, edge segments port name)
    """
    input_port_name = transform_port_name_lvl(name=port_input_name, lvl=level)

    # sigma rendered with '.' replaced so it is a legal port-name fragment
    sigma_tag = str(gaussian_sigma).replace(".", "_")

    if port_edge_map_name_output is None:
        port_edge_map_name_output = f'EDGE_DRAWING_THR_{gradient_thr}_ANC_THR_{anchor_thr}_SCAN_{scan_interval}'
        if do_smoothing is True:
            port_edge_map_name_output += f'_GAUSS_S_{sigma_tag}_K_{gaussian_kernel_size}_{port_input_name}'
        else:
            port_edge_map_name_output += f'_{port_input_name}'

    if port_edges_name_output is None:
        port_edges_name_output = f'EDGE_DRAWING_SEGMENTS_{gradient_thr}_ANC_THR_{anchor_thr}_SCAN_{scan_interval}'
        if do_smoothing is True:
            port_edges_name_output += f'_GAUSS_S_{sigma_tag}_{port_input_name}'
        else:
            port_edges_name_output += f'_{port_input_name}'

    output_port_edge_map_name = transform_port_name_lvl(name=port_edge_map_name_output, lvl=level)
    output_port_edge_map_size = transform_port_size_lvl(lvl=level, rgb=False)
    output_port_edges_name = transform_port_name_lvl(name=port_edges_name_output, lvl=level)

    input_port_list = [input_port_name]
    main_func_list = [input_port_name, wave_offset, do_smoothing, gaussian_kernel_size, gaussian_sigma, gradient_thr, anchor_thr,
                      scan_interval, output_port_edges_name, output_port_edge_map_name]
    output_port_list = [(output_port_edges_name, f"({max_edges},{max_points_edge}, 2)", 'H', False),
                        (output_port_edge_map_name, output_port_edge_map_size, 'B', True)]

    job_name = job_name_create(action='Edge Drawing', input_list=input_port_list, wave_offset=[wave_offset], level=level)

    jobs_dict.append(
        create_dictionary_element(job_module=get_module_name_from_file(__file__),
                                  job_name=job_name,
                                  input_ports=input_port_list,
                                  init_func_name='init_func_edge_drawing', init_func_param=None,
                                  main_func_name='main_edge_drawing_func',
                                  main_func_param=main_func_list,
                                  output_ports=output_port_list))

    return port_edge_map_name_output, port_edges_name_output
def do_edge_drawing_mod_job(port_input_name: str, operator: str,
                            max_edges: int = 5000, max_points_edge: int = 500,
                            gradient_thr: int = 36, anchor_thr: int = 8, scan_interval: int = 1,
                            port_edge_map_name_output: str = None, port_edges_name_output: str = None,
                            level: PYRAMID_LEVEL = PYRAMID_LEVEL.LEVEL_0, wave_offset: int = 0) -> Tuple[str, str]:
    """
    Register a modified Edge Drawing (ED) job that computes its gradient with a
    caller-chosen operator kernel pair. ED spots sparse anchor points along rows
    and columns and joins them by a heuristic tracing procedure, producing clean,
    contiguous, one-pixel wide edge maps. Smoothing must be done by a separate
    job before this one.

    :param port_input_name: name of input port
    :param operator: what operator we wish to use
    :param max_edges: max number of edges to hold in port
    :param max_points_edge: max number of points per edge
    :param gradient_thr: threshold on gradient image (int, or port name as str)
    :param anchor_thr: threshold to determine the anchor (int, or port name as str)
    :param scan_interval: scan interval, the smaller, the more detail
    :param port_edge_map_name_output: name of output port for edge map
    :param port_edges_name_output: name of output port for list of edge points
    :param level: pyramid level to calculate at
    :param wave_offset: port wave offset. If 0 it is in current wave.
    :return: (edge map port name, edge segments port name)
    """
    input_port_name = transform_port_name_lvl(name=port_input_name, lvl=level)

    operator_job = operator.replace('_', ' ')
    kernel_x = operator.lower() + '_x'
    kernel_y = operator.lower() + '_y'
    op_tag = operator_job.replace(' ', '_')

    if port_edge_map_name_output is None:
        port_edge_map_name_output = (f'EDGE_DRAWING_MOD_THR_{gradient_thr}_ANC_THR_{anchor_thr}'
                                     f'_SCAN_{scan_interval}_{op_tag}_{port_input_name}')

    if port_edges_name_output is None:
        port_edges_name_output = (f'EDGE_DRAWING_MOD_SEGMENTS_{gradient_thr}_ANC_THR_{anchor_thr}'
                                  f'_SCAN_{scan_interval}_{op_tag}_{port_input_name}')

    # when a threshold is supplied as a port name, append the level tag so the
    # main function reads the per-level port
    if isinstance(gradient_thr, str):
        gradient_thr += '_' + level
    if isinstance(anchor_thr, str):
        anchor_thr += '_' + level

    output_port_edge_map_name = transform_port_name_lvl(name=port_edge_map_name_output, lvl=level)
    output_port_edge_map_size = transform_port_size_lvl(lvl=level, rgb=False)
    output_port_edges_name = transform_port_name_lvl(name=port_edges_name_output, lvl=level)

    input_port_list = [input_port_name]
    main_func_list = [input_port_name, wave_offset, kernel_x, kernel_y, gradient_thr, anchor_thr,
                      scan_interval, output_port_edges_name, output_port_edge_map_name]
    output_port_list = [(output_port_edges_name, f"({max_edges},{max_points_edge}, 2)", 'H', False),
                        (output_port_edge_map_name, output_port_edge_map_size, 'B', True)]

    job_name = job_name_create(action='Edge Drawing Modified', input_list=input_port_list, wave_offset=[wave_offset], level=level)

    jobs_dict.append(
        create_dictionary_element(job_module=get_module_name_from_file(__file__),
                                  job_name=job_name,
                                  input_ports=input_port_list,
                                  init_func_name='init_func_edge_drawing', init_func_param=None,
                                  main_func_name='main_edge_drawing_mod_func',
                                  main_func_param=main_func_list,
                                  output_ports=output_port_list))

    return port_edge_map_name_output, port_edges_name_output
def do_ed_lines_job(port_input_name: str, min_line_length: int,
                    max_edges: int = 5000, max_points_edge: int = 500, max_lines: int = 5000, max_points_line: int = 500,
                    gradient_thr: int = 36, anchor_thr: int = 8, scan_interval: int = 1, line_fit_err_thr: int = 1,
                    do_smoothing: bool = True, gaussian_kernel_size: int = 0, gaussian_sigma: float = 0,
                    port_edge_map_name_output: str = None, port_edges_name_output: str = None,
                    port_lines_name_output: str = None, port_lines_img_output: str = None,
                    level: PYRAMID_LEVEL = PYRAMID_LEVEL.LEVEL_0, wave_offset: int = 0) -> Tuple[str, str, str, str]:
    """
    Register an EDLines job. EDLines comprises three steps: (1) run the Edge Drawing (ED)
    detector, producing clean, contiguous chains of pixels (edge segments) that intuitively
    correspond to object boundaries; (2) extract line segments from those pixel chains by a
    straightness criterion (least-squares line fitting); (3) validate lines via the
    Helmholtz principle to eliminate false detections.

    :param port_input_name: name of input port
    :param min_line_length: min number of pixel per line
    :param max_edges: max number of edges to hold in port
    :param max_points_edge: max number of points per edge
    :param max_lines: max number of lines to hold in port
    :param max_points_line: max number of points per line
    :param line_fit_err_thr: line fitting error
    :param gradient_thr: threshold on gradient image
    :param anchor_thr: threshold to determine the anchor
    :param scan_interval: scan interval, the smaller, the more detail
    :param do_smoothing: if we want to smooth the image
    :param gaussian_kernel_size: gaussian smooth filter size if smoothed = False
    :param gaussian_sigma: gaussian smooth sigma if smoothed = False
    :param port_edge_map_name_output: name of output port
    :param port_edges_name_output: name of output port
    :param port_lines_name_output: name of output port
    :param port_lines_img_output: name of output port
    :param level: pyramid level to calculate at
    :param wave_offset: port wave offset. If 0 it is in current wave.
    :return: output image port names: port_edge_map_name_output, port_edges_name_output, port_lines_name_output, port_lines_img_output
    """
    input_port_name = transform_port_name_lvl(name=port_input_name, lvl=level)

    if port_edge_map_name_output is None:
        port_edge_map_name_output = 'EDGE_DRAWING_THR_' + str(gradient_thr) + '_ANC_THR_' + str(anchor_thr) + '_SCAN_' + str(scan_interval)
        if do_smoothing is True:
            port_edge_map_name_output += '_GAUSS_S_' + str(gaussian_sigma).replace(".", "_") + '_' + port_input_name
        else:
            port_edge_map_name_output += '_' + port_input_name

    if port_edges_name_output is None:
        port_edges_name_output = 'EDGE_DRAWING_SEGMENTS_' + str(gradient_thr) + '_ANC_THR_' + str(anchor_thr) + '_SCAN_' + str(
            scan_interval)
        if do_smoothing is True:
            port_edges_name_output += '_GAUSS_S_' + str(gaussian_sigma).replace(".", "_") + '_' + port_input_name
        else:
            # BUG FIX: this branch previously appended the suffix to
            # port_edge_map_name_output (copy-paste error), corrupting the map
            # name and leaving the edges name without the input suffix.
            port_edges_name_output += '_' + port_input_name

    if port_lines_name_output is None:
        port_lines_name_output = 'ED_LINES_MIN_LEN_' + str(min_line_length) + '_LINE_FIT_ERR_' + str(
            line_fit_err_thr) + '_' + port_edges_name_output

    if port_lines_img_output is None:
        port_lines_img_output = 'ED_LINES_IMG_MIN_LEN_' + str(min_line_length) + '_LINE_FIT_ERR_' + str(
            line_fit_err_thr) + '_' + port_edges_name_output

    output_port_edge_map_name = transform_port_name_lvl(name=port_edge_map_name_output, lvl=level)
    output_port_edge_map_size = transform_port_size_lvl(lvl=level, rgb=False)
    output_port_line_img_name = transform_port_name_lvl(name=port_lines_img_output, lvl=level)
    output_port_line_img_size = transform_port_size_lvl(lvl=level, rgb=False)
    output_port_edges_name = transform_port_name_lvl(name=port_edges_name_output, lvl=level)
    output_port_lines_name = transform_port_name_lvl(name=port_lines_name_output, lvl=level)

    input_port_list = [input_port_name]
    main_func_list = [input_port_name, wave_offset,
                      do_smoothing, gaussian_kernel_size, gaussian_sigma, gradient_thr, anchor_thr, scan_interval,
                      min_line_length, line_fit_err_thr,
                      output_port_edges_name, output_port_edge_map_name,
                      output_port_lines_name, output_port_line_img_name]
    output_port_list = [(output_port_edges_name, "(" + str(max_edges) + "," + str(max_points_edge) + ", 2)", 'H', False),
                        (output_port_edge_map_name, output_port_edge_map_size, 'B', True),
                        (output_port_lines_name, "(" + str(max_lines) + "," + str(max_points_line) + ", 2)", 'H', False),
                        (output_port_line_img_name, output_port_line_img_size, 'B', True)]

    job_name = job_name_create(action='Ed_Lines', input_list=input_port_list, wave_offset=[wave_offset], level=level)

    d = create_dictionary_element(job_module=get_module_name_from_file(__file__),
                                  job_name=job_name,
                                  input_ports=input_port_list,
                                  init_func_name='init_func_ed_lines', init_func_param=None,
                                  main_func_name='main_ed_line_func',
                                  main_func_param=main_func_list,
                                  output_ports=output_port_list)

    jobs_dict.append(d)

    return port_edge_map_name_output, port_edges_name_output, port_lines_name_output, port_lines_img_output
def do_ed_lines_mod_job(port_input_name: str, min_line_length: int, operator: str,
                        gradient_thr: int = 36, anchor_thr: int = 8, scan_interval: int = 1, line_fit_err_thr: int = 1,
                        max_edges: int = 5000, max_points_edge: int = 500, max_lines: int = 5000, max_points_line: int = 500,
                        port_edge_map_name_output: str = None, port_edges_name_output: str = None,
                        port_lines_name_output: str = None, port_lines_img_output: str = None,
                        level: PYRAMID_LEVEL = PYRAMID_LEVEL.LEVEL_0, wave_offset: int = 0) -> Tuple[str, str, str, str]:
    """
    Register an EDLines job backed by the modified Edge Drawing detector (caller-chosen
    gradient operator). EDLines: (1) run ED to get clean, contiguous pixel chains (edge
    segments); (2) fit line segments to them with the least-squares straightness
    criterion; (3) validate lines via the Helmholtz principle to drop false detections.

    :param port_input_name: name of input port
    :param min_line_length: min number of pixel per line
    :param operator: what operator we wish to use
    :param gradient_thr: threshold on gradient image (int, or port name as str)
    :param anchor_thr: threshold to determine the anchor (int, or port name as str)
    :param scan_interval: scan interval, the smaller, the more detail
    :param line_fit_err_thr: line fitting error
    :param max_edges: max number of edges to hold in port
    :param max_points_edge: max number of points per edge
    :param max_lines: max number of lines to hold in port
    :param max_points_line: max number of points per line
    :param port_edge_map_name_output: name of output port
    :param port_edges_name_output: name of output port
    :param port_lines_name_output: name of output port
    :param port_lines_img_output: name of output port
    :param level: pyramid level to calculate at
    :param wave_offset: port wave offset. If 0 it is in current wave.
    :return: output image port names: port_edge_map_name_output, port_edges_name_output, port_lines_name_output, port_lines_img_output
    """
    input_port_name = transform_port_name_lvl(name=port_input_name, lvl=level)

    operator_job = operator.replace('_', ' ')
    kernel_x = operator.lower() + '_x'
    kernel_y = operator.lower() + '_y'
    op_tag = operator_job.replace(' ', '_')

    if port_edge_map_name_output is None:
        port_edge_map_name_output = (f'EDGE_DRAWING_MOD_THR_{gradient_thr}_ANC_THR_{anchor_thr}'
                                     f'_SCAN_{scan_interval}_{op_tag}_{port_input_name}')

    if port_edges_name_output is None:
        port_edges_name_output = (f'EDGE_DRAWING_MOD_SEGMENTS_{gradient_thr}_ANC_THR_{anchor_thr}'
                                  f'_SCAN_{scan_interval}_{op_tag}_{port_input_name}')

    if port_lines_name_output is None:
        port_lines_name_output = f'ED_LINES_MIN_LEN_{min_line_length}_LINE_FIT_ERR_{line_fit_err_thr}_{port_edges_name_output}'

    if port_lines_img_output is None:
        port_lines_img_output = f'ED_LINES_IMG_MIN_LEN_{min_line_length}_LINE_FIT_ERR_{line_fit_err_thr}_{port_edges_name_output}'

    # when a threshold is supplied as a port name, append the level tag so the
    # main function reads the per-level port
    if isinstance(gradient_thr, str):
        gradient_thr += '_' + level
    if isinstance(anchor_thr, str):
        anchor_thr += '_' + level

    output_port_edge_map_name = transform_port_name_lvl(name=port_edge_map_name_output, lvl=level)
    output_port_edge_map_size = transform_port_size_lvl(lvl=level, rgb=False)
    output_port_line_img_name = transform_port_name_lvl(name=port_lines_img_output, lvl=level)
    output_port_line_img_size = transform_port_size_lvl(lvl=level, rgb=False)
    output_port_edges_name = transform_port_name_lvl(name=port_edges_name_output, lvl=level)
    output_port_lines_name = transform_port_name_lvl(name=port_lines_name_output, lvl=level)

    input_port_list = [input_port_name]
    main_func_list = [input_port_name, wave_offset, kernel_x, kernel_y,
                      gradient_thr, anchor_thr, scan_interval,
                      min_line_length, line_fit_err_thr,
                      output_port_edges_name, output_port_edge_map_name,
                      output_port_lines_name, output_port_line_img_name]
    output_port_list = [(output_port_edges_name, f"({max_edges},{max_points_edge}, 2)", 'H', False),
                        (output_port_edge_map_name, output_port_edge_map_size, 'B', True),
                        (output_port_lines_name, f"({max_lines},{max_points_line}, 2)", 'H', False),
                        (output_port_line_img_name, output_port_line_img_size, 'B', True)]

    job_name = job_name_create(action='Ed_Lines', input_list=input_port_list, wave_offset=[wave_offset], level=level)

    jobs_dict.append(
        create_dictionary_element(job_module=get_module_name_from_file(__file__),
                                  job_name=job_name,
                                  input_ports=input_port_list,
                                  init_func_name='init_func_ed_lines', init_func_param=None,
                                  main_func_name='main_ed_line_mod_func',
                                  main_func_param=main_func_list,
                                  output_ports=output_port_list))

    return port_edge_map_name_output, port_edges_name_output, port_lines_name_output, port_lines_img_output
# No standalone entry point: this module is meant to be imported so its
# do_*_job helpers can register jobs in jobs_dict.
if __name__ == "__main__":
    pass
| 51.114929
| 153
| 0.628938
| 5,713
| 43,141
| 4.364432
| 0.057238
| 0.033047
| 0.025989
| 0.027071
| 0.959734
| 0.95063
| 0.918304
| 0.898773
| 0.886821
| 0.879402
| 0
| 0.009025
| 0.278297
| 43,141
| 843
| 154
| 51.175563
| 0.79181
| 0.254236
| 0
| 0.733475
| 0
| 0.008529
| 0.065941
| 0.013168
| 0
| 0
| 0
| 0
| 0
| 1
| 0.021322
| false
| 0.027719
| 0.031983
| 0
| 0.091684
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5a668418f81dc5c953465d01e9627b7fc77fa970
| 13,360
|
py
|
Python
|
trader/chartItem.py
|
freshjang/MyKiwoom
|
6342ec7ba8da55194bb473f9052d87f7fa1a640e
|
[
"MIT"
] | 1
|
2022-01-06T15:06:24.000Z
|
2022-01-06T15:06:24.000Z
|
trader/chartItem.py
|
manjuni/MyKiwoom
|
67230451b02705b87573c43f7404ca9accf1035d
|
[
"MIT"
] | null | null | null |
trader/chartItem.py
|
manjuni/MyKiwoom
|
67230451b02705b87573c43f7404ca9accf1035d
|
[
"MIT"
] | 4
|
2021-11-27T12:09:52.000Z
|
2022-01-14T14:54:27.000Z
|
import os
import sys
import pyqtgraph as pg
from PyQt5.QtGui import QPicture, QPainter
from PyQt5.QtCore import Qt, QRectF, QPointF
sys.path.append(os.path.dirname(os.path.abspath(os.path.dirname(__file__))))
from utility.setting import *
class ChuseItem(pg.GraphicsObject):
    """Chart item that paints one background band per bar, colored by the '추세' (trend) flag."""

    def __init__(self, df, ymin, ymax):
        pg.GraphicsObject.__init__(self)
        self.picture = QPicture()
        self.Chuse(df, ymin, ymax)

    def Chuse(self, df, ymin, ymax):
        # Record all bands into the cached QPicture; every bar except the last gets a band.
        painter = QPainter(self.picture)
        band_height = ymax - ymin
        for idx in range(len(df)):
            if idx < len(df) - 1:
                band_color = color_chuse2 if df['추세'][idx] else color_chuse1
                painter.setBrush(pg.mkBrush(band_color))
                painter.setPen(pg.mkPen(band_color))
                painter.drawRect(QRectF(idx - 1, ymin, 1, band_height))
        painter.end()

    def paint(self, p, *args):
        if args is None:
            return
        p.drawPicture(0, 0, self.picture)

    def boundingRect(self):
        return QRectF(self.picture.boundingRect())
class LastChuseItem(pg.GraphicsObject):
    """Chart item that paints the trend ('추세') band for the most recent bar only."""

    def __init__(self, df, ymin, ymax):
        pg.GraphicsObject.__init__(self)
        self.picture = QPicture()
        self.LastChuse(df, ymin, ymax)

    def LastChuse(self, df, ymin, ymax):
        # Record the single band for the latest bar into the cached QPicture.
        painter = QPainter(self.picture)
        band_color = color_chuse2 if df['추세'][-1] else color_chuse1
        painter.setBrush(pg.mkBrush(band_color))
        painter.setPen(pg.mkPen(band_color))
        painter.drawRect(QRectF(len(df) - 2, ymin, 1, ymax - ymin))
        painter.end()

    def paint(self, p, *args):
        if args is None:
            return
        p.drawPicture(0, 0, self.picture)

    def boundingRect(self):
        return QRectF(self.picture.boundingRect())
class MoveavgItem(pg.GraphicsObject):
    """Pre-renders exponential moving-average (지수이평) lines into a QPicture."""

    def __init__(self, df, gubun):
        pg.GraphicsObject.__init__(self)
        self.picture = QPicture()
        self.Movwavg(df, gubun)

    def Movwavg(self, df, gubun):
        """Draw EMA polylines segment by segment for all rows except the last two.

        The set of EMA columns drawn depends on which chart panel `gubun` is.
        """
        painter = QPainter(self.picture)
        full_ema_charts = [ui_num['차트P2'], ui_num['차트P4'], ui_num['차트P5'],
                           ui_num['차트P7'], ui_num['차트P9']]
        extended_charts = [ui_num['차트P1'], ui_num['차트P3']]
        if gubun in full_ema_charts:
            series = [('지수이평05', color_ema05), ('지수이평10', color_ema10),
                      ('지수이평20', color_ema20), ('지수이평40', color_ema40),
                      ('지수이평60', color_ema60), ('지수이평120', color_ema120)]
        else:
            series = [('지수이평05', color_ema05), ('지수이평20', color_ema20),
                      ('지수이평60', color_ema60)]
            if gubun in extended_charts:
                series += [('지수이평120', color_ema120), ('지수이평240', color_ema240),
                           ('지수이평480', color_ema480)]
        for idx in range(len(df) - 2):
            for column, line_color in series:
                painter.setPen(pg.mkPen(line_color))
                painter.drawLine(QPointF(idx, df[column][idx]),
                                 QPointF(idx + 1, df[column][idx + 1]))
        painter.end()

    def paint(self, p, *args):
        # *args is always a tuple (never None); guard kept for parity with original.
        if args is None:
            return
        p.drawPicture(0, 0, self.picture)

    def boundingRect(self):
        return QRectF(self.picture.boundingRect())
class LastMoveavgItem(pg.GraphicsObject):
    """Renders only the final segment (last two rows) of each EMA line."""

    def __init__(self, df, gubun):
        pg.GraphicsObject.__init__(self)
        self.picture = QPicture()
        self.LastMovwavg(df, gubun)

    def LastMovwavg(self, df, gubun):
        """Draw the last EMA segment per series; the series set depends on `gubun`."""
        painter = QPainter(self.picture)
        full_ema_charts = [ui_num['차트P2'], ui_num['차트P4'], ui_num['차트P5'],
                           ui_num['차트P7'], ui_num['차트P9']]
        if gubun in full_ema_charts:
            series = [('지수이평05', color_ema05), ('지수이평10', color_ema10),
                      ('지수이평20', color_ema20), ('지수이평40', color_ema40),
                      ('지수이평60', color_ema60), ('지수이평120', color_ema120)]
        else:
            series = [('지수이평05', color_ema05), ('지수이평20', color_ema20),
                      ('지수이평60', color_ema60)]
            if gubun in [ui_num['차트P1'], ui_num['차트P3']]:
                series += [('지수이평120', color_ema120), ('지수이평240', color_ema240),
                           ('지수이평480', color_ema480)]
        x_prev, x_last = len(df) - 2, len(df) - 1
        for column, line_color in series:
            painter.setPen(pg.mkPen(line_color))
            painter.drawLine(QPointF(x_prev, df[column][-2]),
                             QPointF(x_last, df[column][-1]))
        painter.end()

    def paint(self, p, *args):
        # *args is always a tuple (never None); guard kept for parity with original.
        if args is None:
            return
        p.drawPicture(0, 0, self.picture)

    def boundingRect(self):
        return QRectF(self.picture.boundingRect())
class CandlestickItem(pg.GraphicsObject):
    """Candlestick chart item covering all rows except the last two."""

    def __init__(self, df):
        pg.GraphicsObject.__init__(self)
        self.picture = QPicture()
        self.CandleSticks(df)

    def CandleSticks(self, df):
        """Draw one candle (wick + body) per row up to len(df) - 2."""
        painter = QPainter(self.picture)
        for idx in range(len(df) - 2):
            close_ = df['현재가'][idx]
            open_ = df['시가'][idx]
            high = df['고가'][idx]
            low = df['저가'][idx]
            # Up candles and down candles use different colours.
            candle_color = color_pluss if close_ >= open_ else color_minus
            painter.setPen(pg.mkPen(candle_color))
            painter.setBrush(pg.mkBrush(candle_color))
            if high != low:
                painter.drawLine(QPointF(idx, high), QPointF(idx, low))
                painter.drawRect(QRectF(idx - 0.25, open_, 0.5, close_ - open_))
            else:
                # Flat bar (high == low): draw a short tick instead of a body.
                painter.drawLine(QPointF(idx - 0.25, close_), QPointF(idx + 0.25, close_))
        painter.end()

    def paint(self, p, *args):
        # *args is always a tuple (never None); guard kept for parity with original.
        if args is None:
            return
        p.drawPicture(0, 0, self.picture)

    def boundingRect(self):
        return QRectF(self.picture.boundingRect())
class LastCandlestickItem(pg.GraphicsObject):
    """Candlestick item for only the last two rows of the dataframe."""

    def __init__(self, df):
        pg.GraphicsObject.__init__(self)
        self.picture = QPicture()
        self.LastCandleStick(df)

    def LastCandleStick(self, df):
        """Draw candles for rows len(df) - 2 and len(df) - 1."""
        painter = QPainter(self.picture)
        for idx in range(max(len(df) - 2, 0), len(df)):
            close_ = df['현재가'][idx]
            open_ = df['시가'][idx]
            high = df['고가'][idx]
            low = df['저가'][idx]
            candle_color = color_pluss if close_ >= open_ else color_minus
            painter.setPen(pg.mkPen(candle_color))
            painter.setBrush(pg.mkBrush(candle_color))
            if high != low:
                painter.drawLine(QPointF(idx, high), QPointF(idx, low))
                painter.drawRect(QRectF(idx - 0.25, open_, 0.5, close_ - open_))
            else:
                # Flat bar (high == low): draw a short tick instead of a body.
                painter.drawLine(QPointF(idx - 0.25, close_), QPointF(idx + 0.25, close_))
        painter.end()

    def paint(self, p, *args):
        # *args is always a tuple (never None); guard kept for parity with original.
        if args is None:
            return
        p.drawPicture(0, 0, self.picture)

    def boundingRect(self):
        return QRectF(self.picture.boundingRect())
class VolumeBarsItem(pg.GraphicsObject):
    """Volume bars coloured by candle direction, for all rows but the last."""

    def __init__(self, df):
        pg.GraphicsObject.__init__(self)
        self.picture = QPicture()
        self.MoneyBars(df)

    def MoneyBars(self, df):
        """Draw one 0.5-unit-wide bar from 0 up to the row's 거래량 value."""
        painter = QPainter(self.picture)
        for idx in range(len(df) - 1):
            close_ = df['현재가'][idx]
            open_ = df['시가'][idx]
            volume = df['거래량'][idx]
            bar_color = color_pluss if close_ >= open_ else color_minus
            painter.setPen(pg.mkPen(bar_color))
            painter.setBrush(pg.mkBrush(bar_color))
            painter.drawRect(QRectF(idx - 0.25, 0, 0.5, volume))
        painter.end()

    def paint(self, p, *args):
        # *args is always a tuple (never None); guard kept for parity with original.
        if args is None:
            return
        p.drawPicture(0, 0, self.picture)

    def boundingRect(self):
        return QRectF(self.picture.boundingRect())
class LastVolumeBarItem(pg.GraphicsObject):
    """A single volume bar for the latest tick, built from scalar values."""

    def __init__(self, x, c, o, v):
        pg.GraphicsObject.__init__(self)
        self.picture = QPicture()
        self.LastMoneybar(x, c, o, v)

    def LastMoneybar(self, x, c, o, v):
        """Draw one bar at x with height v, coloured by c (close) vs o (open)."""
        painter = QPainter(self.picture)
        bar_color = color_pluss if c >= o else color_minus
        painter.setPen(pg.mkPen(bar_color))
        painter.setBrush(pg.mkBrush(bar_color))
        painter.drawRect(QRectF(x - 0.25, 0, 0.5, v))
        painter.end()

    def paint(self, p, *args):
        # *args is always a tuple (never None); guard kept for parity with original.
        if args is None:
            return
        p.drawPicture(0, 0, self.picture)

    def boundingRect(self):
        return QRectF(self.picture.boundingRect())
class CustomViewBox1(pg.ViewBox):
    """ViewBox with rectangle-zoom on drag; right-click restores auto-range."""

    def __init__(self, *args, **kwds):
        pg.ViewBox.__init__(self, *args, **kwds)
        # Drag draws a zoom rectangle instead of panning.
        self.setMouseMode(self.RectMode)
        self.setMouseEnabled(x=False, y=False)

    def mouseClickEvent(self, ev):
        # Right-click resets the view to fit all plotted data.
        if ev.button() == Qt.RightButton:
            self.enableAutoRange()
class CustomViewBox2(pg.ViewBox):
    """ViewBox that swallows all mouse clicks and drags (non-interactive panel)."""

    def __init__(self, *args, **kwds):
        pg.ViewBox.__init__(self, *args, **kwds)
        self.setMouseMode(self.RectMode)
        self.setMouseEnabled(x=False, y=False)

    def mouseClickEvent(self, ev):
        # Intentionally ignore clicks.
        pass

    def mouseDragEvent(self, ev, axis=None):
        # Intentionally ignore drags.
        pass
| 37.00831
| 109
| 0.502919
| 1,611
| 13,360
| 4.080074
| 0.091868
| 0.027385
| 0.049293
| 0.076677
| 0.841168
| 0.832192
| 0.827172
| 0.82063
| 0.796744
| 0.761905
| 0
| 0.071957
| 0.352994
| 13,360
| 360
| 110
| 37.111111
| 0.688454
| 0
| 0
| 0.785942
| 0
| 0
| 0.029192
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.118211
| false
| 0.00639
| 0.019169
| 0.025559
| 0.220447
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ce70611e125d98fad8da9db53b4ce251721be0bb
| 7,451
|
py
|
Python
|
update_supply_chain_information/activity_stream/test/test_activity_stream_endpoint.py
|
uktrade/update-supply-chain-information
|
5cdcc795257b8351cf11b57487b194012ee8886d
|
[
"MIT"
] | null | null | null |
update_supply_chain_information/activity_stream/test/test_activity_stream_endpoint.py
|
uktrade/update-supply-chain-information
|
5cdcc795257b8351cf11b57487b194012ee8886d
|
[
"MIT"
] | 204
|
2021-05-26T16:15:04.000Z
|
2022-02-14T05:10:44.000Z
|
update_supply_chain_information/activity_stream/test/test_activity_stream_endpoint.py
|
uktrade/defend-data-capture
|
5cdcc795257b8351cf11b57487b194012ee8886d
|
[
"MIT"
] | 1
|
2021-06-26T10:28:30.000Z
|
2021-06-26T10:28:30.000Z
|
from unittest import mock
from urllib.parse import urlparse
import pytest
from activity_stream.test.util.hawk import get_hawk_header
# Every test in this module needs database access.
pytestmark = pytest.mark.django_db
class TestActivityStreamEndpoint:
    """Pagination behaviour of the Hawk-authenticated activity stream endpoint.

    NOTE(review): the fixtures (wrapped_union_queryset, logged_in_client,
    endpoint, hawk_authentication_header, hawk_credentials_setting) are defined
    elsewhere in the suite — presumably a conftest.py; confirm there.
    """

    def test_full_page_has_next_link(
        self,
        wrapped_union_queryset,
        logged_in_client,
        hawk_authentication_header,
        hawk_credentials_setting,
        endpoint,
    ):
        # Force the page size to the whole queryset so a single request
        # returns every object on one page.
        page_length = wrapped_union_queryset.count()
        with mock.patch(
            "activity_stream.pagination.ActivityStreamCursorPagination.get_page_size",
            return_value=int(page_length),
        ):
            with mock.patch(
                "django.conf.settings.HAWK_CREDENTIALS", hawk_credentials_setting
            ):
                response = logged_in_client.get(
                    endpoint, HTTP_AUTHORIZATION=hawk_authentication_header
                )
                assert response.status_code == 200
                json = response.json()
                # Even a page holding all objects still advertises "next".
                assert "next" in json

    def test_full_page_has_all_objects(
        self,
        wrapped_union_queryset,
        logged_in_client,
        hawk_authentication_header,
        hawk_credentials_setting,
        endpoint,
    ):
        page_length = wrapped_union_queryset.count()
        with mock.patch(
            "activity_stream.pagination.ActivityStreamCursorPagination.get_page_size",
            return_value=int(page_length),
        ):
            with mock.patch(
                "django.conf.settings.HAWK_CREDENTIALS", hawk_credentials_setting
            ):
                response = logged_in_client.get(
                    endpoint, HTTP_AUTHORIZATION=hawk_authentication_header
                )
                assert response.status_code == 200
                json = response.json()
                # The single page must contain the whole queryset.
                assert len(json["orderedItems"]) == page_length

    def test_last_page_has_no_next_link(
        self,
        wrapped_union_queryset,
        logged_in_client,
        hawk_credentials_setting,
        hawk_authentication_header,
        endpoint,
    ):
        page_length = wrapped_union_queryset.count()
        with mock.patch(
            "activity_stream.pagination.ActivityStreamCursorPagination.get_page_size",
            return_value=int(page_length),
        ):
            with mock.patch(
                "django.conf.settings.HAWK_CREDENTIALS", hawk_credentials_setting
            ):
                response = logged_in_client.get(
                    endpoint, HTTP_AUTHORIZATION=hawk_authentication_header
                )
                json = response.json()
                next_page_link = json["next"]
                # The Hawk signature covers path and query string, so a fresh
                # header must be computed for the follow-up request.
                url_parts = urlparse(next_page_link)
                next_page_path = url_parts.path
                if url_parts.query:
                    next_page_path = f"{next_page_path}?{url_parts.query}"
                hawk_authentication_header = get_hawk_header(
                    access_key_id=hawk_credentials_setting["testsettings"]["id"],
                    secret_access_key=hawk_credentials_setting["testsettings"]["key"],
                    method="GET",
                    host="testserver",
                    port="80",
                    path=next_page_path,
                    content_type=b"",
                    content=b"",
                )
                response = logged_in_client.get(
                    next_page_link, HTTP_AUTHORIZATION=hawk_authentication_header
                )
                assert response.status_code == 200
                json = response.json()
                # The final (empty) page carries no "next" link.
                assert "next" not in json

    def test_last_page_has_no_objects(
        self,
        wrapped_union_queryset,
        logged_in_client,
        hawk_authentication_header,
        hawk_credentials_setting,
        endpoint,
    ):
        page_length = wrapped_union_queryset.count()
        with mock.patch(
            "activity_stream.pagination.ActivityStreamCursorPagination.get_page_size",
            return_value=int(page_length),
        ):
            with mock.patch(
                "django.conf.settings.HAWK_CREDENTIALS", hawk_credentials_setting
            ):
                response = logged_in_client.get(
                    endpoint, HTTP_AUTHORIZATION=hawk_authentication_header
                )
                json = response.json()
                next_page_link = json["next"]
                # Re-sign the request for the new path (see note above about
                # Hawk covering path + query).
                url_parts = urlparse(next_page_link)
                next_page_path = url_parts.path
                if url_parts.query:
                    next_page_path = f"{next_page_path}?{url_parts.query}"
                hawk_authentication_header = get_hawk_header(
                    access_key_id=hawk_credentials_setting["testsettings"]["id"],
                    secret_access_key=hawk_credentials_setting["testsettings"]["key"],
                    method="GET",
                    host="testserver",
                    port="80",
                    path=next_page_path,
                    content_type=b"",
                    content=b"",
                )
                response = logged_in_client.get(
                    next_page_link, HTTP_AUTHORIZATION=hawk_authentication_header
                )
                assert response.status_code == 200
                json = response.json()
                # The page after the full page is empty.
                assert len(json["orderedItems"]) == 0

    def test_multiple_pages_have_items_in_order_of_last_modified_date_ascending(
        self,
        wrapped_union_queryset,
        logged_in_client,
        hawk_credentials_setting,
        endpoint,
    ):
        page_length = (
            wrapped_union_queryset.count() // 3
        )  # ensure things are spread over several pages
        with mock.patch(
            "activity_stream.pagination.ActivityStreamCursorPagination.get_page_size",
            return_value=int(page_length),
        ):
            results = []
            with mock.patch(
                "django.conf.settings.HAWK_CREDENTIALS", hawk_credentials_setting
            ):
                # Walk every page via its "next" link, re-signing each request,
                # and collect a boolean per adjacent item pair.
                while endpoint:
                    url_parts = urlparse(endpoint)
                    next_page_path = url_parts.path
                    if url_parts.query:
                        next_page_path = f"{next_page_path}?{url_parts.query}"
                    hawk_authentication_header = get_hawk_header(
                        access_key_id=hawk_credentials_setting["testsettings"]["id"],
                        secret_access_key=hawk_credentials_setting["testsettings"][
                            "key"
                        ],
                        method="GET",
                        host="testserver",
                        port="80",
                        path=next_page_path,
                        content_type=b"",
                        content=b"",
                    )
                    response = logged_in_client.get(
                        endpoint, HTTP_AUTHORIZATION=hawk_authentication_header
                    )
                    assert response.status_code == 200
                    json = response.json()
                    items = json["orderedItems"]
                    # True iff each adjacent pair is in ascending
                    # last_modified order.
                    results += [
                        earlier["object"]["last_modified"]
                        <= later["object"]["last_modified"]
                        for earlier, later in zip(items[:-1], items[1:])
                    ]
                    endpoint = json.get("next", None)
            assert all(results)
| 39.215789
| 86
| 0.549054
| 681
| 7,451
| 5.627019
| 0.161527
| 0.082203
| 0.091858
| 0.040188
| 0.865344
| 0.85595
| 0.845511
| 0.845511
| 0.845511
| 0.845511
| 0
| 0.005391
| 0.377667
| 7,451
| 189
| 87
| 39.42328
| 0.821005
| 0.005771
| 0
| 0.766667
| 0
| 0
| 0.117202
| 0.086686
| 0
| 0
| 0
| 0
| 0.055556
| 1
| 0.027778
| false
| 0
| 0.022222
| 0
| 0.055556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0c66a4abf3d4bffddd545cdc0c68d5dcd0f0c500
| 7,054
|
py
|
Python
|
dags/kd03-dags/KD_FACTOR_LEVEL2_2016_03_16_kd05.py
|
ywf5566/airflow
|
e7872dddbf275729b2c42e2a4ff602a6df7d1536
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
dags/kd03-dags/KD_FACTOR_LEVEL2_2016_03_16_kd05.py
|
ywf5566/airflow
|
e7872dddbf275729b2c42e2a4ff602a6df7d1536
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
dags/kd03-dags/KD_FACTOR_LEVEL2_2016_03_16_kd05.py
|
ywf5566/airflow
|
e7872dddbf275729b2c42e2a4ff602a6df7d1536
|
[
"Apache-2.0",
"BSD-2-Clause",
"MIT",
"ECL-2.0",
"BSD-3-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from datetime import datetime
from airflow import DAG
from airflow.operators.bash_operator import BashOperator
default_args = {'owner': 'afroot03'}

dag = DAG('KD-FACTOR-LEVEL2-2016-03-16-kd05',
          default_args=default_args,
          schedule_interval=None,
          start_date=datetime(2020, 12, 18, 18, 30))

# Every task runs the same factor-exec script; only the factor id and the
# date window differ, so the operators are built through one helper instead
# of 25 copy-pasted BashOperator calls.
_SCRIPT = "sh /usr/lib/quant/factor/factor_repo/kdfactor/scripts/factor-exec-2016-2.sh"


def _factor_task(task_id, factor_id, start, end):
    """Return a BashOperator computing one factor over [start, end].

    The trailing space in the command reproduces the original hand-written
    commands exactly.
    """
    return BashOperator(
        task_id=task_id,
        bash_command=f"{_SCRIPT} {factor_id} {start} {end} False ",
        dag=dag,
    )


fac_daily_l2_actbsdelta_ordercnt = _factor_task("fac_daily_l2_actbsdelta_ordercnt", 1099895, "2016-03-16", "2016-03-16")
fac_daily_l2_madl_actntfl_to_proptinall_closecorr10 = _factor_task("fac_daily_l2_madl_actntfl_to_proptinall_closecorr10", 1442138, "2016-03-16", "2016-04-16")
fac_daily_l2_activebuy_turnover_close_propt = _factor_task("fac_daily_l2_activebuy_turnover_close_propt", 1327650, "2016-03-16", "2016-04-16")
fac_daily_l2_madlarge_activesell_turnover_closecorr10 = _factor_task("fac_daily_l2_madlarge_activesell_turnover_closecorr10", 1440914, "2016-03-16", "2016-04-16")
fac_daily_l2_firstn_mainforce_turnover_closecorr10 = _factor_task("fac_daily_l2_firstn_mainforce_turnover_closecorr10", 1441534, "2016-03-16", "2016-04-16")
fac_daily_l2_firstn_mainforce_turnover_netinflow_propt_m10 = _factor_task("fac_daily_l2_firstn_mainforce_turnover_netinflow_propt_m10", 1442809, "2016-03-16", "2016-04-16")
fac_daily_l2_firstn_mainforce_sell_turnover_closecorr10 = _factor_task("fac_daily_l2_firstn_mainforce_sell_turnover_closecorr10", 1441492, "2016-03-16", "2016-04-16")
fac_daily_l2_madlarge_activebuy_turnover_closecorr10 = _factor_task("fac_daily_l2_madlarge_activebuy_turnover_closecorr10", 1440886, "2016-03-16", "2016-04-16")
fac_daily_l2_actnetinflow_turnover_proptinall_closecorr10 = _factor_task("fac_daily_l2_actnetinflow_turnover_proptinall_closecorr10", 1442800, "2016-03-16", "2016-04-16")
fac_daily_l2_firstn_mainforce_buy_turnover_closecorr10 = _factor_task("fac_daily_l2_firstn_mainforce_buy_turnover_closecorr10", 1441478, "2016-03-16", "2016-04-16")
fac_daily_l2_actbsdelta_volume_proptinall_mptorate = _factor_task("fac_daily_l2_actbsdelta_volume_proptinall_mptorate", 1384238, "2016-03-16", "2016-04-16")
fac_daily_l2_madlarge_netinflow_turnover_closecorr10 = _factor_task("fac_daily_l2_madlarge_netinflow_turnover_closecorr10", 1442082, "2016-03-16", "2016-04-16")
fac_daily_l2_madl_ntfl_to_actntfl_propt_closecorr10 = _factor_task("fac_daily_l2_madl_ntfl_to_actntfl_propt_closecorr10", 1442124, "2016-03-16", "2016-04-16")
fac_daily_l2_firstn_mf_ntfl_to_proptinall_closecorr10 = _factor_task("fac_daily_l2_firstn_mf_ntfl_to_proptinall_closecorr10", 1442828, "2016-03-16", "2016-04-16")
fac_daily_l2_madlarge_passivebuy_turnover_closecorr10 = _factor_task("fac_daily_l2_madlarge_passivebuy_turnover_closecorr10", 1441422, "2016-03-16", "2016-04-16")
fac_daily_l2_madl_ntfl_to_proptinall_closecorr10 = _factor_task("fac_daily_l2_madl_ntfl_to_proptinall_closecorr10", 1443180, "2016-03-16", "2016-04-16")
fac_daily_l2_actnetinflow_turnover_closecorr10 = _factor_task("fac_daily_l2_actnetinflow_turnover_closecorr10", 1441520, "2016-03-16", "2016-04-16")
fac_daily_l2_activesell_turnover_closecorr10 = _factor_task("fac_daily_l2_activesell_turnover_closecorr10", 1441464, "2016-03-16", "2016-04-16")
fac_daily_l2_madlarge_buy_turnover_pertrans_closecorr10 = _factor_task("fac_daily_l2_madlarge_buy_turnover_pertrans_closecorr10", 1442096, "2016-03-16", "2016-04-16")
fac_daily_l2_madlarge_sell_turnover_pertrans_closecorr10 = _factor_task("fac_daily_l2_madlarge_sell_turnover_pertrans_closecorr10", 1442462, "2016-03-16", "2016-04-16")
fac_daily_l2_madl_psvntfl_to_proptinall_closecorr10 = _factor_task("fac_daily_l2_madl_psvntfl_to_proptinall_closecorr10", 1442597, "2016-03-16", "2016-04-16")
fac_daily_l2_activebuy_turnover_closecorr10 = _factor_task("fac_daily_l2_activebuy_turnover_closecorr10", 1441450, "2016-03-16", "2016-04-16")
fac_daily_l2_firstn_mainforce_netinflow_turnover_closecorr10 = _factor_task("fac_daily_l2_firstn_mainforce_netinflow_turnover_closecorr10", 1441548, "2016-03-16", "2016-04-16")
fac_daily_l2_madlarge_passivesell_turnover_closecorr10 = _factor_task("fac_daily_l2_madlarge_passivesell_turnover_closecorr10", 1441254, "2016-03-16", "2016-04-16")
fac_daily_l2_madl_actntfl_to_proptinall_msdelta = _factor_task("fac_daily_l2_madl_actntfl_to_proptinall_msdelta", 1405255, "2016-03-16", "2016-03-16")
| 176.35
| 285
| 0.84562
| 1,145
| 7,054
| 4.859389
| 0.092576
| 0.071891
| 0.089863
| 0.094357
| 0.923976
| 0.917505
| 0.917505
| 0.882279
| 0.862329
| 0.740474
| 0
| 0.128129
| 0.048483
| 7,054
| 39
| 286
| 180.871795
| 0.700834
| 0.005954
| 0
| 0
| 0
| 0.757576
| 0.586817
| 0.442288
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.060606
| 0.090909
| 0
| 0.090909
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 10
|
0cefe266e7bfeb569cc710e22479aad53888f734
| 3,634
|
py
|
Python
|
ECE-1160/Raspberry-Pi-IO/L2_1.py
|
zmattis/University_of_Pittsburgh
|
29ba0f4686f34d633b474bb792cf0e6cee8b0f1c
|
[
"MIT"
] | 6
|
2017-07-21T17:56:15.000Z
|
2021-07-05T09:25:12.000Z
|
ECE-1160/Raspberry-Pi-IO/L2_1.py
|
zmattis/University_of_Pittsburgh
|
29ba0f4686f34d633b474bb792cf0e6cee8b0f1c
|
[
"MIT"
] | null | null | null |
ECE-1160/Raspberry-Pi-IO/L2_1.py
|
zmattis/University_of_Pittsburgh
|
29ba0f4686f34d633b474bb792cf0e6cee8b0f1c
|
[
"MIT"
] | 4
|
2018-10-14T03:28:19.000Z
|
2021-03-04T07:41:07.000Z
|
#!/usr/bin/python
# course: ECE 1160
# laboratory: 2
# date: 10/04/18
# username: zmm15
# name: Zachary M. Mattis
# title: Raspberry Pi SenseHat LED
# description: Dynamic and Continuous display over SenseHat LED Matrix
from sense_hat import SenseHat
import color
import time
# Seconds to pause between animation frames.
SLEEP_TIME = 1
# Number of full scroll cycles to perform.
NUM_SCROLL = 3
class Arrow(object):
    """8x8 SenseHat LED frames of a red chevron at three positions, plus a scroller.

    Each frame is a 64-element row-major pixel list: a white background with a
    red '<' chevron whose tip sits in a different column per frame.

    NOTE(review): the module does `import color` but this class references a
    bare `Color` name, which would raise NameError at class creation.  It
    presumably needs `from color import Color` (or `Color = color.Color`) —
    confirm against the color module; left unchanged here.
    """

    def _chevron(tip_column):
        """Build one frame: red pixels form a '<' with its tip at tip_column.

        Rows 1..7 carry one red pixel each at column tip_column + |row - 4|;
        row 0 stays white.  This reproduces the original hand-written arrays
        exactly.
        """
        pixels = [Color.WHITE] * 64
        for row in range(1, 8):
            pixels[row * 8 + tip_column + abs(row - 4)] = Color.RED
        return pixels

    left = _chevron(0)
    center = _chevron(2)
    right = _chevron(4)
    blank = [Color.WHITE] * 64
    del _chevron  # build-time helper only; not part of the class API

    sense_hat = SenseHat()

    @staticmethod
    def scroll(duration):
        """Show right -> center -> left -> blank, `duration` times, pausing SLEEP_TIME between frames.

        Fixes over the original: `range(duration)` (an int is not iterable),
        and explicit `Arrow.` qualification (`this` does not exist in Python,
        and class attributes are not in scope inside a staticmethod body).
        """
        for _ in range(duration):
            for frame in (Arrow.right, Arrow.center, Arrow.left, Arrow.blank):
                Arrow.sense_hat.set_pixels(frame)
                time.sleep(SLEEP_TIME)


Arrow.scroll(NUM_SCROLL)
| 58.612903
| 117
| 0.668134
| 496
| 3,634
| 4.860887
| 0.120968
| 0.713397
| 1.051431
| 1.227706
| 0.836168
| 0.827457
| 0.827457
| 0.827457
| 0.827457
| 0.77893
| 0
| 0.005884
| 0.205008
| 3,634
| 61
| 118
| 59.57377
| 0.82866
| 0.064117
| 0
| 0.454545
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.022727
| false
| 0
| 0.068182
| 0
| 0.227273
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
0cfb7ebd21c693915aa0fb58a08e79c2c8af4723
| 132
|
py
|
Python
|
rawg/tag.py
|
graynk/FindGameBot
|
62e5d2ecb6538e63bce584e2ca367ae21cd47011
|
[
"Apache-2.0"
] | 2
|
2021-09-20T17:40:59.000Z
|
2021-10-09T13:48:49.000Z
|
rawg/tag.py
|
graynk/FindGameBot
|
62e5d2ecb6538e63bce584e2ca367ae21cd47011
|
[
"Apache-2.0"
] | null | null | null |
rawg/tag.py
|
graynk/FindGameBot
|
62e5d2ecb6538e63bce584e2ca367ae21cd47011
|
[
"Apache-2.0"
] | null | null | null |
from dataclasses import dataclass
from dataclasses_json import dataclass_json
@dataclass_json
@dataclass
class Tag:
    """A game tag with a single name field, JSON-(de)serialisable via dataclasses_json."""

    name: str
| 14.666667
| 43
| 0.818182
| 17
| 132
| 6.176471
| 0.529412
| 0.285714
| 0.419048
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.151515
| 132
| 8
| 44
| 16.5
| 0.9375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0b224d7ac527c9f73df32fefbbda284e9f1b8762
| 4,690
|
py
|
Python
|
test/validation/operators/boundary/test_maxwell_boundary.py
|
ignacia-fp/bempp-cl
|
a65232558826e51e624b1a4f649b6a0ed5a7f551
|
[
"MIT"
] | 70
|
2019-09-04T15:15:05.000Z
|
2022-03-22T16:54:40.000Z
|
test/validation/operators/boundary/test_maxwell_boundary.py
|
ignacia-fp/bempp-cl
|
a65232558826e51e624b1a4f649b6a0ed5a7f551
|
[
"MIT"
] | 66
|
2020-01-16T08:31:00.000Z
|
2022-03-25T11:18:59.000Z
|
test/validation/operators/boundary/test_maxwell_boundary.py
|
ignacia-fp/bempp-cl
|
a65232558826e51e624b1a4f649b6a0ed5a7f551
|
[
"MIT"
] | 22
|
2019-09-30T08:50:33.000Z
|
2022-03-20T19:37:22.000Z
|
"""Unit tests for modified Helmholtz operators."""
# pylint: disable=redefined-outer-name
# pylint: disable=C0103
import numpy as _np
import pytest
# Apply the shared parameter/helper fixtures to every test in this module.
pytestmark = pytest.mark.usefixtures("default_parameters", "helpers")
def test_maxwell_electric_field_sphere(
    default_parameters, helpers, device_interface, precision
):
    """Test Maxwell electric field on sphere."""
    from bempp.api import function_space
    from bempp.api.operators.boundary.maxwell import electric_field

    grid = helpers.load_grid("sphere")
    domain = function_space(grid, "RWG", 0)
    dual_to_range = function_space(grid, "SNC", 0)

    # Dense assembly of the electric field operator at wavenumber 2.5.
    weak_op = electric_field(
        domain,
        domain,
        dual_to_range,
        2.5,
        assembler="dense",
        device_interface=device_interface,
        precision=precision,
        parameters=default_parameters,
    ).weak_form()

    # Tolerances depend on the floating-point precision under test.
    rtol, atol = (1e-5, 1e-7) if precision == "single" else (1e-10, 1e-14)

    expected = helpers.load_npy_data("maxwell_electric_field_boundary")
    _np.testing.assert_allclose(weak_op.to_dense(), expected, rtol=rtol, atol=atol)
def test_maxwell_electric_field_rbc_bc_sphere(
    default_parameters, helpers, device_interface, precision, skip
):
    """Test Maxwell electric field on sphere with RBC/BC basis."""
    if skip == "circleci":
        pytest.skip()
    import bempp.api
    from bempp.api import function_space
    from bempp.api.operators.boundary.maxwell import electric_field

    grid = helpers.load_grid("sphere")
    space1 = function_space(grid, "BC", 0)
    space2 = function_space(grid, "RBC", 0)
    # Fixed seed keeps the probe vector deterministic across runs.
    rand = _np.random.RandomState(0)
    vec = rand.rand(space1.global_dof_count)
    # Process-wide toggle: force dense evaluation in the FMM assembler for
    # this comparison; restored to False right after the product below.
    bempp.api.GLOBAL_PARAMETERS.fmm.dense_evaluation = True
    discrete_op = electric_field(
        space1,
        space1,
        space2,
        2.5,
        assembler="fmm",
        device_interface=device_interface,
        precision=precision,
        parameters=default_parameters,
    ).weak_form()
    actual = discrete_op @ vec
    bempp.api.GLOBAL_PARAMETERS.fmm.dense_evaluation = False
    if precision == "single":
        rtol = 5e-5
        atol = 5e-6
    else:
        rtol = 1e-10
        atol = 1e-14
    # Compare matrix-vector products against the stored dense reference.
    mat = helpers.load_npy_data("maxwell_electric_field_boundary_rbc_bc")
    expected = mat @ vec
    _np.testing.assert_allclose(actual, expected, rtol=rtol, atol=atol)
    bempp.api.clear_fmm_cache()
def test_maxwell_electric_field_bc_sphere(
    default_parameters, helpers, device_interface, precision, skip
):
    """Test Maxwell electric field on sphere with BC basis."""
    if skip == "circleci":
        pytest.skip()
    import bempp.api
    from bempp.api import function_space
    from bempp.api.operators.boundary.maxwell import electric_field

    grid = helpers.load_grid("sphere")
    space1 = function_space(grid, "BC", 0)
    space2 = function_space(grid, "SNC", 0)
    # Fixed seed keeps the probe vector deterministic across runs.
    rand = _np.random.RandomState(0)
    vec = rand.rand(space1.global_dof_count)
    # Process-wide toggle: force dense evaluation in the FMM assembler for
    # this comparison; restored to False right after the product below.
    bempp.api.GLOBAL_PARAMETERS.fmm.dense_evaluation = True
    discrete_op = electric_field(
        space1,
        space1,
        space2,
        2.5,
        assembler="fmm",
        device_interface=device_interface,
        precision=precision,
        parameters=default_parameters,
    ).weak_form()
    actual = discrete_op @ vec
    bempp.api.GLOBAL_PARAMETERS.fmm.dense_evaluation = False
    if precision == "single":
        rtol = 1e-4
        atol = 5e-6
    else:
        rtol = 1e-10
        atol = 1e-14
    # Compare matrix-vector products against the stored dense reference.
    mat = helpers.load_npy_data("maxwell_electric_field_boundary_bc")
    expected = mat @ vec
    _np.testing.assert_allclose(actual, expected, rtol=rtol, atol=atol)
    bempp.api.clear_fmm_cache()
def test_maxwell_magnetic_field_sphere(
    default_parameters, helpers, device_interface, precision
):
    """Test Maxwell magnetic field on sphere."""
    from bempp.api import function_space
    from bempp.api.operators.boundary.maxwell import magnetic_field

    grid = helpers.load_grid("sphere")
    domain = function_space(grid, "RWG", 0)
    dual_to_range = function_space(grid, "SNC", 0)
    weak_op = magnetic_field(
        domain,
        domain,
        dual_to_range,
        2.5,
        assembler="dense",
        device_interface=device_interface,
        precision=precision,
        parameters=default_parameters,
    ).weak_form()
    # Tolerances depend on the floating-point precision being exercised.
    rtol, atol = (1e-5, 1e-7) if precision == "single" else (1e-10, 1e-14)
    reference = helpers.load_npy_data("maxwell_magnetic_field_boundary")
    _np.testing.assert_allclose(weak_op.to_dense(), reference, rtol=rtol, atol=atol)
| 25.911602
| 87
| 0.66823
| 572
| 4,690
| 5.255245
| 0.166084
| 0.042582
| 0.05988
| 0.047904
| 0.92515
| 0.903859
| 0.901198
| 0.9002
| 0.897538
| 0.897538
| 0
| 0.023372
| 0.233689
| 4,690
| 180
| 88
| 26.055556
| 0.813022
| 0.06226
| 0
| 0.874016
| 0
| 0
| 0.059712
| 0.030657
| 0
| 0
| 0
| 0
| 0.031496
| 1
| 0.031496
| false
| 0
| 0.094488
| 0
| 0.125984
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0b3ebeeb76a9a12dc1a4a5e6ce851fbca71555d6
| 3,125
|
py
|
Python
|
Plot_power_cresc.py
|
mkraft89/To_eels_app
|
9e2fc6d501f51130b12164997f40cb7e0ec5a654
|
[
"Unlicense"
] | null | null | null |
Plot_power_cresc.py
|
mkraft89/To_eels_app
|
9e2fc6d501f51130b12164997f40cb7e0ec5a654
|
[
"Unlicense"
] | null | null | null |
Plot_power_cresc.py
|
mkraft89/To_eels_app
|
9e2fc6d501f51130b12164997f40cb7e0ec5a654
|
[
"Unlicense"
] | null | null | null |
#Plot the potential of the periodic surface with a dipole source
import numpy as np
import matplotlib.pyplot as plt
from Calc_power_cresc import Pow_abs_rad, \
Pow_abs_rad_r,\
Pow_abs_rad_hori,\
Pow_sca_rad, Pow_sca_r,\
Pow_sca_hori
def Absorption(R1_cyl, R2_cyl, inv, epos, sca, vel, orientation):
    """Plot the absorbed-power spectrum Q(omega) on a log scale in figure 4.

    R1_cyl, R2_cyl -- cylinder radii in the transformed (cylinder) frame
    inv, sca       -- inversion point and scale of the conformal map
    epos           -- electron offset from the structure boundary
    vel            -- electron speed as a fraction of c
    orientation    -- 1: radial (inner side), 3: radial (outer side),
                      otherwise: horizontal trajectory
    """
    phi = np.arange(0, 2*np.pi, 0.001)
    # Structure boundaries mapped into the physical frame via z = sca/(w - inv).
    z_1 = sca / (R1_cyl*np.exp(1j*phi) - inv)
    z_2 = sca / (R2_cyl*np.exp(1j*phi) - inv)
    c = 3e8  # speed of light [m/s]
    omega = np.arange(0.01, 8, 0.01)  # photon energies scanned [eV]
    c_e = vel*c  # electron velocity [m/s]
    plt.figure(4)
    plt.clf()
    plt.subplot(111)
    # Pick the electron position and the matching power routine once, then run
    # a single frequency loop (the three original branches only differed here).
    if orientation == 1:
        x_e0 = min(np.real(z_2))
        x_e = x_e0 + np.sign(x_e0)*epos
        power = Pow_abs_rad
    elif orientation == 3:
        x_e0 = max(np.real(z_2))
        x_e = x_e0 + epos
        power = Pow_abs_rad_r
    else:
        # Horizontal trajectory: offset is measured from the top of boundary 1.
        y_e0 = max(np.imag(z_1))
        x_e = y_e0 + epos
        power = Pow_abs_rad_hori
    Q = np.zeros(np.size(omega))
    for m in range(np.size(omega)):
        Q[m] = power(inv, x_e, c_e, sca, R1_cyl, R2_cyl, omega[m])
    plt.plot(omega, Q/1.6e-19)  # convert J -> eV
    plt.yscale('log')
    # Raw string: '\o' is an invalid escape in a normal string literal.
    plt.xlabel(r'$\omega/eV$')
    plt.ylabel('Q/eV')
    plt.gcf().tight_layout()
    plt.figure(4).canvas.draw()
def Scattering(R1_cyl, R2_cyl, inv, epos, sca, vel, orientation):
    """Plot the scattered-power spectrum S(omega) on a log scale in figure 5.

    R1_cyl, R2_cyl -- cylinder radii in the transformed (cylinder) frame
    inv, sca       -- inversion point and scale of the conformal map
    epos           -- electron offset from the structure boundary
    vel            -- electron speed as a fraction of c
    orientation    -- 1: radial (inner side), 3: radial (outer side),
                      otherwise: horizontal trajectory
    """
    phi = np.arange(0, 2*np.pi, 0.001)
    # Structure boundaries mapped into the physical frame via z = sca/(w - inv).
    z_1 = sca / (R1_cyl*np.exp(1j*phi) - inv)
    z_2 = sca / (R2_cyl*np.exp(1j*phi) - inv)
    c = 3e8  # speed of light [m/s]
    omega = np.arange(0.01, 8, 0.01)  # photon energies scanned [eV]
    c_e = vel*c  # electron velocity [m/s]
    plt.figure(5)
    plt.clf()
    plt.subplot(111)
    # Pick the electron position and the matching power routine once, then run
    # a single frequency loop (the three original branches only differed here).
    if orientation == 1:
        x_e0 = min(np.real(z_2))
        x_e = x_e0 + np.sign(x_e0)*epos
        power = Pow_sca_rad
    elif orientation == 3:
        x_e0 = max(np.real(z_2))
        x_e = x_e0 + epos
        power = Pow_sca_r
    else:
        # Horizontal trajectory: offset is measured from the top of boundary 1.
        y_e0 = max(np.imag(z_1))
        x_e = y_e0 + epos
        power = Pow_sca_hori
    S = np.zeros(np.size(omega))
    for m in range(np.size(omega)):
        S[m] = power(inv, x_e, c_e, sca, R1_cyl, R2_cyl, omega[m])
    plt.plot(omega, S/1.6e-19)  # convert J -> eV
    plt.yscale('log')
    # Raw string: '\o' is an invalid escape in a normal string literal.
    plt.xlabel(r'$\omega/eV$')
    plt.ylabel('Scattering/eV')
    plt.gcf().tight_layout()
    plt.figure(5).canvas.draw()
| 26.483051
| 81
| 0.55744
| 561
| 3,125
| 2.934046
| 0.192513
| 0.014581
| 0.034022
| 0.048603
| 0.854192
| 0.831106
| 0.831106
| 0.797084
| 0.797084
| 0.797084
| 0
| 0.067059
| 0.29376
| 3,125
| 117
| 82
| 26.709402
| 0.678749
| 0.08608
| 0
| 0.701299
| 0
| 0
| 0.015817
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025974
| false
| 0
| 0.038961
| 0
| 0.064935
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0b46a745a6dcdb583716115ab3708a7928a01133
| 376
|
py
|
Python
|
tests/bool/boolop.py
|
chrivers/pyjaco
|
8ad793dce34ab7aed3b973aae729d6a943a2381c
|
[
"MIT"
] | 38
|
2015-01-01T18:08:59.000Z
|
2022-02-18T08:57:27.000Z
|
tests/bool/boolop.py
|
chrivers/pyjaco
|
8ad793dce34ab7aed3b973aae729d6a943a2381c
|
[
"MIT"
] | 1
|
2020-01-08T04:32:52.000Z
|
2020-01-08T04:32:52.000Z
|
tests/bool/boolop.py
|
chrivers/pyjaco
|
8ad793dce34ab7aed3b973aae729d6a943a2381c
|
[
"MIT"
] | 12
|
2016-03-07T09:30:49.000Z
|
2021-09-05T20:38:47.000Z
|
A = [1, 2, 3, 4, 5, 6]
B = [1, 2, 3, 4, 5, 6]
C = [1, 2, 3, 4, 5, 6]
for a in A:
for b in B:
for c in C:
if a > b > c:
print ">", (a, b, c)
if a < b < c:
print ">", (a, b, c)
if a >= b >= c:
print ">=", (a, b, c)
if a <= b <= c:
print "<=", (a, b, c)
| 23.5
| 37
| 0.25
| 65
| 376
| 1.446154
| 0.2
| 0.170213
| 0.255319
| 0.212766
| 0.755319
| 0.755319
| 0.56383
| 0.56383
| 0.56383
| 0.56383
| 0
| 0.105263
| 0.545213
| 376
| 15
| 38
| 25.066667
| 0.444444
| 0
| 0
| 0.142857
| 0
| 0
| 0.015957
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0.285714
| 0
| 0
| 1
| null | 0
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0baa1f03a286d754b92f9b1d9c53e6b98745032e
| 36,998
|
py
|
Python
|
SGL-pdarts/genotypes.py
|
chaitanyaspatil/DeepLearningSGL
|
3f6b55030277d109b09441a7a6a48f8be3b45d57
|
[
"MIT"
] | null | null | null |
SGL-pdarts/genotypes.py
|
chaitanyaspatil/DeepLearningSGL
|
3f6b55030277d109b09441a7a6a48f8be3b45d57
|
[
"MIT"
] | null | null | null |
SGL-pdarts/genotypes.py
|
chaitanyaspatil/DeepLearningSGL
|
3f6b55030277d109b09441a7a6a48f8be3b45d57
|
[
"MIT"
] | null | null | null |
from collections import namedtuple
Genotype = namedtuple('Genotype', 'normal normal_concat reduce reduce_concat')
PRIMITIVES = [
'none',
'max_pool_3x3',
'avg_pool_3x3',
'skip_connect',
'sep_conv_3x3',
'sep_conv_5x5',
'dil_conv_3x3',
'dil_conv_5x5'
]
NASNet = Genotype(
normal = [
('sep_conv_5x5', 1),
('sep_conv_3x3', 0),
('sep_conv_5x5', 0),
('sep_conv_3x3', 0),
('avg_pool_3x3', 1),
('skip_connect', 0),
('avg_pool_3x3', 0),
('avg_pool_3x3', 0),
('sep_conv_3x3', 1),
('skip_connect', 1),
],
normal_concat = [2, 3, 4, 5, 6],
reduce = [
('sep_conv_5x5', 1),
('sep_conv_7x7', 0),
('max_pool_3x3', 1),
('sep_conv_7x7', 0),
('avg_pool_3x3', 1),
('sep_conv_5x5', 0),
('skip_connect', 3),
('avg_pool_3x3', 2),
('sep_conv_3x3', 2),
('max_pool_3x3', 1),
],
reduce_concat = [4, 5, 6],
)
AmoebaNet = Genotype(
normal = [
('avg_pool_3x3', 0),
('max_pool_3x3', 1),
('sep_conv_3x3', 0),
('sep_conv_5x5', 2),
('sep_conv_3x3', 0),
('avg_pool_3x3', 3),
('sep_conv_3x3', 1),
('skip_connect', 1),
('skip_connect', 0),
('avg_pool_3x3', 1),
],
normal_concat = [4, 5, 6],
reduce = [
('avg_pool_3x3', 0),
('sep_conv_3x3', 1),
('max_pool_3x3', 0),
('sep_conv_7x7', 2),
('sep_conv_7x7', 0),
('avg_pool_3x3', 1),
('max_pool_3x3', 0),
('max_pool_3x3', 1),
('conv_7x1_1x7', 0),
('sep_conv_3x3', 5),
],
reduce_concat = [3, 4, 6]
)
DARTS_V1 = Genotype(normal=[('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('skip_connect', 2)], normal_concat=[2, 3, 4, 5], reduce=[('max_pool_3x3', 0), ('max_pool_3x3', 1), ('skip_connect', 2), ('max_pool_3x3', 0), ('max_pool_3x3', 0), ('skip_connect', 2), ('skip_connect', 2), ('avg_pool_3x3', 0)], reduce_concat=[2, 3, 4, 5])
DARTS_V2 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 1), ('skip_connect', 0), ('skip_connect', 0), ('dil_conv_3x3', 2)], normal_concat=[2, 3, 4, 5], reduce=[('max_pool_3x3', 0), ('max_pool_3x3', 1), ('skip_connect', 2), ('max_pool_3x3', 1), ('max_pool_3x3', 0), ('skip_connect', 2), ('skip_connect', 2), ('max_pool_3x3', 1)], reduce_concat=[2, 3, 4, 5])
PDARTS = Genotype(normal=[('skip_connect', 0), ('dil_conv_3x3', 1), ('skip_connect', 0),('sep_conv_3x3', 1), ('sep_conv_3x3', 1), ('sep_conv_3x3', 3), ('sep_conv_3x3',0), ('dil_conv_5x5', 4)], normal_concat=range(2, 6), reduce=[('avg_pool_3x3', 0), ('sep_conv_5x5', 1), ('sep_conv_3x3', 0), ('dil_conv_5x5', 2), ('max_pool_3x3', 0), ('dil_conv_3x3', 1), ('dil_conv_3x3', 1), ('dil_conv_5x5', 3)], reduce_concat=range(2, 6))
PDARTS_TS_CIFAR10 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('dil_conv_3x3', 2), ('skip_connect', 0), ('dil_conv_3x3', 3), ('skip_connect', 2), ('dil_conv_5x5', 4)], normal_concat=range(2, 6), reduce=[('avg_pool_3x3', 0), ('dil_conv_3x3', 1), ('avg_pool_3x3', 0), ('dil_conv_5x5', 2), ('skip_connect', 0), ('sep_conv_3x3', 3), ('sep_conv_3x3', 0), ('dil_conv_3x3', 2)], reduce_concat=range(2, 6))
PDARTS_TS_CIFAR100 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_5x5', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('skip_connect', 2), ('sep_conv_3x3', 0), ('sep_conv_3x3', 1)], normal_concat=range(2, 6), reduce=[('dil_conv_3x3', 0), ('avg_pool_3x3', 1), ('dil_conv_3x3', 1), ('sep_conv_5x5', 2), ('sep_conv_5x5', 1), ('sep_conv_5x5', 2), ('avg_pool_3x3', 0), ('dil_conv_5x5', 2)], reduce_concat=range(2, 6))
PDARTS_TS_CIFAR100_GAMMA_2 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 2), ('skip_connect', 0), ('sep_conv_3x3', 2), ('sep_conv_3x3', 0), ('sep_conv_3x3', 2)], normal_concat=range(2, 6), reduce=[('avg_pool_3x3', 0), ('avg_pool_3x3', 1), ('avg_pool_3x3', 0), ('dil_conv_5x5', 2), ('avg_pool_3x3', 0), ('sep_conv_5x5', 2), ('avg_pool_3x3', 0), ('sep_conv_5x5', 4)], reduce_concat=range(2, 6))
PDARTS_TS_CIFAR100_GAMMA_3 = Genotype(normal=[('sep_conv_3x3', 0), ('dil_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('dil_conv_5x5', 2), ('sep_conv_3x3', 0), ('dil_conv_5x5', 1)], normal_concat=range(2, 6), reduce=[('skip_connect', 0), ('max_pool_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('avg_pool_3x3', 0), ('dil_conv_5x5', 3), ('dil_conv_3x3', 3), ('dil_conv_3x3', 4)], reduce_concat=range(2, 6))
PDARTS_TS_CIFAR100_GAMMA_0_1 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('dil_conv_3x3', 2), ('skip_connect', 0), ('sep_conv_3x3', 2), ('sep_conv_3x3', 0), ('dil_conv_3x3', 3)], normal_concat=range(2, 6), reduce=[('avg_pool_3x3', 0), ('dil_conv_5x5', 1), ('avg_pool_3x3', 0), ('sep_conv_3x3', 2), ('dil_conv_5x5', 2), ('dil_conv_3x3', 3), ('avg_pool_3x3', 0), ('sep_conv_5x5', 2)], reduce_concat=range(2, 6))
PDARTS_TS_CIFAR100_GAMMA_0_5 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('skip_connect', 2), ('sep_conv_3x3', 1), ('sep_conv_5x5', 4)], normal_concat=range(2, 6), reduce=[('sep_conv_3x3', 0), ('skip_connect', 1), ('avg_pool_3x3', 0), ('sep_conv_3x3', 2), ('avg_pool_3x3', 0), ('sep_conv_3x3', 1), ('avg_pool_3x3', 0), ('dil_conv_5x5', 2)], reduce_concat=range(2, 6))
DARTS_TS_18_CIFAR10_GAMMA_0_5 = Genotype(normal=[('skip_connect', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('dil_conv_5x5', 1), ('skip_connect', 0), ('dil_conv_3x3', 1)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('skip_connect', 1), ('skip_connect', 2), ('max_pool_3x3', 0), ('max_pool_3x3', 0), ('skip_connect', 2), ('skip_connect', 2), ('max_pool_3x3', 0)], reduce_concat=range(2, 6))
DARTS_TS_18_CIFAR10_GAMMA_0_1 = Genotype(normal=[('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('skip_connect', 1), ('skip_connect', 0), ('dil_conv_5x5', 1), ('skip_connect', 0), ('dil_conv_3x3', 1)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('avg_pool_3x3', 1), ('max_pool_3x3', 0), ('skip_connect', 2), ('max_pool_3x3', 0), ('skip_connect', 2), ('skip_connect', 2), ('avg_pool_3x3', 0)], reduce_concat=range(2, 6))
DARTS_TS_18_CIFAR10_GAMMA_2 = Genotype(normal=[('skip_connect', 0), ('sep_conv_5x5', 1), ('skip_connect', 0), ('skip_connect', 1), ('sep_conv_3x3', 0), ('dil_conv_3x3', 1), ('skip_connect', 0), ('dil_conv_3x3', 1)], normal_concat=range(2, 6), reduce=[('avg_pool_3x3', 0), ('avg_pool_3x3', 1), ('skip_connect', 2), ('avg_pool_3x3', 0), ('skip_connect', 2), ('avg_pool_3x3', 0), ('skip_connect', 2), ('skip_connect', 3)], reduce_concat=range(2, 6))
DARTS_TS_18_CIFAR10_GAMMA_3 = Genotype(normal=[('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('skip_connect', 1), ('skip_connect', 0), ('dil_conv_5x5', 1), ('skip_connect', 0), ('dil_conv_5x5', 1)], normal_concat=range(2, 6), reduce=[('avg_pool_3x3', 1), ('avg_pool_3x3', 0), ('skip_connect', 2), ('avg_pool_3x3', 1), ('skip_connect', 2), ('avg_pool_3x3', 0), ('skip_connect', 2), ('avg_pool_3x3', 0)], reduce_concat=range(2, 6))
DARTS_TS_18_CIFAR10_LAMBDA_2 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('dil_conv_5x5', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1)], normal_concat=range(2, 6), reduce=[('skip_connect', 0), ('avg_pool_3x3', 1), ('skip_connect', 2), ('max_pool_3x3', 0), ('avg_pool_3x3', 1), ('skip_connect', 2), ('skip_connect', 2), ('skip_connect', 3)], reduce_concat=range(2, 6))
DARTS_TS_18_CIFAR10_LAMBDA_0_1 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('skip_connect', 2), ('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('dil_conv_3x3', 1)], normal_concat=range(2, 6), reduce=[('avg_pool_3x3', 1), ('avg_pool_3x3', 0), ('skip_connect', 2), ('max_pool_3x3', 0), ('skip_connect', 2), ('skip_connect', 3), ('skip_connect', 2), ('avg_pool_3x3', 0)], reduce_concat=range(2, 6))
DARTS_TS_18_CIFAR10_LAMBDA_0_5 = Genotype(normal=[('sep_conv_3x3', 0), ('dil_conv_3x3', 1), ('sep_conv_3x3', 0), ('sep_conv_5x5', 1), ('skip_connect', 0), ('dil_conv_3x3', 1), ('skip_connect', 0), ('skip_connect', 1)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('dil_conv_3x3', 1), ('skip_connect', 2), ('max_pool_3x3', 0), ('skip_connect', 2), ('skip_connect', 3), ('skip_connect', 3), ('skip_connect', 2)], reduce_concat=range(2, 6))
# PDARTS_TS_18_CIFAR100_LAMBDA_0_5 = Genotype(normal=[('skip_connect', 0), ('sep_conv_5x5', 1), ('sep_conv_3x3', 0), ('skip_connect', 1), ('sep_conv_3x3', 0), ('sep_conv_3x3', 3), ('sep_conv_3x3', 0), ('dil_conv_3x3', 1)], normal_concat=range(2, 6), reduce=[('skip_connect', 0), ('skip_connect', 1), ('skip_connect', 0), ('skip_connect', 1), ('avg_pool_3x3', 0), ('avg_pool_3x3', 1), ('avg_pool_3x3', 0), ('dil_conv_5x5', 4)], reduce_concat=range(2, 6))
# DARTS_TS_18_CIFAR10_LAMBDA_0_5 = Genotype(normal=[('sep_conv_3x3', 0), ('dil_conv_3x3', 1), ('sep_conv_3x3', 0), ('sep_conv_5x5', 1), ('skip_connect', 0), ('dil_conv_3x3', 1), ('skip_connect', 0), ('skip_connect', 1)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('dil_conv_3x3', 1), ('skip_connect', 2), ('max_pool_3x3', 0), ('skip_connect', 2), ('skip_connect', 3), ('skip_connect', 3), ('skip_connect', 2)], reduce_concat=range(2, 6))
DARTS_TS_18_CIFAR10_LAMBDA_3 = Genotype(normal=[('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('dil_conv_5x5', 1), ('skip_connect', 2), ('avg_pool_3x3', 0), ('avg_pool_3x3', 0), ('skip_connect', 2), ('skip_connect', 2), ('avg_pool_3x3', 0)], reduce_concat=range(2, 6))
PDARTS_TS_18_CIFAR100_LAMBDA_3 = Genotype(normal=[('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_5x5', 1), ('dil_conv_3x3', 1), ('sep_conv_3x3', 2), ('sep_conv_3x3', 0), ('sep_conv_3x3', 1)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 1), ('sep_conv_3x3', 2), ('max_pool_3x3', 0), ('sep_conv_5x5', 1), ('dil_conv_3x3', 1), ('dil_conv_3x3', 2)], reduce_concat=range(2, 6))
PDARTS_TS_18_CIFAR100_LAMBDA_0_1 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('dil_conv_3x3', 1)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('sep_conv_5x5', 1), ('avg_pool_3x3', 0), ('sep_conv_3x3', 1), ('avg_pool_3x3', 0), ('dil_conv_5x5', 3), ('avg_pool_3x3', 0), ('dil_conv_3x3', 1)], reduce_concat=range(2, 6))
PDARTS_TS_18_CIFAR100_LAMBDA_0_5 = Genotype(normal=[('skip_connect', 0), ('sep_conv_5x5', 1), ('sep_conv_3x3', 0), ('skip_connect', 1), ('sep_conv_3x3', 0), ('sep_conv_3x3', 3), ('sep_conv_3x3', 0), ('dil_conv_3x3', 1)], normal_concat=range(2, 6), reduce=[('skip_connect', 0), ('skip_connect', 1), ('skip_connect', 0), ('skip_connect', 1), ('avg_pool_3x3', 0), ('avg_pool_3x3', 1), ('avg_pool_3x3', 0), ('dil_conv_5x5', 4)], reduce_concat=range(2, 6))
PDARTS_TS_18_CIFAR100_LAMBDA_2 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('dil_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_5x5', 2), ('sep_conv_3x3', 0), ('dil_conv_5x5', 4)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('skip_connect', 1), ('avg_pool_3x3', 0), ('sep_conv_5x5', 1), ('avg_pool_3x3', 0), ('sep_conv_3x3', 2), ('max_pool_3x3', 0), ('dil_conv_3x3', 1)], reduce_concat=range(2, 6))
PDARTS_TS_18_CIFAR100_AB_1 = Genotype(normal=[('dil_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 2), ('sep_conv_3x3', 0), ('dil_conv_3x3', 1), ('sep_conv_3x3', 0), ('sep_conv_3x3', 3)], normal_concat=range(2, 6), reduce=[('skip_connect', 0), ('dil_conv_5x5', 1), ('avg_pool_3x3', 0), ('skip_connect', 1), ('skip_connect', 0), ('sep_conv_5x5', 2), ('skip_connect', 0), ('sep_conv_3x3', 1)], reduce_concat=range(2, 6))
PDARTS_TS_18_CIFAR100_AB_4 = Genotype(normal=[('sep_conv_3x3', 0), ('dil_conv_3x3', 1), ('sep_conv_3x3', 0), ('skip_connect', 1), ('max_pool_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_5x5', 1)], normal_concat=range(2, 6), reduce=[('avg_pool_3x3', 0), ('skip_connect', 1), ('avg_pool_3x3', 0), ('sep_conv_5x5', 2), ('avg_pool_3x3', 0), ('dil_conv_3x3', 1), ('avg_pool_3x3', 0), ('sep_conv_3x3', 4)], reduce_concat=range(2, 6))
DARTS_TS_18_CIFAR10_AB_1 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('dil_conv_5x5', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('dil_conv_5x5', 1), ('skip_connect', 2), ('max_pool_3x3', 0), ('skip_connect', 2), ('avg_pool_3x3', 0), ('skip_connect', 2), ('skip_connect', 3)], reduce_concat=range(2, 6))
DARTS_TS_18_CIFAR10_AB_4 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('dil_conv_3x3', 1), ('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1)], normal_concat=range(2, 6), reduce=[('dil_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 2), ('max_pool_3x3', 1), ('skip_connect', 2), ('dil_conv_3x3', 3), ('skip_connect', 2), ('skip_connect', 4)], reduce_concat=range(2, 6))
PDARTS_TUNED_CIFAR100 = Genotype(normal=[('sep_conv_3x3', 0), ('skip_connect', 1), ('sep_conv_3x3', 0), ('skip_connect', 1), ('sep_conv_3x3', 0), ('sep_conv_5x5', 3), ('sep_conv_5x5', 0), ('sep_conv_3x3', 1)], normal_concat=range(2, 6), reduce=[('avg_pool_3x3', 0), ('sep_conv_3x3', 1), ('avg_pool_3x3', 0), ('dil_conv_3x3', 2), ('max_pool_3x3', 1), ('sep_conv_3x3', 3), ('sep_conv_5x5', 1), ('dil_conv_5x5', 4)], reduce_concat=range(2, 6))
PDARTS_TUNED_CIFAR10 = Genotype(normal=[('skip_connect', 0), ('dil_conv_5x5', 1), ('skip_connect', 0), ('sep_conv_5x5', 1), ('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('sep_conv_3x3', 1)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('dil_conv_5x5', 1), ('avg_pool_3x3', 0), ('dil_conv_3x3', 2), ('avg_pool_3x3', 0), ('sep_conv_3x3', 3), ('avg_pool_3x3', 0), ('sep_conv_5x5', 3)], reduce_concat=range(2, 6))
PDARTS_TS_50_CIFAR10 = Genotype(normal=[('sep_conv_3x3', 0), ('dil_conv_3x3', 1), ('skip_connect', 0), ('dil_conv_5x5', 2), ('sep_conv_3x3', 0), ('dil_conv_5x5', 3), ('skip_connect', 0), ('dil_conv_5x5', 4)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('skip_connect', 1), ('max_pool_3x3', 0), ('sep_conv_5x5', 1), ('avg_pool_3x3', 0), ('skip_connect', 1), ('avg_pool_3x3', 0), ('dil_conv_3x3', 1)], reduce_concat=range(2, 6))
PDARTS_TS_50_CIFAR100 = Genotype(normal=[('skip_connect', 0), ('dil_conv_5x5', 1), ('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('max_pool_3x3', 0), ('skip_connect', 2), ('sep_conv_3x3', 0), ('sep_conv_3x3', 1)], normal_concat=range(2, 6), reduce=[('avg_pool_3x3', 0), ('sep_conv_3x3', 1), ('avg_pool_3x3', 0), ('dil_conv_3x3', 2), ('avg_pool_3x3', 0), ('sep_conv_3x3', 3), ('avg_pool_3x3', 0), ('sep_conv_3x3', 2)], reduce_concat=range(2, 6))
TUNED_FURTHER_GAMMA_0_1 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_5x5', 1), ('dil_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('dil_conv_5x5', 2), ('sep_conv_3x3', 0), ('dil_conv_3x3', 4)], normal_concat=range(2, 6), reduce=[('avg_pool_3x3', 0), ('dil_conv_3x3', 1), ('sep_conv_5x5', 0), ('dil_conv_3x3', 2), ('avg_pool_3x3', 0), ('avg_pool_3x3', 1), ('sep_conv_5x5', 1), ('sep_conv_5x5', 4)], reduce_concat=range(2, 6))
TUNED_FURTHER_GAMMA_0_5 = Genotype(normal=[('dil_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('dil_conv_3x3', 2), ('sep_conv_3x3', 1), ('sep_conv_3x3', 2), ('sep_conv_3x3', 1), ('skip_connect', 2)], normal_concat=range(2, 6), reduce=[('avg_pool_3x3', 0), ('avg_pool_3x3', 1), ('avg_pool_3x3', 0), ('sep_conv_3x3', 1), ('avg_pool_3x3', 1), ('dil_conv_3x3', 3), ('avg_pool_3x3', 1), ('dil_conv_3x3', 2)], reduce_concat=range(2, 6))
TUNED_FURTHER_GAMMA_2 = Genotype(normal=[('sep_conv_3x3', 0), ('dil_conv_5x5', 1), ('skip_connect', 0), ('dil_conv_5x5', 1), ('sep_conv_3x3', 0), ('dil_conv_3x3', 1), ('skip_connect', 0), ('dil_conv_3x3', 3)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('skip_connect', 1), ('max_pool_3x3', 0), ('sep_conv_3x3', 1), ('avg_pool_3x3', 0), ('sep_conv_5x5', 1), ('dil_conv_5x5', 0), ('skip_connect', 1)], reduce_concat=range(2, 6))
TUNED_FURTHER_GAMMA_3 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('dil_conv_3x3', 2), ('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('dil_conv_5x5', 4)], normal_concat=range(2, 6), reduce=[('avg_pool_3x3', 0), ('avg_pool_3x3', 1), ('avg_pool_3x3', 0), ('skip_connect', 1), ('avg_pool_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('dil_conv_3x3', 1)], reduce_concat=range(2, 6))
TUNED_FURTHER_LAMBDA_0_1 = Genotype(normal=[('skip_connect', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('dil_conv_5x5', 3), ('dil_conv_3x3', 1), ('dil_conv_3x3', 4)], normal_concat=range(2, 6), reduce=[('avg_pool_3x3', 0), ('dil_conv_5x5', 1), ('avg_pool_3x3', 0), ('dil_conv_3x3', 2), ('dil_conv_5x5', 1), ('sep_conv_5x5', 2), ('avg_pool_3x3', 0), ('dil_conv_3x3', 3)], reduce_concat=range(2, 6))
TUNED_FURTHER_LAMBDA_0_5 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('dil_conv_5x5', 2), ('sep_conv_3x3', 0), ('dil_conv_3x3', 4)], normal_concat=range(2, 6), reduce=[('avg_pool_3x3', 0), ('sep_conv_5x5', 1), ('dil_conv_5x5', 1), ('sep_conv_3x3', 2), ('avg_pool_3x3', 0), ('sep_conv_5x5', 1), ('avg_pool_3x3', 0), ('sep_conv_3x3', 2)], reduce_concat=range(2, 6))
TUNED_FURTHER_LAMBDA_2 = Genotype(normal=[('skip_connect', 0), ('dil_conv_3x3', 1), ('skip_connect', 0), ('dil_conv_5x5', 1), ('dil_conv_3x3', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('dil_conv_5x5', 3)], normal_concat=range(2, 6), reduce=[('avg_pool_3x3', 0), ('avg_pool_3x3', 1), ('avg_pool_3x3', 0), ('dil_conv_3x3', 1), ('avg_pool_3x3', 0), ('sep_conv_3x3', 2), ('avg_pool_3x3', 0), ('dil_conv_5x5', 4)], reduce_concat=range(2, 6))
TUNED_FURTHER_LAMBDA_3 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('dil_conv_3x3', 3), ('sep_conv_3x3', 0), ('dil_conv_5x5', 3)], normal_concat=range(2, 6), reduce=[('skip_connect', 0), ('avg_pool_3x3', 1), ('sep_conv_3x3', 1), ('dil_conv_5x5', 2), ('avg_pool_3x3', 0), ('skip_connect', 1), ('max_pool_3x3', 0), ('dil_conv_3x3', 1)], reduce_concat=range(2, 6))
# PDARTS_COOP_CIFAR10_1_LAMBDA_0_1 = Genotype(normal=[('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('dil_conv_3x3', 2), ('sep_conv_3x3', 0), ('sep_conv_5x5', 3), ('sep_conv_3x3', 1), ('dil_conv_5x5', 2)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('dil_conv_3x3', 1), ('skip_connect', 0), ('skip_connect', 1), ('sep_conv_3x3', 0), ('avg_pool_3x3', 1), ('skip_connect', 0), ('avg_pool_3x3', 1)], reduce_concat=range(2, 6))
# PDARTS_COOP_CIFAR10_2_LAMBDA_0_1 = Genotype(normal=[('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('dil_conv_5x5', 3), ('sep_conv_3x3', 0), ('dil_conv_3x3', 3)], normal_concat=range(2, 6), reduce=[('skip_connect', 0), ('max_pool_3x3', 1), ('sep_conv_3x3', 0), ('dil_conv_3x3', 1), ('avg_pool_3x3', 1), ('skip_connect', 3), ('avg_pool_3x3', 0), ('avg_pool_3x3', 1)], reduce_concat=range(2, 6))
# PDARTS_COOP_CIFAR10_2_LAMBDA_1 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_5x5', 1), ('sep_conv_3x3', 0), ('dil_conv_5x5', 3), ('skip_connect', 2), ('sep_conv_3x3', 3)], normal_concat=range(2, 6), reduce=[('avg_pool_3x3', 0), ('skip_connect', 1), ('avg_pool_3x3', 0), ('skip_connect', 1), ('dil_conv_3x3', 2), ('sep_conv_5x5', 3), ('dil_conv_5x5', 3), ('skip_connect', 4)], reduce_concat=range(2, 6))
# PDARTS_COOP_CIFAR10_1_LAMBDA_1 = Genotype(normal=[('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('dil_conv_5x5', 2), ('sep_conv_3x3', 2), ('dil_conv_5x5', 3), ('sep_conv_3x3', 0), ('dil_conv_3x3', 3)], normal_concat=range(2, 6), reduce=[('skip_connect', 0), ('sep_conv_3x3', 1), ('max_pool_3x3', 1), ('sep_conv_3x3', 2), ('skip_connect', 0), ('sep_conv_3x3', 2), ('skip_connect', 0), ('skip_connect', 4)], reduce_concat=range(2, 6))
# PDARTS_COOP_CIFAR10_1_LAMBDA_3 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_5x5', 1), ('sep_conv_5x5', 1), ('dil_conv_3x3', 2), ('skip_connect', 0), ('sep_conv_5x5', 1), ('skip_connect', 0), ('sep_conv_3x3', 1)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('avg_pool_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 2), ('dil_conv_3x3', 1), ('dil_conv_5x5', 2), ('max_pool_3x3', 0), ('sep_conv_5x5', 3)], reduce_concat=range(2, 6))
# PDARTS_COOP_CIFAR10_2_LAMBDA_3 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 2), ('sep_conv_3x3', 1), ('dil_conv_3x3', 3), ('sep_conv_3x3', 2), ('dil_conv_3x3', 4)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('sep_conv_3x3', 1), ('max_pool_3x3', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('sep_conv_5x5', 2), ('max_pool_3x3', 0), ('dil_conv_5x5', 3)], reduce_concat=range(2, 6))
# PDARTS_COOP_CIFAR10_1_LAMBDA_2 = Genotype(normal=[('skip_connect', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('dil_conv_5x5', 2), ('sep_conv_3x3', 1), ('sep_conv_3x3', 2), ('skip_connect', 0), ('dil_conv_3x3', 4)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('dil_conv_3x3', 1), ('max_pool_3x3', 1), ('dil_conv_5x5', 2), ('sep_conv_3x3', 1), ('sep_conv_3x3', 2), ('skip_connect', 0), ('skip_connect', 1)], reduce_concat=range(2, 6))
# PDARTS_COOP_CIFAR10_2_LAMBDA_2 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_5x5', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('sep_conv_5x5', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 4)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('avg_pool_3x3', 1), ('dil_conv_3x3', 1), ('skip_connect', 2), ('avg_pool_3x3', 1), ('dil_conv_5x5', 3), ('sep_conv_3x3', 2), ('dil_conv_5x5', 3)], reduce_concat=range(2, 6))
# PDARTS_COOP_CIFAR10_1_LAMBDA_0_5 = Genotype(normal=[('skip_connect', 0), ('sep_conv_5x5', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 1), ('sep_conv_5x5', 3), ('dil_conv_3x3', 3), ('dil_conv_5x5', 4)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('skip_connect', 1), ('skip_connect', 0), ('skip_connect', 1), ('skip_connect', 0), ('max_pool_3x3', 1), ('max_pool_3x3', 0), ('sep_conv_3x3', 1)], reduce_concat=range(2, 6))
# PDARTS_COOP_CIFAR10_2_LAMBDA_0_5 = Genotype(normal=[('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_5x5', 2), ('sep_conv_3x3', 0), ('sep_conv_3x3', 3), ('sep_conv_5x5', 0), ('sep_conv_3x3', 4)], normal_concat=range(2, 6), reduce=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('avg_pool_3x3', 0), ('max_pool_3x3', 1), ('sep_conv_3x3', 1), ('skip_connect', 3), ('max_pool_3x3', 0), ('sep_conv_5x5', 3)], reduce_concat=range(2, 6))
# PDARTS_COOP_CIFAR100_1_LAMBDA_1 = Genotype(normal=[('sep_conv_3x3', 0), ('dil_conv_5x5', 1), ('sep_conv_3x3', 0), ('skip_connect', 1), ('sep_conv_3x3', 1), ('sep_conv_3x3', 3), ('skip_connect', 1), ('sep_conv_3x3', 4)], normal_concat=range(2, 6), reduce=[('skip_connect', 0), ('skip_connect', 1), ('avg_pool_3x3', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('max_pool_3x3', 1), ('dil_conv_3x3', 1), ('skip_connect', 2)], reduce_concat=range(2, 6))
# PDARTS_COOP_CIFAR100_2_LAMBDA_1 = Genotype(normal=[('skip_connect', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 3), ('sep_conv_3x3', 1), ('sep_conv_3x3', 4)], normal_concat=range(2, 6), reduce=[('sep_conv_5x5', 0), ('skip_connect', 1), ('max_pool_3x3', 1), ('sep_conv_3x3', 2), ('skip_connect', 0), ('sep_conv_3x3', 2), ('sep_conv_5x5', 1), ('sep_conv_3x3', 4)], reduce_concat=range(2, 6))
# PDARTS_COOP_CIFAR100_1_LAMBDA_0_5 = Genotype(normal=[('skip_connect', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('sep_conv_3x3', 2), ('skip_connect', 0), ('sep_conv_3x3', 1)], normal_concat=range(2, 6), reduce=[('avg_pool_3x3', 0), ('avg_pool_3x3', 1), ('avg_pool_3x3', 0), ('sep_conv_5x5', 1), ('avg_pool_3x3', 0), ('avg_pool_3x3', 1), ('avg_pool_3x3', 1), ('dil_conv_5x5', 3)], reduce_concat=range(2, 6))
# PDARTS_COOP_CIFAR100_2_LAMBDA_0_5 = Genotype(normal=[('skip_connect', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('dil_conv_5x5', 4)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('sep_conv_5x5', 1), ('skip_connect', 1), ('dil_conv_3x3', 2), ('avg_pool_3x3', 1), ('dil_conv_5x5', 3), ('dil_conv_5x5', 2), ('dil_conv_5x5', 4)], reduce_concat=range(2, 6))
# PDARTS_COOP_CIFAR100_1_LAMBDA_2 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 2), ('skip_connect', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 1), ('sep_conv_3x3', 2)], normal_concat=range(2, 6), reduce=[('skip_connect', 0), ('sep_conv_3x3', 1), ('max_pool_3x3', 0), ('sep_conv_3x3', 2), ('sep_conv_5x5', 1), ('skip_connect', 2), ('sep_conv_5x5', 1), ('sep_conv_3x3', 3)], reduce_concat=range(2, 6))
# PDARTS_COOP_CIFAR100_2_LAMBDA_2 = Genotype(normal=[('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('sep_conv_3x3', 3), ('sep_conv_3x3', 2), ('sep_conv_3x3', 4)], normal_concat=range(2, 6), reduce=[('sep_conv_5x5', 0), ('max_pool_3x3', 1), ('sep_conv_3x3', 1), ('dil_conv_3x3', 2), ('sep_conv_3x3', 1), ('skip_connect', 3), ('sep_conv_3x3', 2), ('dil_conv_5x5', 4)], reduce_concat=range(2, 6))
# PDARTS_COOP_CIFAR100_1_LAMBDA_3 = Genotype(normal=[('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 1), ('sep_conv_3x3', 2), ('sep_conv_3x3', 2), ('sep_conv_3x3', 4)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('max_pool_3x3', 1), ('sep_conv_3x3', 1), ('sep_conv_3x3', 2), ('sep_conv_3x3', 1), ('sep_conv_5x5', 2), ('sep_conv_3x3', 2), ('sep_conv_3x3', 3)], reduce_concat=range(2, 6))
# PDARTS_COOP_CIFAR100_2_LAMBDA_3 = Genotype(normal=[('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 2), ('sep_conv_3x3', 2), ('sep_conv_3x3', 3), ('sep_conv_3x3', 0), ('sep_conv_3x3', 4)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('sep_conv_5x5', 1), ('sep_conv_3x3', 1), ('sep_conv_3x3', 2), ('dil_conv_5x5', 2), ('sep_conv_3x3', 3), ('sep_conv_5x5', 1), ('sep_conv_3x3', 4)], reduce_concat=range(2, 6))
# PDARTS_COOP_CIFAR100_1_LAMBDA_0_1 = Genotype(normal=[('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 2), ('sep_conv_3x3', 0), ('sep_conv_3x3', 2), ('sep_conv_3x3', 0), ('sep_conv_3x3', 1)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('skip_connect', 1), ('avg_pool_3x3', 0), ('skip_connect', 2), ('skip_connect', 1), ('sep_conv_3x3', 3), ('avg_pool_3x3', 0), ('max_pool_3x3', 1)], reduce_concat=range(2, 6))
# PDARTS_COOP_CIFAR100_2_LAMBDA_0_1 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 2), ('sep_conv_3x3', 0), ('sep_conv_3x3', 3), ('skip_connect', 0), ('dil_conv_5x5', 3)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('max_pool_3x3', 1), ('skip_connect', 0), ('max_pool_3x3', 1), ('skip_connect', 2), ('dil_conv_5x5', 3), ('sep_conv_3x3', 1), ('sep_conv_3x3', 4)], reduce_concat=range(2, 6))
PDARTS_COOP_CIFAR10_1_LAMBDA_1 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_5x5', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('dil_conv_3x3', 3), ('dil_conv_3x3', 2), ('sep_conv_5x5', 3)], normal_concat=range(2, 6), reduce=[('skip_connect', 0), ('sep_conv_3x3', 1), ('max_pool_3x3', 0), ('dil_conv_5x5', 2), ('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('sep_conv_5x5', 1), ('dil_conv_5x5', 4)], reduce_concat=range(2, 6))
PDARTS_COOP_CIFAR10_2_LAMBDA_1 = Genotype(normal=[('skip_connect', 0), ('sep_conv_5x5', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('dil_conv_5x5', 2), ('dil_conv_3x3', 3), ('sep_conv_5x5', 0), ('dil_conv_3x3', 4)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('max_pool_3x3', 1), ('max_pool_3x3', 0), ('max_pool_3x3', 1), ('max_pool_3x3', 0), ('skip_connect', 1), ('dil_conv_3x3', 3), ('dil_conv_3x3', 4)], reduce_concat=range(2, 6))
PDARTS_COOP_CIFAR10_1_LAMBDA_0_1 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_5x5', 1), ('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 1), ('dil_conv_3x3', 3), ('skip_connect', 0), ('dil_conv_3x3', 3)], normal_concat=range(2, 6), reduce=[('skip_connect', 0), ('avg_pool_3x3', 1), ('max_pool_3x3', 0), ('avg_pool_3x3', 1), ('skip_connect', 0), ('max_pool_3x3', 1), ('skip_connect', 0), ('dil_conv_3x3', 4)], reduce_concat=range(2, 6))
PDARTS_COOP_CIFAR10_2_LAMBDA_0_1 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 3), ('dil_conv_5x5', 3), ('dil_conv_3x3', 4)], normal_concat=range(2, 6), reduce=[('skip_connect', 0), ('avg_pool_3x3', 1), ('avg_pool_3x3', 0), ('max_pool_3x3', 1), ('sep_conv_5x5', 1), ('dil_conv_3x3', 2), ('max_pool_3x3', 1), ('skip_connect', 2)], reduce_concat=range(2, 6))
PDARTS_COOP_CIFAR10_1_LAMBDA_0_5 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 2), ('dil_conv_3x3', 3), ('skip_connect', 0), ('sep_conv_3x3', 1)], normal_concat=range(2, 6), reduce=[('sep_conv_3x3', 0), ('max_pool_3x3', 1), ('avg_pool_3x3', 0), ('sep_conv_3x3', 2), ('avg_pool_3x3', 1), ('sep_conv_3x3', 3), ('sep_conv_3x3', 0), ('dil_conv_3x3', 4)], reduce_concat=range(2, 6))
PDARTS_COOP_CIFAR10_2_LAMBDA_0_5 = Genotype(normal=[('sep_conv_5x5', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('dil_conv_5x5', 2), ('skip_connect', 0), ('dil_conv_5x5', 3), ('sep_conv_3x3', 1), ('dil_conv_3x3', 3)], normal_concat=range(2, 6), reduce=[('avg_pool_3x3', 0), ('sep_conv_5x5', 1), ('sep_conv_3x3', 1), ('dil_conv_5x5', 2), ('skip_connect', 0), ('dil_conv_5x5', 3), ('avg_pool_3x3', 1), ('dil_conv_5x5', 3)], reduce_concat=range(2, 6))
PDARTS_COOP_CIFAR10_1_LAMBDA_2 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('sep_conv_3x3', 2), ('sep_conv_5x5', 2), ('dil_conv_5x5', 3), ('skip_connect', 0), ('dil_conv_5x5', 4)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('sep_conv_3x3', 1), ('dil_conv_3x3', 1), ('sep_conv_5x5', 2), ('sep_conv_5x5', 0), ('dil_conv_5x5', 3), ('sep_conv_5x5', 2), ('dil_conv_5x5', 4)], reduce_concat=range(2, 6))
PDARTS_COOP_CIFAR10_2_LAMBDA_2 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_5x5', 2), ('sep_conv_3x3', 2), ('dil_conv_3x3', 3), ('sep_conv_3x3', 2), ('sep_conv_3x3', 4)], normal_concat=range(2, 6), reduce=[('sep_conv_5x5', 0), ('dil_conv_5x5', 1), ('sep_conv_3x3', 0), ('sep_conv_5x5', 2), ('sep_conv_3x3', 2), ('sep_conv_5x5', 3), ('max_pool_3x3', 0), ('dil_conv_5x5', 3)], reduce_concat=range(2, 6))
PDARTS_COOP_CIFAR10_1_LAMBDA_3 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_5x5', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('dil_conv_5x5', 3), ('skip_connect', 1), ('dil_conv_5x5', 2)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('sep_conv_5x5', 1), ('max_pool_3x3', 0), ('sep_conv_3x3', 2), ('sep_conv_5x5', 2), ('dil_conv_5x5', 3), ('sep_conv_5x5', 0), ('sep_conv_3x3', 3)], reduce_concat=range(2, 6))
PDARTS_COOP_CIFAR10_2_LAMBDA_3 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_5x5', 1), ('sep_conv_3x3', 1), ('sep_conv_3x3', 2), ('sep_conv_5x5', 1), ('sep_conv_3x3', 3), ('dil_conv_3x3', 3), ('dil_conv_3x3', 4)], normal_concat=range(2, 6), reduce=[('dil_conv_3x3', 0), ('dil_conv_3x3', 1), ('dil_conv_3x3', 1), ('sep_conv_5x5', 2), ('dil_conv_5x5', 2), ('sep_conv_3x3', 3), ('sep_conv_3x3', 0), ('sep_conv_3x3', 4)], reduce_concat=range(2, 6))
PDARTS_COOP_CIFAR100_1_LAMBDA_1 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('sep_conv_3x3', 1)], normal_concat=range(2, 6), reduce=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('max_pool_3x3', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('sep_conv_3x3', 2), ('avg_pool_3x3', 0), ('sep_conv_5x5', 4)], reduce_concat=range(2, 6))
PDARTS_COOP_CIFAR100_2_LAMBDA_1 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 1), ('sep_conv_3x3', 2), ('sep_conv_3x3', 1), ('sep_conv_3x3', 3)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('skip_connect', 1), ('sep_conv_5x5', 1), ('sep_conv_3x3', 2), ('sep_conv_3x3', 2), ('sep_conv_3x3', 3), ('sep_conv_5x5', 1), ('sep_conv_3x3', 2)], reduce_concat=range(2, 6))
PDARTS_COOP_CIFAR100_1_LAMBDA_0_1 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 2), ('skip_connect', 0), ('sep_conv_5x5', 2), ('sep_conv_3x3', 0), ('sep_conv_3x3', 1)], normal_concat=range(2, 6), reduce=[('avg_pool_3x3', 0), ('sep_conv_3x3', 1), ('avg_pool_3x3', 1), ('sep_conv_3x3', 2), ('sep_conv_3x3', 2), ('sep_conv_3x3', 3), ('skip_connect', 1), ('dil_conv_5x5', 4)], reduce_concat=range(2, 6))
PDARTS_COOP_CIFAR100_2_LAMBDA_0_1 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('dil_conv_5x5', 4)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('avg_pool_3x3', 1), ('sep_conv_5x5', 0), ('sep_conv_3x3', 1), ('skip_connect', 1), ('sep_conv_3x3', 3), ('max_pool_3x3', 0), ('avg_pool_3x3', 1)], reduce_concat=range(2, 6))
PDARTS_COOP_CIFAR100_1_LAMBDA_0_5 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 3), ('sep_conv_3x3', 4)], normal_concat=range(2, 6), reduce=[('sep_conv_3x3', 0), ('max_pool_3x3', 1), ('max_pool_3x3', 0), ('sep_conv_5x5', 1), ('sep_conv_3x3', 2), ('sep_conv_5x5', 3), ('sep_conv_3x3', 3), ('sep_conv_5x5', 4)], reduce_concat=range(2, 6))
PDARTS_COOP_CIFAR100_2_LAMBDA_0_5 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 1), ('skip_connect', 0), ('sep_conv_3x3', 3), ('sep_conv_3x3', 1), ('sep_conv_3x3', 4)], normal_concat=range(2, 6), reduce=[('skip_connect', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 1), ('sep_conv_3x3', 2), ('avg_pool_3x3', 0), ('sep_conv_5x5', 1), ('avg_pool_3x3', 0), ('sep_conv_3x3', 4)], reduce_concat=range(2, 6))
PDARTS_COOP_CIFAR100_1_LAMBDA_2 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_5x5', 1), ('sep_conv_3x3', 1), ('sep_conv_3x3', 2), ('sep_conv_3x3', 1), ('sep_conv_5x5', 3), ('sep_conv_3x3', 1), ('sep_conv_3x3', 3)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 1), ('sep_conv_3x3', 2), ('sep_conv_3x3', 1), ('sep_conv_3x3', 3), ('skip_connect', 0), ('sep_conv_3x3', 3)], reduce_concat=range(2, 6))
PDARTS_COOP_CIFAR100_2_LAMBDA_2 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 1), ('sep_conv_3x3', 2), ('sep_conv_3x3', 1), ('sep_conv_3x3', 3), ('sep_conv_3x3', 2), ('sep_conv_5x5', 4)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('max_pool_3x3', 1), ('dil_conv_5x5', 1), ('sep_conv_3x3', 2), ('skip_connect', 0), ('sep_conv_3x3', 2), ('sep_conv_5x5', 3), ('sep_conv_3x3', 4)], reduce_concat=range(2, 6))
PDARTS_COOP_CIFAR100_1_LAMBDA_3 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 1), ('sep_conv_3x3', 2), ('sep_conv_3x3', 1), ('sep_conv_3x3', 2), ('sep_conv_3x3', 2), ('sep_conv_3x3', 4)], normal_concat=range(2, 6), reduce=[('max_pool_3x3', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 1), ('sep_conv_5x5', 2), ('sep_conv_5x5', 1), ('sep_conv_3x3', 3), ('sep_conv_3x3', 1), ('sep_conv_5x5', 3)], reduce_concat=range(2, 6))
PDARTS_COOP_CIFAR100_2_LAMBDA_3 = Genotype(normal=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('sep_conv_3x3', 0), ('sep_conv_3x3', 2), ('sep_conv_3x3', 0), ('sep_conv_3x3', 3), ('sep_conv_3x3', 3), ('dil_conv_5x5', 4)], normal_concat=range(2, 6), reduce=[('sep_conv_3x3', 0), ('sep_conv_3x3', 1), ('max_pool_3x3', 0), ('sep_conv_3x3', 2), ('dil_conv_5x5', 2), ('sep_conv_3x3', 3), ('sep_conv_3x3', 2), ('sep_conv_3x3', 3)], reduce_concat=range(2, 6))
| 184.069652
| 454
| 0.653738
| 6,480
| 36,998
| 3.289043
| 0.007407
| 0.188852
| 0.217708
| 0.10374
| 0.985877
| 0.984047
| 0.979027
| 0.965749
| 0.950077
| 0.919157
| 0
| 0.120498
| 0.093356
| 36,998
| 200
| 455
| 184.99
| 0.514876
| 0.268312
| 0
| 0.32
| 0
| 0
| 0.427425
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.008
| 0
| 0.008
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
0bb42ea077bd00b3b1e5d0995ecfae89e5b49ee5
| 186
|
py
|
Python
|
icbd/compiler/tests/import_target.py
|
kmod/icbd
|
9636564eb3993afa07c6220d589bbd1991923d74
|
[
"MIT"
] | 7
|
2015-04-06T15:17:13.000Z
|
2020-10-21T04:57:00.000Z
|
icbd/compiler/tests/import_target.py
|
kmod/icbd
|
9636564eb3993afa07c6220d589bbd1991923d74
|
[
"MIT"
] | null | null | null |
icbd/compiler/tests/import_target.py
|
kmod/icbd
|
9636564eb3993afa07c6220d589bbd1991923d74
|
[
"MIT"
] | 4
|
2016-05-16T17:53:08.000Z
|
2020-11-28T17:18:50.000Z
|
# Python 2 fixture for the icbd compiler's import tests.
import import_nested_target
# Module-level side effect: announces this module's name when imported.
print "importing", __name__
# NOTE(review): the import is repeated on purpose, presumably to exercise
# the module-cache (re-import) path — confirm against the test harness.
import import_nested_target
# Module-level attribute visible to importers of this module.
x = 1
def foo():
    # Simple function callable from the importing module.
    print "foo()"
# def k():
#     print x
# foo()
class C(object):
    # Minimal class exposed as an import target.
    pass
| 11.625
| 27
| 0.639785
| 26
| 186
| 4.269231
| 0.576923
| 0.216216
| 0.324324
| 0.432432
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.007092
| 0.241935
| 186
| 15
| 28
| 12.4
| 0.780142
| 0.11828
| 0
| 0.25
| 0
| 0
| 0.0875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.125
| 0.375
| null | null | 0.25
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
|
0
| 7
|
f019e3d5d8729ae9db35c27057f61c31bc0190bd
| 7,329
|
py
|
Python
|
polynomials_on_simplices/calculus/test/finite_difference_test.py
|
FAndersson/polynomials_on_simplices
|
f015a4772c817bfa99b0d6b726667a38a174b064
|
[
"MIT"
] | 1
|
2021-03-17T11:41:21.000Z
|
2021-03-17T11:41:21.000Z
|
polynomials_on_simplices/calculus/test/finite_difference_test.py
|
FAndersson/polynomials_on_simplices
|
f015a4772c817bfa99b0d6b726667a38a174b064
|
[
"MIT"
] | null | null | null |
polynomials_on_simplices/calculus/test/finite_difference_test.py
|
FAndersson/polynomials_on_simplices
|
f015a4772c817bfa99b0d6b726667a38a174b064
|
[
"MIT"
] | null | null | null |
""" Unit tests for the finite_difference module
"""
import unittest
import numpy as np
from scipy.optimize import rosen, rosen_der, rosen_hess
from polynomials_on_simplices.calculus.finite_difference import (
central_difference, central_difference_jacobian, forward_difference, forward_difference_jacobian,
second_central_difference, second_forward_difference)
def is_equal(array1, array2):
    """Return True if the two numpy arrays are approximately equal.

    Comparison uses ``np.testing.assert_allclose`` with atol=1e-4 and
    rtol=1e-4; on mismatch the assertion message is printed and False
    is returned.
    """
    try:
        np.testing.assert_allclose(array1, array2, atol=1e-4, rtol=1e-4)
    except AssertionError as mismatch:
        print(mismatch)
        return False
    else:
        return True
class TestRosenbrockCD(unittest.TestCase):
    """Central-difference derivatives checked against the analytic Rosenbrock ones."""

    def _check(self, x, hessian_atol):
        # Gradient: central difference vs. the exact rosen_der.
        self.assertTrue(is_equal(rosen_der(x), central_difference(rosen, x)))
        # Hessian: second central difference vs. the exact rosen_hess.
        fd_hessian = second_central_difference(rosen, x)
        self.assertTrue(np.allclose(rosen_hess(x), fd_hessian, rtol=1e-5, atol=hessian_atol))

    def test1(self):
        self._check(np.zeros(100), 1e-4)

    def test2(self):
        self._check(np.ones(100), 1e-4)

    def test3(self):
        # Random point: allow a looser Hessian tolerance.
        self._check(np.random.rand(100), 1e-2)
class TestRosenbrockFD(unittest.TestCase):
    """Forward-difference derivatives checked against the analytic Rosenbrock ones."""

    def _check(self, x, hessian_atol):
        # Gradient: forward difference vs. the exact rosen_der.
        self.assertTrue(is_equal(rosen_der(x), forward_difference(rosen, x)))
        # Hessian: second forward difference vs. the exact rosen_hess
        # (forward differencing is less accurate, hence the looser atol).
        fd_hessian = second_forward_difference(rosen, x)
        self.assertTrue(np.allclose(rosen_hess(x), fd_hessian, rtol=1e-5, atol=hessian_atol))

    def test1(self):
        self._check(np.zeros(100), 1e-2)

    def test2(self):
        self._check(np.ones(100), 1e-1)

    def test3(self):
        self._check(np.random.rand(100), 1e-1)
class Test1D(unittest.TestCase):
    """Finite differences of a scalar function (sin) at a random point."""

    def test_sin(self):
        point = np.random.rand()
        expected_first = np.cos(point)
        expected_second = -np.sin(point)
        # First-derivative schemes and their tolerances.
        for scheme, tol in ((forward_difference, 1e-6), (central_difference, 1e-6)):
            self.assertTrue(np.abs(scheme(np.sin, point) - expected_first) < tol)
        # Second-derivative schemes (the central one is more accurate).
        for scheme, tol in ((second_forward_difference, 1e-4), (second_central_difference, 1e-5)):
            self.assertTrue(np.abs(scheme(np.sin, point) - expected_second) < tol)
class TestJacobian(unittest.TestCase):
    """Jacobian approximations for maps R^n -> R^m, including the degenerate
    univariate (n = 1) and scalar-valued (m = 1) cases, which must still
    produce a matrix-shaped result."""

    def _check(self, jacobian_fn, f, m, p, j_expected):
        # The FD Jacobian must have the same matrix shape as the analytic one
        # and agree with it numerically.
        j_fd = jacobian_fn(f, m, p)
        assert j_fd.shape == j_expected.shape
        self.assertTrue(np.allclose(j_expected, j_fd))

    @staticmethod
    def _f_2to2():
        # f : R^2 -> R^2, and its analytic Jacobian at p.
        def f(x):
            return np.array([x[0]**2 * x[1], 5 * x[0] + np.sin(x[1])])

        def jac(p):
            return np.array([
                [2 * p[0] * p[1], p[0]**2],
                [5, np.cos(p[1])]
            ])
        return f, jac

    @staticmethod
    def _f_3to4():
        # f : R^3 -> R^4, and its analytic Jacobian at p.
        def f(x):
            return np.array([
                x[0],
                5 * x[2],
                4 * x[1]**2 - 2 * x[2],
                x[2] * np.sin(x[0])
            ])

        def jac(p):
            return np.array([
                [1.0, 0.0, 0.0],
                [0.0, 0.0, 5.0],
                [0.0, 8.0 * p[1], -2.0],
                [p[2] * np.cos(p[0]), 0.0, np.sin(p[0])]
            ])
        return f, jac

    def test_fd_1(self):
        f, jac = self._f_2to2()
        p = np.random.rand(2)
        self._check(forward_difference_jacobian, f, 2, p, jac(p))

    def test_fd_2(self):
        f, jac = self._f_3to4()
        p = np.random.rand(3)
        self._check(forward_difference_jacobian, f, 4, p, jac(p))

    def test_fd_3(self):
        # Univariate input: f : R -> R^2, Jacobian shape must be (2, 1).
        def f(x):
            return np.array([x, x**2])
        p = np.random.rand()
        self._check(forward_difference_jacobian, f, 2, p, np.array([[1], [2 * p]]))

    def test_fd_4(self):
        # Scalar output: f : R^2 -> R, Jacobian shape must be (1, 2).
        def f(x):
            return x[0] * x[1]**2
        p = np.random.rand(2)
        self._check(forward_difference_jacobian, f, 1, p, np.array([[p[1]**2, 2 * p[0] * p[1]]]))

    def test_cd_1(self):
        f, jac = self._f_2to2()
        p = np.random.rand(2)
        self._check(central_difference_jacobian, f, 2, p, jac(p))

    def test_cd_2(self):
        f, jac = self._f_3to4()
        p = np.random.rand(3)
        self._check(central_difference_jacobian, f, 4, p, jac(p))

    def test_cd_3(self):
        # Univariate input: f : R -> R^2, Jacobian shape must be (2, 1).
        def f(x):
            return np.array([x, x**2])
        p = np.random.rand()
        self._check(central_difference_jacobian, f, 2, p, np.array([[1], [2 * p]]))

    def test_cd_4(self):
        # Scalar output: f : R^2 -> R, Jacobian shape must be (1, 2).
        def f(x):
            return x[0] * x[1]**2
        p = np.random.rand(2)
        self._check(central_difference_jacobian, f, 1, p, np.array([[p[1]**2, 2 * p[0] * p[1]]]))
# Allow running this test module directly, outside a test runner.
if __name__ == "__main__":
    unittest.main()
| 30.285124
| 116
| 0.560377
| 1,094
| 7,329
| 3.603291
| 0.099634
| 0.085236
| 0.073059
| 0.085236
| 0.834855
| 0.834855
| 0.831304
| 0.831304
| 0.820649
| 0.820649
| 0
| 0.045755
| 0.299222
| 7,329
| 241
| 117
| 30.410788
| 0.721768
| 0.086369
| 0
| 0.761905
| 0
| 0
| 0.001199
| 0
| 0
| 0
| 0
| 0
| 0.202381
| 1
| 0.142857
| false
| 0
| 0.02381
| 0.047619
| 0.25
| 0.005952
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
f02231f8080ee24555ffcbee63218ec93b78787a
| 234
|
py
|
Python
|
pretrain/modules/__init__.py
|
xiling42/VL-BERT
|
4573b4e1e82b6c092d4830d0b88821e9ee1a81fb
|
[
"MIT"
] | 671
|
2019-11-22T06:29:37.000Z
|
2022-03-26T02:06:11.000Z
|
pretrain/modules/__init__.py
|
xiling42/VL-BERT
|
4573b4e1e82b6c092d4830d0b88821e9ee1a81fb
|
[
"MIT"
] | 83
|
2019-12-01T04:14:29.000Z
|
2022-02-14T17:16:13.000Z
|
pretrain/modules/__init__.py
|
xiling42/VL-BERT
|
4573b4e1e82b6c092d4830d0b88821e9ee1a81fb
|
[
"MIT"
] | 123
|
2019-12-03T12:58:37.000Z
|
2022-03-30T02:42:01.000Z
|
from .resnet_vlbert_for_pretraining import ResNetVLBERTForPretraining
from .resnet_vlbert_for_pretraining_multitask import ResNetVLBERTForPretrainingMultitask
from .resnet_vlbert_for_attention_vis import ResNetVLBERTForAttentionVis
| 39
| 88
| 0.92735
| 23
| 234
| 8.956522
| 0.521739
| 0.145631
| 0.23301
| 0.276699
| 0.291262
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.059829
| 234
| 5
| 89
| 46.8
| 0.936364
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f03dd725d7535b418a3c15449aad96cc9262a536
| 21,839
|
py
|
Python
|
tensorlayerx/optimizers/torch_optimizers.py
|
tensorlayer/TensorLayerX
|
4e3e6f13687309dda7787f0b86e35a62bb3adbad
|
[
"Apache-2.0"
] | 34
|
2021-12-03T08:19:23.000Z
|
2022-03-13T08:34:34.000Z
|
tensorlayerx/optimizers/torch_optimizers.py
|
tensorlayer/TensorLayerX
|
4e3e6f13687309dda7787f0b86e35a62bb3adbad
|
[
"Apache-2.0"
] | null | null | null |
tensorlayerx/optimizers/torch_optimizers.py
|
tensorlayer/TensorLayerX
|
4e3e6f13687309dda7787f0b86e35a62bb3adbad
|
[
"Apache-2.0"
] | 3
|
2021-12-28T16:57:20.000Z
|
2022-03-18T02:23:14.000Z
|
#! /usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import torch.optim as optimizer
from torch.optim import _functional as F
import torch
from tensorlayerx.optimizers.lr import LRScheduler
__all__ = ['Adadelta', 'Adagrad', 'Adam', 'Adamax', 'Ftrl', 'Nadam', 'RMSprop', 'SGD', 'Momentum', 'Lamb', 'LARS']
class Adadelta(object):
    """Adadelta optimizer (TensorLayerX-style API over ``torch.optim.Adadelta``).

    Parameters
    ----------
    lr : float or LRScheduler
        Learning rate, or a scheduler resolved per step via ``get_lr``.
    rho : float
        Coefficient for the running average of squared gradients.
    eps : float
        Term added to the denominator for numerical stability.
    weight_decay : float
        Weight decay (L2 penalty).
    grad_clip : callable or None
        Optional gradient-clipping callable applied to the weights.
    """

    def __init__(
        self,
        lr=0.001,
        rho=0.95,
        eps=1e-10,
        weight_decay=0.0,
        grad_clip=None,
    ):
        self.lr = lr
        self.rho = rho
        self.eps = eps
        # The torch optimizer is built lazily on the first `gradient` call,
        # once the trainable weights are known.
        self.init_optim = False
        self.weight_decay = weight_decay
        self.grad_clip = grad_clip

    @torch.no_grad()
    def apply_gradients(self, grads_and_vars=None, closure=None):
        """Perform one Adadelta step; requires a prior `gradient` call.

        Raises AttributeError if the optimizer was never initialized.
        """
        if not self.init_optim:
            raise AttributeError("Can not apply gradients before zero_grad call.")
        loss = None
        if closure is not None:
            with torch.enable_grad():
                loss = closure()
        for group in self.optimizer_adadelta.param_groups:
            params_with_grad = []
            grads = []
            square_avgs = []
            acc_deltas = []
            # Resolve the (possibly scheduled) learning rate each step.
            lr, rho, eps, weight_decay = get_lr(self.lr), group['rho'], group['eps'], group['weight_decay']
            for p in group['params']:
                if p.grad is None:
                    continue
                params_with_grad.append(p)
                if p.grad.is_sparse:
                    raise RuntimeError('Adadelta does not support sparse gradients')
                grads.append(p.grad)
                state = self.optimizer_adadelta.state[p]
                # Lazy state initialization
                if len(state) == 0:
                    state['step'] = 0
                    state['square_avg'] = torch.zeros_like(p, memory_format=torch.preserve_format)
                    state['acc_delta'] = torch.zeros_like(p, memory_format=torch.preserve_format)
                square_avgs.append(state['square_avg'])
                acc_deltas.append(state['acc_delta'])
                state['step'] += 1
            F.adadelta(params_with_grad,
                       grads,
                       square_avgs,
                       acc_deltas,
                       lr=lr,
                       rho=rho,
                       eps=eps,
                       weight_decay=weight_decay)
        return loss

    def gradient(self, loss, weights=None, return_grad=True):
        """Backpropagate *loss*, optionally clip, and return the gradients."""
        if weights is None:
            raise AttributeError("Parameter train_weights must be entered.")
        if not self.init_optim:
            self.optimizer_adadelta = optimizer.Adadelta(
                params=weights, lr=get_lr(self.lr), rho=self.rho, eps=self.eps, weight_decay=self.weight_decay
            )
            self.init_optim = True
        self.optimizer_adadelta.zero_grad()
        loss.backward()
        if self.grad_clip is not None:
            self.grad_clip(weights)
        # Idiomatic truthiness test (was: `if return_grad ==True`).
        if return_grad:
            return _grads(weights)
        else:
            return None
class Adagrad(object):
    """Adagrad optimizer (TensorLayerX-style API over ``torch.optim.Adagrad``).

    NOTE(review): `gradient` forwards ``initial_accumulator_value`` to torch
    as ``lr_decay``, which looks like a parameter mix-up — confirm against
    the ``torch.optim.Adagrad`` signature before relying on either knob.
    """

    def __init__(
        self,
        lr=0.001,
        initial_accumulator_value=0.1,
        eps=1e-10,
        weight_decay=0.0,
        grad_clip=None,
    ):
        self.lr = lr
        self.initial_accumulator_value = initial_accumulator_value
        self.eps = eps
        # The torch optimizer is built lazily on the first `gradient` call.
        self.init_optim = False
        self.weight_decay = weight_decay
        self.grad_clip = grad_clip

    @torch.no_grad()
    def apply_gradients(self, grads_and_vars=None, closure=None):
        """Perform one Adagrad step; requires a prior `gradient` call."""
        if not self.init_optim:
            raise AttributeError("Can not apply gradients before zero_grad call.")
        loss = None
        if closure is not None:
            with torch.enable_grad():
                loss = closure()
        for group in self.optimizer_adagrad.param_groups:
            params_with_grad = []
            grads = []
            state_sums = []
            state_steps = []
            for p in group['params']:
                if p.grad is not None:
                    params_with_grad.append(p)
                    grads.append(p.grad)
                    state = self.optimizer_adagrad.state[p]
                    state_sums.append(state['sum'])
                    # update the steps for each param group update
                    state['step'] += 1
                    # record the step after step update
                    state_steps.append(state['step'])
            F.adagrad(params_with_grad,
                      grads,
                      state_sums,
                      state_steps,
                      lr=get_lr(self.lr),
                      weight_decay=group['weight_decay'],
                      lr_decay=group['lr_decay'],
                      eps=group['eps'])
        return loss

    def gradient(self, loss, weights=None, return_grad=True):
        """Backpropagate *loss*, optionally clip, and return the gradients."""
        if weights is None:
            raise AttributeError("Parameter train_weights must be entered.")
        if not self.init_optim:
            self.optimizer_adagrad = optimizer.Adagrad(
                params=weights, lr=get_lr(self.lr), lr_decay=self.initial_accumulator_value,
                weight_decay=self.weight_decay
            )
            self.init_optim = True
        self.optimizer_adagrad.zero_grad()
        loss.backward()
        if self.grad_clip is not None:
            self.grad_clip(weights)
        # Idiomatic truthiness test (was: `if return_grad ==True`).
        if return_grad:
            return _grads(weights)
        else:
            return None
class Adam(object):
    """Adam optimizer (TensorLayerX-style API over ``torch.optim.Adam``).

    ``beta_1``/``beta_2`` map to torch's ``betas`` pair; ``lr`` may be a
    float or an LRScheduler resolved per step via ``get_lr``.
    """

    def __init__(
        self,
        lr=0.001,
        beta_1=0.9,
        beta_2=0.999,
        eps=1e-8,
        weight_decay=0.0,
        grad_clip=None,
    ):
        self.lr = lr
        self.beta_1 = beta_1
        self.beta_2 = beta_2
        self.eps = eps
        # The torch optimizer is built lazily on the first `gradient` call.
        self.init_optim = False
        self.weight_decay = weight_decay
        self.grad_clip = grad_clip

    @torch.no_grad()
    def apply_gradients(self, grads_and_vars=None, closure=None):
        """Perform one Adam step; requires a prior `gradient` call."""
        if not self.init_optim:
            raise AttributeError("Can not apply gradients before zero_grad call.")
        loss = None
        if closure is not None:
            with torch.enable_grad():
                loss = closure()
        for group in self.optimizer_adam.param_groups:
            params_with_grad = []
            grads = []
            exp_avgs = []
            exp_avg_sqs = []
            max_exp_avg_sqs = []
            state_steps = []
            beta1, beta2 = group['betas']
            for p in group['params']:
                if p.grad is not None:
                    params_with_grad.append(p)
                    if p.grad.is_sparse:
                        raise RuntimeError('Adam does not support sparse gradients, please consider SparseAdam instead')
                    grads.append(p.grad)
                    state = self.optimizer_adam.state[p]
                    # Lazy state initialization
                    if len(state) == 0:
                        state['step'] = 0
                        # Exponential moving average of gradient values
                        state['exp_avg'] = torch.zeros_like(p, memory_format=torch.preserve_format)
                        # Exponential moving average of squared gradient values
                        state['exp_avg_sq'] = torch.zeros_like(p, memory_format=torch.preserve_format)
                        if group['amsgrad']:
                            # Maintains max of all exp. moving avg. of sq. grad. values
                            state['max_exp_avg_sq'] = torch.zeros_like(p, memory_format=torch.preserve_format)
                    exp_avgs.append(state['exp_avg'])
                    exp_avg_sqs.append(state['exp_avg_sq'])
                    if group['amsgrad']:
                        max_exp_avg_sqs.append(state['max_exp_avg_sq'])
                    # update the steps for each param group update
                    state['step'] += 1
                    # record the step after step update
                    state_steps.append(state['step'])
            F.adam(params_with_grad,
                   grads,
                   exp_avgs,
                   exp_avg_sqs,
                   max_exp_avg_sqs,
                   state_steps,
                   amsgrad=group['amsgrad'],
                   beta1=beta1,
                   beta2=beta2,
                   lr=get_lr(self.lr),
                   weight_decay=group['weight_decay'],
                   eps=group['eps'])
        return loss

    def gradient(self, loss, weights=None, return_grad=True):
        """Backpropagate *loss*, optionally clip, and return the gradients."""
        if weights is None:
            raise AttributeError("Parameter train_weights must be entered.")
        if not self.init_optim:
            self.optimizer_adam = optimizer.Adam(
                params=weights, lr=get_lr(self.lr), betas=(self.beta_1, self.beta_2), eps=self.eps,
                weight_decay=self.weight_decay
            )
            self.init_optim = True
        self.optimizer_adam.zero_grad()
        loss.backward()
        if self.grad_clip is not None:
            self.grad_clip(weights)
        # Idiomatic truthiness test (was: `if return_grad ==True`).
        if return_grad:
            return _grads(weights)
        else:
            return None
class Adamax(object):
    """Adamax optimizer (TensorLayerX-style API over ``torch.optim.Adamax``).

    ``beta_1``/``beta_2`` map to torch's ``betas`` pair; ``lr`` may be a
    float or an LRScheduler resolved per step via ``get_lr``.
    """

    def __init__(
        self,
        lr=0.001,
        beta_1=0.9,
        beta_2=0.999,
        eps=1e-8,
        weight_decay=0.0,
        grad_clip=None,
    ):
        self.lr = lr
        self.beta_1 = beta_1
        self.beta_2 = beta_2
        self.eps = eps
        # The torch optimizer is built lazily on the first `gradient` call.
        self.init_optim = False
        self.weight_decay = weight_decay
        self.grad_clip = grad_clip

    @torch.no_grad()
    def apply_gradients(self, grads_and_vars=None, closure=None):
        """Perform one Adamax step; requires a prior `gradient` call."""
        if not self.init_optim:
            raise AttributeError("Can not apply gradients before zero_grad call.")
        loss = None
        if closure is not None:
            with torch.enable_grad():
                loss = closure()
        for group in self.optimizer_adamax.param_groups:
            params_with_grad = []
            grads = []
            exp_avgs = []
            exp_infs = []
            state_steps = []
            beta1, beta2 = group['betas']
            eps = group['eps']
            # Resolve the (possibly scheduled) learning rate each step.
            lr = get_lr(self.lr)
            weight_decay = group['weight_decay']
            for p in group['params']:
                if p.grad is None:
                    continue
                params_with_grad.append(p)
                if p.grad.is_sparse:
                    raise RuntimeError('Adamax does not support sparse gradients')
                grads.append(p.grad)
                state = self.optimizer_adamax.state[p]
                # State initialization
                if len(state) == 0:
                    state['step'] = 0
                    state['exp_avg'] = torch.zeros_like(p, memory_format=torch.preserve_format)
                    state['exp_inf'] = torch.zeros_like(p, memory_format=torch.preserve_format)
                exp_avgs.append(state['exp_avg'])
                exp_infs.append(state['exp_inf'])
                state['step'] += 1
                state_steps.append(state['step'])
            F.adamax(params_with_grad,
                     grads,
                     exp_avgs,
                     exp_infs,
                     state_steps,
                     eps=eps,
                     beta1=beta1,
                     beta2=beta2,
                     lr=lr,
                     weight_decay=weight_decay)
        return loss

    def gradient(self, loss, weights=None, return_grad=True):
        """Backpropagate *loss*, optionally clip, and return the gradients."""
        if weights is None:
            raise AttributeError("Parameter train_weights must be entered.")
        if not self.init_optim:
            self.optimizer_adamax = optimizer.Adamax(
                params=weights, lr=get_lr(self.lr), betas=(self.beta_1, self.beta_2), eps=self.eps,
                weight_decay=self.weight_decay
            )
            self.init_optim = True
        self.optimizer_adamax.zero_grad()
        loss.backward()
        if self.grad_clip is not None:
            self.grad_clip(weights)
        # Idiomatic truthiness test (was: `if return_grad ==True`).
        if return_grad:
            return _grads(weights)
        else:
            return None
class Ftrl(object):
    """Placeholder for the FTRL optimizer; instantiation always fails."""

    def __init__(self):
        # Message kept byte-identical for callers that match on it.
        raise NotImplementedError("Ftrl optimizer is not implemented")

    def apply_gradients(self):
        """No-op stub kept for interface parity with real optimizers."""
        pass

    def gradient(self, train_weights=None):
        """No-op stub kept for interface parity with real optimizers."""
        pass
class Nadam(object):
    """Placeholder for the Nadam optimizer; instantiation always fails."""

    def __init__(self):
        # Message kept byte-identical for callers that match on it.
        raise NotImplementedError("Nadam optimizer is not implemented")

    def apply_gradients(self):
        """No-op stub kept for interface parity with real optimizers."""
        pass

    def gradient(self, train_weights=None):
        """No-op stub kept for interface parity with real optimizers."""
        pass
class RMSprop(object):
    """RMSprop optimizer (TensorLayerX-style API over ``torch.optim.RMSprop``).

    ``rho`` maps to torch's ``alpha`` smoothing constant; ``lr`` may be a
    float or an LRScheduler resolved per step via ``get_lr``.
    """

    def __init__(
        self,
        lr=0.001,
        rho=0.99,
        momentum=0.0,
        eps=1e-08,
        centered=False,
        weight_decay=0.0,
        grad_clip=None,
    ):
        self.lr = lr
        self.rho = rho
        self.momentum = momentum
        self.eps = eps
        self.centered = centered
        # The torch optimizer is built lazily on the first `gradient` call.
        self.init_optim = False
        self.weight_decay = weight_decay
        self.grad_clip = grad_clip

    @torch.no_grad()
    def apply_gradients(self, grads_and_vars=None, closure=None):
        """Perform one RMSprop step; requires a prior `gradient` call."""
        if not self.init_optim:
            raise AttributeError("Can not apply gradients before zero_grad call.")
        loss = None
        if closure is not None:
            with torch.enable_grad():
                loss = closure()
        for group in self.optimizer_rmsprop.param_groups:
            params_with_grad = []
            grads = []
            square_avgs = []
            grad_avgs = []
            momentum_buffer_list = []
            for p in group['params']:
                if p.grad is None:
                    continue
                params_with_grad.append(p)
                if p.grad.is_sparse:
                    raise RuntimeError('RMSprop does not support sparse gradients')
                grads.append(p.grad)
                state = self.optimizer_rmsprop.state[p]
                # State initialization
                if len(state) == 0:
                    state['step'] = 0
                    state['square_avg'] = torch.zeros_like(p, memory_format=torch.preserve_format)
                    if group['momentum'] > 0:
                        state['momentum_buffer'] = torch.zeros_like(p, memory_format=torch.preserve_format)
                    if group['centered']:
                        state['grad_avg'] = torch.zeros_like(p, memory_format=torch.preserve_format)
                square_avgs.append(state['square_avg'])
                if group['momentum'] > 0:
                    momentum_buffer_list.append(state['momentum_buffer'])
                if group['centered']:
                    grad_avgs.append(state['grad_avg'])
                state['step'] += 1
            F.rmsprop(params_with_grad,
                      grads,
                      square_avgs,
                      grad_avgs,
                      momentum_buffer_list,
                      lr=get_lr(self.lr),
                      alpha=group['alpha'],
                      eps=group['eps'],
                      weight_decay=group['weight_decay'],
                      momentum=group['momentum'],
                      centered=group['centered'])
        return loss

    def gradient(self, loss, weights=None, return_grad=True):
        """Backpropagate *loss*, optionally clip, and return the gradients."""
        if weights is None:
            raise AttributeError("Parameter train_weights must be entered.")
        if not self.init_optim:
            self.optimizer_rmsprop = optimizer.RMSprop(
                params=weights, lr=get_lr(self.lr), alpha=self.rho, eps=self.eps, momentum=self.momentum,
                centered=self.centered, weight_decay=self.weight_decay
            )
            self.init_optim = True
        self.optimizer_rmsprop.zero_grad()
        loss.backward()
        if self.grad_clip is not None:
            self.grad_clip(weights)
        # Idiomatic truthiness test (was: `if return_grad ==True`).
        if return_grad:
            return _grads(weights)
        else:
            return None
class SGD(object):
    """SGD optimizer (TensorLayerX-style API over ``torch.optim.SGD``).

    ``lr`` may be a float or an LRScheduler resolved per step via ``get_lr``.
    """

    def __init__(
        self,
        lr=0.001,
        momentum=0,
        weight_decay=0.0,
        grad_clip=None,
    ):
        self.lr = lr
        self.momentum = momentum
        # The torch optimizer is built lazily on the first `gradient` call.
        self.init_optim = False
        self.weight_decay = weight_decay
        self.grad_clip = grad_clip

    @torch.no_grad()
    def apply_gradients(self, grads_and_vars=None, closure=None):
        """Perform one SGD step; requires a prior `gradient` call."""
        if not self.init_optim:
            raise AttributeError("Can not apply gradients before zero_grad call.")
        loss = None
        if closure is not None:
            with torch.enable_grad():
                loss = closure()
        for group in self.optimizer_sgd.param_groups:
            params_with_grad = []
            d_p_list = []
            momentum_buffer_list = []
            weight_decay = group['weight_decay']
            momentum = group['momentum']
            dampening = group['dampening']
            nesterov = group['nesterov']
            # Resolve the (possibly scheduled) learning rate each step.
            lr = get_lr(self.lr)
            for p in group['params']:
                if p.grad is not None:
                    params_with_grad.append(p)
                    d_p_list.append(p.grad)
                    state = self.optimizer_sgd.state[p]
                    if 'momentum_buffer' not in state:
                        momentum_buffer_list.append(None)
                    else:
                        momentum_buffer_list.append(state['momentum_buffer'])
            F.sgd(params_with_grad,
                  d_p_list,
                  momentum_buffer_list,
                  weight_decay=weight_decay,
                  momentum=momentum,
                  lr=lr,
                  dampening=dampening,
                  nesterov=nesterov)
            # update momentum_buffers in state
            for p, momentum_buffer in zip(params_with_grad, momentum_buffer_list):
                state = self.optimizer_sgd.state[p]
                state['momentum_buffer'] = momentum_buffer
        return loss

    def gradient(self, loss, weights=None, return_grad=True):
        """Backpropagate *loss*, optionally clip, and return the gradients."""
        if weights is None:
            raise AttributeError("Parameter train_weights must be entered.")
        if not self.init_optim:
            self.optimizer_sgd = optimizer.SGD(
                params=weights, lr=get_lr(self.lr), momentum=self.momentum, weight_decay=self.weight_decay
            )
            self.init_optim = True
        self.optimizer_sgd.zero_grad()
        loss.backward()
        if self.grad_clip is not None:
            self.grad_clip(weights)
        # Idiomatic truthiness test (was: `if return_grad ==True`).
        if return_grad:
            return _grads(weights)
        else:
            return None
class Momentum(object):
    """Momentum-SGD optimizer: same update path as ``SGD`` but exposes the
    ``nesterov`` flag and forwards it to ``torch.optim.SGD``.
    """

    def __init__(
        self,
        lr=0.001,
        momentum=0,
        weight_decay=0.0,
        nesterov=False,
        grad_clip=None,
    ):
        self.lr = lr
        self.momentum = momentum
        # The torch optimizer is built lazily on the first `gradient` call.
        self.init_optim = False
        self.weight_decay = weight_decay
        self.nesterov = nesterov
        self.grad_clip = grad_clip

    @torch.no_grad()
    def apply_gradients(self, grads_and_vars=None, closure=None):
        """Perform one (Nesterov-)momentum SGD step; requires a prior `gradient` call."""
        if not self.init_optim:
            raise AttributeError("Can not apply gradients before zero_grad call.")
        loss = None
        if closure is not None:
            with torch.enable_grad():
                loss = closure()
        for group in self.optimizer_momentum.param_groups:
            params_with_grad = []
            d_p_list = []
            momentum_buffer_list = []
            weight_decay = group['weight_decay']
            momentum = group['momentum']
            dampening = group['dampening']
            nesterov = group['nesterov']
            # Resolve the (possibly scheduled) learning rate each step.
            lr = get_lr(self.lr)
            for p in group['params']:
                if p.grad is not None:
                    params_with_grad.append(p)
                    d_p_list.append(p.grad)
                    state = self.optimizer_momentum.state[p]
                    if 'momentum_buffer' not in state:
                        momentum_buffer_list.append(None)
                    else:
                        momentum_buffer_list.append(state['momentum_buffer'])
            F.sgd(params_with_grad,
                  d_p_list,
                  momentum_buffer_list,
                  weight_decay=weight_decay,
                  momentum=momentum,
                  lr=lr,
                  dampening=dampening,
                  nesterov=nesterov)
            # update momentum_buffers in state
            for p, momentum_buffer in zip(params_with_grad, momentum_buffer_list):
                state = self.optimizer_momentum.state[p]
                state['momentum_buffer'] = momentum_buffer
        return loss

    def gradient(self, loss, weights=None, return_grad=True):
        """Backpropagate *loss*, optionally clip, and return the gradients."""
        if weights is None:
            raise AttributeError("Parameter train_weights must be entered.")
        if not self.init_optim:
            self.optimizer_momentum = optimizer.SGD(
                params=weights, lr=get_lr(self.lr), momentum=self.momentum, weight_decay=self.weight_decay, nesterov=self.nesterov
            )
            self.init_optim = True
        self.optimizer_momentum.zero_grad()
        loss.backward()
        if self.grad_clip is not None:
            self.grad_clip(weights)
        # Idiomatic truthiness test (was: `if return_grad ==True`).
        if return_grad:
            return _grads(weights)
        else:
            return None
def Lamb(**kwargs):
    """Unimplemented Lamb optimizer factory.

    Raises
    ------
    NotImplementedError
        Always. Changed from a bare ``Exception`` for consistency with the
        Ftrl/Nadam stubs; NotImplementedError is an Exception subclass, so
        existing ``except Exception`` callers still work.
    """
    raise NotImplementedError('Lamb optimizer function not implemented')
def LARS(**kwargs):
    """Unimplemented LARS optimizer factory.

    Raises
    ------
    NotImplementedError
        Always. Changed from a bare ``Exception`` for consistency with the
        Ftrl/Nadam stubs; NotImplementedError is an Exception subclass, so
        existing ``except Exception`` callers still work.
    """
    raise NotImplementedError('LARS optimizer function not implemented')
def _grads(weights):
grads = []
for w in weights:
grads.append(w.grad)
return grads
def get_lr(lr):
    """Resolve *lr*: call it when it is an LRScheduler, else return it as-is."""
    if not isinstance(lr, LRScheduler):
        return lr
    return lr()
| 32.069016
| 130
| 0.535647
| 2,430
| 21,839
| 4.595062
| 0.069136
| 0.056153
| 0.032599
| 0.016299
| 0.856708
| 0.834139
| 0.813183
| 0.794197
| 0.781121
| 0.76715
| 0
| 0.009058
| 0.378222
| 21,839
| 680
| 131
| 32.116176
| 0.813241
| 0.023582
| 0
| 0.779412
| 0
| 0
| 0.076259
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.056985
| false
| 0.007353
| 0.009191
| 0
| 0.126838
| 0.001838
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e8f8376076961e379ba46aa1b139740c0af91eee
| 62,052
|
py
|
Python
|
infoblox_netmri/api/broker/v3_8_0/if_arp_broker.py
|
infobloxopen/infoblox_netmri
|
aa1c744df7e439dbe163bb9edd165e4e85a9771b
|
[
"Apache-2.0"
] | 12
|
2016-02-19T12:37:54.000Z
|
2022-03-04T20:11:08.000Z
|
infoblox_netmri/api/broker/v3_8_0/if_arp_broker.py
|
infobloxopen/infoblox_netmri
|
aa1c744df7e439dbe163bb9edd165e4e85a9771b
|
[
"Apache-2.0"
] | 18
|
2015-11-12T18:37:00.000Z
|
2021-05-19T07:59:55.000Z
|
infoblox_netmri/api/broker/v3_8_0/if_arp_broker.py
|
infobloxopen/infoblox_netmri
|
aa1c744df7e439dbe163bb9edd165e4e85a9771b
|
[
"Apache-2.0"
] | 18
|
2016-01-07T12:04:34.000Z
|
2022-03-31T11:05:41.000Z
|
from ..broker import Broker
class IfArpBroker(Broker):
controller = "if_arps"
def show(self, **kwargs):
    """Show the details for the specified if arp.

    :param IfArpID: (required) The internal NetMRI identifier for this ARP
        entry.
    :type IfArpID: Integer

    :param methods: A list of if arp methods. The listed methods will be
        called on each if arp returned and included in the output. Available
        methods are: remote_device, remote_interface, data_source, device,
        interface, vrf, infradevice.
    :type methods: Array of String

    :param include: A list of associated object types to include in the
        output, returned as outputs named according to the association name.
        Available includes are: remote_device, remote_interface, data_source,
        device, interface, vrf.
    :type include: Array of String

    :return if_arp: The if arp identified by the specified IfArpID.
    :rtype if_arp: IfArp
    """
    # Resolve the controller-qualified method name once, then issue the call.
    full_name = self._get_method_fullname("show")
    return self.api_request(full_name, kwargs)
def index(self, **kwargs):
    """List the available if arps.

    Any of the inputs listed may be used to narrow the list; other inputs
    will be ignored. Of the various ways to query lists, using this method
    is most efficient.

    Each field filter accepts a single value on API 2.3-2.4 and an array of
    values on API 2.5 and later.

    :param DeviceID: The device from which this ARP entry was collected.
    :param IPAddrDotted: The IP address for this ARP entry, in dotted
        (or colon-delimited for IPv6) format.
    :param IPAddrNumeric: The numerical value of the IP address.
    :param IfArpID: The internal NetMRI identifier for this ARP entry.
    :param InterfaceID: The local interface for this ARP table entry.
    :param PhysicalAddr: The physical address for this ARP entry.
    :param VrfID: The VRF to which this ARP entry applies, if available.
    :param DeviceGroupID: The device groups to which to limit the results
        (Array of Integer).
    :param timestamp: Return the if arps as of this date and time; if
        omitted, the most recently collected data (DateTime).
    :param methods: if arp methods to call on each returned if arp and
        include in the output. Available: remote_device, remote_interface,
        data_source, device, interface, vrf, infradevice.
    :param include: Associated object types to include in the output.
        Available: remote_device, remote_interface, data_source, device,
        interface, vrf.
    :param start: The record number to return in the selected page of data
        (default 0).
    :param limit: The size of the page of data; maximum 10000 (default 1000).
    :param sort: The data field(s) to use for sorting the output (default
        IfArpID). Valid values are IfArpID, DataSourceID, InterfaceID,
        DeviceID, ifIndex, ArpStartTime, ArpEndTime, ArpChangedCols,
        ArpTimestamp, PhysicalAddr, IPAddrNumeric, IPAddrDotted, ArpDeviceID,
        ArpInterfaceID, VrfID.
    :param dir: The direction(s) in which to sort the data, 'asc' or 'desc'
        (default asc).
    :param select: The attributes to return for each IfArp; all attributes
        if empty or omitted.
    :param goto_field: The field name for NIOS GOTO row positioning
        (API 2.8+).
    :param goto_value: The value of goto_field for NIOS GOTO row positioning
        (API 2.8+).

    :return if_arps: An array of the IfArp objects that match the specified
        input criteria.
    :rtype if_arps: Array of IfArp
    """
    # Resolve the controller-qualified method name once, then issue the call.
    full_name = self._get_method_fullname("index")
    return self.api_list_request(full_name, kwargs)
def search(self, **kwargs):
    """List the available if arps matching the input criteria.

    This method provides a more flexible search interface than the index
    method, but searching using this method is more demanding on the system
    and will not perform to the same level as the index method. The input
    fields listed below are used as in the index method to filter the
    result, along with the optional query string and XML filter described
    below.

    Each field filter accepts a single value on API 2.3-2.4 and an array of
    values on API 2.5 and later.

    :param ArpChangedCols: The fields that changed between this revision of
        the record and the previous revision.
    :param ArpDeviceID: The remote device to which the ARP entry refers, if
        available.
    :param ArpEndTime: The ending effective time of this record, or empty if
        still in effect.
    :param ArpInterfaceID: The remote interface to which the ARP entry
        refers, if available.
    :param ArpStartTime: The starting effective time of this record.
    :param ArpTimestamp: The date and time this record was collected or
        calculated.
    :param DataSourceID: The collector NetMRI that collected this data
        record.
    :param DeviceID: The device from which this ARP entry was collected.
    :param IPAddrDotted: The IP address for this ARP entry, in dotted
        (or colon-delimited for IPv6) format.
    :param IPAddrNumeric: The numerical value of the IP address.
    :param IfArpID: The internal NetMRI identifier for this ARP entry.
    :param InterfaceID: The local interface for this ARP table entry.
    :param PhysicalAddr: The physical address for this ARP entry.
    :param VrfID: The VRF to which this ARP entry applies, if available.
    :param ifIndex: The SNMP index for the local interface for this ARP
        table entry.
    :param DeviceGroupID: The device groups to which to limit the results
        (Array of Integer).
    :param timestamp: Return the if arps as of this date and time; if
        omitted, the most recently collected data (DateTime).
    :param methods: if arp methods to call on each returned if arp and
        include in the output. Available: remote_device, remote_interface,
        data_source, device, interface, vrf, infradevice.
    :param include: Associated object types to include in the output.
        Available: remote_device, remote_interface, data_source, device,
        interface, vrf.
    :param start: The record number to return in the selected page of data
        (default 0).
    :param limit: The size of the page of data; maximum 10000 (default 1000).
    :param sort: The data field(s) to use for sorting the output (default
        IfArpID). Valid values are IfArpID, DataSourceID, InterfaceID,
        DeviceID, ifIndex, ArpStartTime, ArpEndTime, ArpChangedCols,
        ArpTimestamp, PhysicalAddr, IPAddrNumeric, IPAddrDotted, ArpDeviceID,
        ArpInterfaceID, VrfID.
    :param dir: The direction(s) in which to sort the data, 'asc' or 'desc'
        (default asc).
    :param select: The attributes to return for each IfArp; all attributes
        if empty or omitted.
    :param goto_field: The field name for NIOS GOTO row positioning
        (API 2.8+).
    :param goto_value: The value of goto_field for NIOS GOTO row positioning
        (API 2.8+).
    :param query: This value will be matched against if arps, looking to see
        if one or more of the listed attributes contain the passed value.
        You may also surround the value with '/' and '/' to perform a
        regular expression search rather than a containment operation. Any
        record that matches will be returned. The attributes searched are:
        ArpChangedCols, ArpDeviceID, ArpEndTime, ArpInterfaceID,
        ArpStartTime, ArpTimestamp, DataSourceID, DeviceID, IPAddrDotted,
        IPAddrNumeric, IfArpID, InterfaceID, PhysicalAddr, VrfID, ifIndex.
    :param xml_filter: A SetFilter XML structure to further refine the
        search, applied AFTER any search query or field values but before
        any limit options; may be costly if not associated with database
        filtering (API 2.3+).

    :return if_arps: An array of the IfArp objects that match the specified
        input criteria.
    :rtype if_arps: Array of IfArp
    """
    # Resolve the controller-qualified method name once, then issue the call.
    full_name = self._get_method_fullname("search")
    return self.api_list_request(full_name, kwargs)
def find(self, **kwargs):
    """List the available if arps matching a flexible field-level specification.

    This is the most flexible of the query mechanisms, allowing comparison
    operations other than equality, but it is more complex to use and will
    not perform as efficiently as the index or search methods.

    The searchable fields are: ArpChangedCols, ArpDeviceID, ArpEndTime,
    ArpInterfaceID, ArpStartTime, ArpTimestamp, DataSourceID, DeviceID,
    IPAddrDotted, IPAddrNumeric, IfArpID, InterfaceID, PhysicalAddr,
    VrfID, ifIndex.

    **Inputs**

    For each searchable FIELD above, three optional inputs are recognized:

    :param op_FIELD: The operator to apply to FIELD. Valid values are:
        =, <>, rlike, not rlike, >, >=, <, <=, like, not like, is null,
        is not null, between. For the between operator the value will be
        treated as an Array if a comma delimited string is passed, and it
        must contain an even number of values.
    :type op_FIELD: String
    :param val_f_FIELD: If op_FIELD is specified, the field named in this
        input will be compared to the value in FIELD using the specified
        operator; the value is treated as another field name rather than a
        constant. Either this or val_c_FIELD must be given with op_FIELD.
    :type val_f_FIELD: String
    :param val_c_FIELD: If op_FIELD is specified, this explicit constant
        value will be compared to the value in FIELD using the specified
        operator. Either this or val_f_FIELD must be given with op_FIELD.
    :type val_c_FIELD: String

    General options (all optional):

    :param DeviceGroupID: Device groups to which to limit the results.
    :type DeviceGroupID: Array of Integer
    :param timestamp: Return data as of this date and time; if omitted, the
        result reflects the most recently collected data.
    :type timestamp: DateTime
    :param methods: If arp methods called on each returned record and
        included in the output. Available methods are: remote_device,
        remote_interface, data_source, device, interface, vrf, infradevice.
    :type methods: Array of String
    :param include: Associated object types to include in the output,
        returned as outputs named after the association. Available includes
        are: remote_device, remote_interface, data_source, device,
        interface, vrf.
    :type include: Array of String
    :param start: The record number to return in the selected page of data
        (default 0). It will always appear, although it may not be the
        first record.
    :type start: Integer
    :param limit: The size of the page of data, that is, the maximum number
        of records returned (default 1000, maximum 10000).
    :type limit: Integer
    :param sort: The data field(s) to use for sorting the output. Default
        is IfArpID. Valid values are IfArpID, DataSourceID, InterfaceID,
        DeviceID, ifIndex, ArpStartTime, ArpEndTime, ArpChangedCols,
        ArpTimestamp, PhysicalAddr, IPAddrNumeric, IPAddrDotted,
        ArpDeviceID, ArpInterfaceID, VrfID.
    :type sort: Array of String
    :param dir: The direction(s) in which to sort the data. Default is
        'asc'. Valid values are 'asc' and 'desc'.
    :type dir: Array of String
    :param select: The list of attributes to return for each IfArp; if
        empty or omitted, all attributes are returned.
    :type select: Array
    :param goto_field: The field name for NIOS GOTO that is used for
        locating a row position of records (api version min: 2.8).
    :type goto_field: String
    :param goto_value: The value of goto_field for NIOS GOTO that is used
        for locating a row position of records (api version min: 2.3).
    :type goto_value: String
    :param xml_filter: A SetFilter XML structure to further refine the
        search. Applied AFTER any search query or field values, but before
        any limit options; limit and pagination are enforced after the
        filter. This kind of filter may be costly and inefficient if not
        associated with a database filtering.
    :type xml_filter: String

    **Outputs**

    :return if_arps: An array of the IfArp objects that match the
        specified input criteria.
    :rtype if_arps: Array of IfArp
    """
    method = self._get_method_fullname("find")
    return self.api_list_request(method, kwargs)
def interface(self, **kwargs):
    """The local interface for this ARP table entry.

    **Inputs**

    :param IfArpID: The internal NetMRI identifier for this ARP entry.
        Required.
    :type IfArpID: Integer

    **Outputs**

    :return: The local interface for this ARP table entry.
    :rtype: Interface
    """
    method = self._get_method_fullname("interface")
    return self.api_request(method, kwargs)
def remote_device(self, **kwargs):
    """The remote device to which the ARP entry refers, if available.

    **Inputs**

    :param IfArpID: The internal NetMRI identifier for this ARP entry.
        Required.
    :type IfArpID: Integer

    **Outputs**

    :return: The remote device to which the ARP entry refers, if available.
    :rtype: Device
    """
    method = self._get_method_fullname("remote_device")
    return self.api_request(method, kwargs)
def remote_interface(self, **kwargs):
    """The remote interface to which the ARP entry refers, if available.

    **Inputs**

    :param IfArpID: The internal NetMRI identifier for this ARP entry.
        Required.
    :type IfArpID: Integer

    **Outputs**

    :return: The remote interface to which the ARP entry refers, if
        available.
    :rtype: Interface
    """
    method = self._get_method_fullname("remote_interface")
    return self.api_request(method, kwargs)
def infradevice(self, **kwargs):
    """The device from which this ARP entry was collected.

    **Inputs**

    :param IfArpID: The internal NetMRI identifier for this ARP entry.
        Required.
    :type IfArpID: Integer

    **Outputs**

    :return: The device from which this ARP entry was collected.
    :rtype: InfraDevice
    """
    method = self._get_method_fullname("infradevice")
    return self.api_request(method, kwargs)
def device(self, **kwargs):
    """The device from which this ARP entry was collected.

    **Inputs**

    :param IfArpID: The internal NetMRI identifier for this ARP entry.
        Required.
    :type IfArpID: Integer

    **Outputs**

    :return: The device from which this ARP entry was collected.
    :rtype: Device
    """
    method = self._get_method_fullname("device")
    return self.api_request(method, kwargs)
| 50.001612
| 594
| 0.596854
| 7,557
| 62,052
| 4.852587
| 0.039698
| 0.077445
| 0.05034
| 0.081154
| 0.953151
| 0.952333
| 0.931608
| 0.919255
| 0.91331
| 0.912056
| 0
| 0.005096
| 0.316863
| 62,052
| 1,240
| 595
| 50.041935
| 0.859991
| 0.809337
| 0
| 0
| 0
| 0
| 0.066448
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.428571
| false
| 0
| 0.047619
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
3332c3d3d91bcaa3cd64d074ffb8aae40b764324
| 33,668
|
py
|
Python
|
equations_gallery.py
|
VLSF/Laplace-Beltrami-Multigrid
|
9d0cb37f7585eb59b75f23f78a8d74c6b4afc579
|
[
"MIT"
] | 1
|
2018-12-04T06:44:52.000Z
|
2018-12-04T06:44:52.000Z
|
equations_gallery.py
|
VLSF/Laplace-Beltrami-Multigrid
|
9d0cb37f7585eb59b75f23f78a8d74c6b4afc579
|
[
"MIT"
] | null | null | null |
equations_gallery.py
|
VLSF/Laplace-Beltrami-Multigrid
|
9d0cb37f7585eb59b75f23f78a8d74c6b4afc579
|
[
"MIT"
] | 1
|
2018-10-08T14:44:19.000Z
|
2018-10-08T14:44:19.000Z
|
import numpy as np
from harmonic_equation import harmonic_equation
from equation import equation
import low_level_tools as llt
################################################################################
def eq_11_bc(current):
    """Impose Dirichlet data u = exp(x*y) on the boundary of current[0].

    The grid is uniform on [0, 1] x [0, 1] (meshgrid with 'ij' indexing,
    sized from the row count of current[0]).  Only the four boundary
    edges are overwritten; interior values are untouched.  Mutates and
    returns ``current``.
    """
    u = current[0]
    pts = np.linspace(0.0, 1.0, u.shape[0])
    xg, yg = np.meshgrid(pts, pts, indexing='ij')
    target = np.exp(xg * yg)
    u[0, :], u[-1, :] = target[0, :], target[-1, :]
    u[:, 0], u[:, -1] = target[:, 0], target[:, -1]
    return current
def eq_11_exact(current):
    """Return the exact solution u(x, y) = exp(x*y) for equation 11.

    The grid is uniform on [0, 1] x [0, 1], sized from the row count of
    current[0].  Returns a numpy array of shape (1, N, N).
    """
    n = current[0].shape[0]
    pts = np.linspace(0.0, 1.0, n)
    xg, yg = np.meshgrid(pts, pts, indexing='ij')
    return np.array([np.exp(xg * yg)])
def eq_11_rhs(current):
    """Right-hand side for equation 11 on a uniform [0, 1]^2 grid.

    Evaluates the manufactured source term and zeroes it on the four
    boundary edges (homogeneous boundary for the residual).  Returns a
    numpy array of shape (1, N, N); does not modify ``current``.
    """
    n = current[0].shape[0]
    pts = np.linspace(0.0, 1.0, n)
    xg, yg = np.meshgrid(pts, pts, indexing='ij')
    source = ((1 + np.exp(xg * yg)) * xg ** 2
              + (2 + np.cos(np.pi * xg)) * yg ** 2
              + (1 + xg * yg) * np.exp(-xg * yg)
              + yg * np.exp(xg) + xg * np.exp(yg)
              + np.sin(np.pi * xg * yg))
    f = source * np.exp(xg * yg)
    # Zero out the boundary rows and columns.
    f[0, :] = 0
    f[-1, :] = 0
    f[:, -1] = 0
    f[:, 0] = 0
    return np.array([f])
def eq_11_coeff(current):
    """Variable coefficient fields for equation 11.

    Returns a numpy array of shape (1, 6, N, N) holding, in order, the
    six coefficient grids (a11 .. f11) evaluated on the uniform
    [0, 1] x [0, 1] grid sized from the row count of current[0].
    """
    n = current[0].shape[0]
    pts = np.linspace(0.0, 1.0, n)
    xg, yg = np.meshgrid(pts, pts, indexing='ij')
    fields = [
        1 + np.exp(xg * yg),       # a11
        2 + np.cos(np.pi * xg),    # b11
        np.exp(-xg * yg),          # c11
        np.exp(xg),                # d11
        np.exp(yg),                # e11
        np.sin(np.pi * xg * yg),   # f11
    ]
    return np.array([fields])
################################################################################
def eq_red_fox_bc(current):
    """Write the Dirichlet boundary values of u_exact = exp(x*y) onto the four
    edges of current[0], in place, and return current."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    u_exact = np.exp(x * y)
    current[0][0, :] = u_exact[0, :]
    current[0][-1, :] = u_exact[-1, :]
    current[0][:, 0] = u_exact[:, 0]
    current[0][:, -1] = u_exact[:, -1]
    return current
def eq_red_fox_exact(current):
    """Exact solution u(x, y) = exp(x*y) of the 'red fox' equation on the grid
    of ``current``."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    u_exact = np.exp(x * y)
    return np.array([u_exact])
def eq_red_fox_rhs(current, a=1):
    """Right-hand side of the 'red fox' equation (Laplacian plus a*dx advection)
    for u_exact = exp(x*y); boundary entries are zeroed."""
    n_points = current[0].shape[0]
    grid = np.linspace(0, 1, n_points)
    xx, yy = np.meshgrid(grid, grid, indexing='ij')
    field = (xx ** 2 + yy ** 2 + a * yy) * np.exp(xx * yy)
    # Boundary values are fixed by the Dirichlet BC, so the RHS there is zero.
    field[0, :] = field[-1, :] = field[:, 0] = field[:, -1] = 0
    return np.array([field])
def eq_red_fox_coeff(current, a=1):
    """Constant coefficient planes of the 'red fox' operator:
    d2y + d2x + a*dx (slots [a, b, c, d, e, f] = (d2y, d2x, dxdy, dx, dy, id))."""
    n_points = current[0].shape[0]
    shape = (n_points, n_points)
    coeff_row = [
        np.ones(shape),        # d2y
        np.ones(shape),        # d2x
        np.zeros(shape),       # dxdy
        a * np.ones(shape),    # dx (advection strength)
        np.zeros(shape),       # dy
        np.zeros(shape),       # identity
    ]
    return np.array([coeff_row])
################################################################################
def eq_00_bc(current):
    """Write the Dirichlet boundary values of u_exact = sin(pi*x)*sin(pi*y)/2
    onto the four edges of current[0], in place, and return current."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    u_exact = np.sin(np.pi * x) * np.sin(np.pi * y) / 2
    current[0][0, :] = u_exact[0, :]
    current[0][-1, :] = u_exact[-1, :]
    current[0][:, 0] = u_exact[:, 0]
    current[0][:, -1] = u_exact[:, -1]
    return current
def eq_00_exact(current):
    """Exact solution of equation 00: u(x, y) = sin(pi*x) * sin(pi*y) / 2."""
    n_points = current[0].shape[0]
    grid = np.linspace(0, 1, n_points)
    xx, yy = np.meshgrid(grid, grid, indexing='ij')
    return np.array([np.sin(np.pi * xx) * np.sin(np.pi * yy) / 2])
def eq_00_rhs(current):
    """Analytic right-hand side L[u_exact] of equation 00 for
    u_exact = sin(pi*x)*sin(pi*y)/2, with boundary entries zeroed."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    # Operator terms match eq_00_coeff applied to the exact solution:
    # (a + b)*u_xx/u_yy parts, c*u_xy, d*u_x, e*u_y and f*u.
    u_rhs = -np.pi ** 2 * np.sin(np.pi * x) * np.sin(np.pi * y) * (
            4 + y * np.cos(x * np.pi) + 4 + x * np.exp(-x * y)) / 2 + \
            np.pi ** 2 * np.cos(np.pi * x) * np.cos(np.pi * y) * np.exp(y * x) / 2 + \
            np.pi * np.cos(np.pi * x) * np.sin(np.pi * y) * x * y ** 3 / 2 + \
            np.pi * np.sin(np.pi * x) * np.cos(np.pi * y) * (y + x ** 2 + 0.2) / 2 + \
            np.sinh(x + 3 * y) * np.sin(np.pi * x) * np.sin(np.pi * y) / 2
    u_rhs[0, :] = 0;
    u_rhs[N - 1, :] = 0;
    u_rhs[:, N - 1] = 0;
    u_rhs[:, 0] = 0
    rhs = np.array([u_rhs])
    return rhs
def eq_00_coeff(current):
    """Coefficient planes [a, b, c, d, e, f] = (d2y, d2x, dxdy, dx, dy, identity)
    of the quasilinear operator of equation 00."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    ###
    a11 = 4 + y * np.cos(x * np.pi)
    b11 = 4 + x * np.exp(-x * y)
    c11 = np.exp(y * x)
    d11 = x * y ** 3
    e11 = y + x ** 2 + 0.2
    f11 = np.sinh(x + 3 * y)
    ###
    coeff1 = [a11, b11, c11, d11, e11, f11]
    coeff = np.array([coeff1])
    return coeff
################################################################################
def eq_12_bc(current):
    """Write the Dirichlet boundary values of u_exact = exp(x + y) onto the four
    edges of current[0], in place, and return current."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    u_exact = np.exp(x + y)
    current[0][0, :] = u_exact[0, :]
    current[0][-1, :] = u_exact[-1, :]
    current[0][:, 0] = u_exact[:, 0]
    current[0][:, -1] = u_exact[:, -1]
    return current
def eq_12_exact(current):
    """Exact solution u(x, y) = exp(x + y) of equation 12 on the grid of
    ``current``."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    u_exact = np.exp(x + y)
    return np.array([u_exact])
def eq_12_rhs(current):
    """Analytic right-hand side L[u_exact] of equation 12 for
    u_exact = exp(x + y), with boundary entries zeroed."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    # For u = exp(x + y) every derivative equals u, so the RHS is just the
    # sum of all eq_12_coeff coefficients times u.
    u_rhs = (4 + np.cos(2 * np.pi * x * y) + 2 + np.sin(np.pi * x * y) + np.exp(-x * y) \
             + np.exp(x) + np.exp(y) + np.sin(np.pi * x * y) + 2) * np.exp(x + y)
    u_rhs[0, :] = 0;
    u_rhs[N - 1, :] = 0;
    u_rhs[:, N - 1] = 0;
    u_rhs[:, 0] = 0
    rhs = np.array([u_rhs])
    return rhs
def eq_12_coeff(current):
    """Coefficient planes [a, b, c, d, e, f] = (d2y, d2x, dxdy, dx, dy, identity)
    of the quasilinear operator of equation 12."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    ###
    a11 = 4 + np.cos(2 * np.pi * x * y)
    b11 = 2 + np.sin(np.pi * x * y)
    c11 = np.exp(-x * y)
    d11 = np.exp(x)
    e11 = np.exp(y)
    f11 = np.sin(np.pi * x * y) + 2
    ###
    coeff1 = [a11, b11, c11, d11, e11, f11]
    coeff = np.array([coeff1])
    return coeff
################################################################################
def eq_13_bc(current):
    """Write the Dirichlet boundary values of u_exact = y*exp(x) onto the four
    edges of current[0], in place, and return current."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    u_exact = y * np.exp(x)
    current[0][0, :] = u_exact[0, :]
    current[0][-1, :] = u_exact[-1, :]
    current[0][:, 0] = u_exact[:, 0]
    current[0][:, -1] = u_exact[:, -1]
    return current
def eq_13_exact(current):
    """Exact solution u(x, y) = y*exp(x) of equation 13 on the grid of
    ``current``."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    u_exact = y * np.exp(x)
    return np.array([u_exact])
def eq_13_rhs(current):
    """Analytic right-hand side L[u_exact] of equation 13 for u_exact = y*exp(x),
    with boundary entries zeroed.  Note u_yy = 0, so the a-coefficient of
    eq_13_coeff contributes nothing here."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    u_rhs = (2 + x * np.exp(x * y) + 6 + np.sin(np.pi * x * y)) * y * np.exp(x) + \
            x * np.exp(-x * y) * np.exp(x) + y ** 2 * np.exp(2 * x) + x * y ** 2 * np.exp(x) * np.exp(y)
    u_rhs[0, :] = 0;
    u_rhs[N - 1, :] = 0;
    u_rhs[:, N - 1] = 0;
    u_rhs[:, 0] = 0
    rhs = np.array([u_rhs])
    return rhs
def eq_13_coeff(current):
    """Coefficient planes [a, b, c, d, e, f] = (d2y, d2x, dxdy, dx, dy, identity)
    of the quasilinear operator of equation 13."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    ###
    a11 = 4 + y * np.exp(-x * y)
    b11 = 2 + x * np.exp(x * y)
    c11 = x * np.exp(-x * y)
    d11 = y * np.exp(x)
    e11 = x * y ** 2 * np.exp(y)
    f11 = 6 + np.sin(np.pi * x * y)
    ###
    coeff1 = [a11, b11, c11, d11, e11, f11]
    coeff = np.array([coeff1])
    return coeff
################################################################################
def eq_14_bc(current):
    """Write the Dirichlet boundary values of u_exact = exp(x + y) onto the four
    edges of current[0], in place, and return current."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    u_exact = np.exp(x + y)
    current[0][0, :] = u_exact[0, :]
    current[0][-1, :] = u_exact[-1, :]
    current[0][:, 0] = u_exact[:, 0]
    current[0][:, -1] = u_exact[:, -1]
    return current
def eq_14_exact(current):
    """Exact solution u(x, y) = exp(x + y) of equation 14 on the grid of
    ``current``."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    u_exact = np.exp(x + y)
    return np.array([u_exact])
def eq_14_rhs(current):
    """Analytic right-hand side L[u_exact] of equation 14 for u_exact = exp(x+y),
    with boundary entries zeroed.  The literals b = 4 and a = 3 must stay in
    sync with eq_14_coeff."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    b = 4
    a = 3
    u_rhs = (b + np.exp(x * y) + a + np.exp(-x * y) +
             np.cos(np.pi*(x + 2*y)) + np.sin(np.pi*(y + 2*x)))*np.exp(x + y)
    u_rhs[0, :] = 0;
    u_rhs[N - 1, :] = 0;
    u_rhs[:, N - 1] = 0;
    u_rhs[:, 0] = 0
    rhs = np.array([u_rhs])
    return rhs
def eq_14_coeff(current):
    """Coefficient planes [a, b, c, d, e, f] = (d2y, d2x, dxdy, dx, dy, identity)
    of equation 14; the constants 4 and 3 mirror b/a in eq_14_rhs."""
    n_points = current[0].shape[0]
    grid = np.linspace(0, 1, n_points)
    xx, yy = np.meshgrid(grid, grid, indexing='ij')
    shape = (n_points, n_points)
    coeff_row = [
        4 + np.exp(xx * yy),              # d2y
        3 + np.exp(-xx * yy),             # d2x
        np.zeros(shape),                  # dxdy
        np.cos(np.pi * (xx + 2 * yy)),    # dx
        np.sin(np.pi * (yy + 2 * xx)),    # dy
        np.zeros(shape),                  # identity
    ]
    return np.array([coeff_row])
################################################################################
def eq_21_bc(current):
    """Write the Dirichlet boundary values of the system-21 exact pair
    (u = exp(x*y), v = exp(2*x*y)) onto the edges of current[0]/current[1],
    in place, and return current."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    u_exact = np.exp(x * y)
    v_exact = np.exp(2 * x * y)
    ###
    current[0][0, :] = u_exact[0, :]
    current[0][-1, :] = u_exact[-1, :]
    current[0][:, 0] = u_exact[:, 0]
    current[0][:, -1] = u_exact[:, -1]
    ###
    current[1][0, :] = v_exact[0, :]
    current[1][-1, :] = v_exact[-1, :]
    current[1][:, 0] = v_exact[:, 0]
    current[1][:, -1] = v_exact[:, -1]
    ###
    return current
def eq_21_exact(current):
    """Exact solution pair of system 21: u = exp(x*y), v = exp(2*x*y)."""
    n_points = current[0].shape[0]
    grid = np.linspace(0, 1, n_points)
    xx, yy = np.meshgrid(grid, grid, indexing='ij')
    return np.array([np.exp(xx * yy), np.exp(2 * xx * yy)])
def eq_21_rhs(current):
    """Analytic right-hand sides of system 21 (coefficients in eq_21_coeff) for
    the exact pair u = exp(x*y), v = exp(2*x*y); boundary entries zeroed."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    u_rhs = 20 * np.exp(2 * x * y) * x ** 2 - x - np.exp(-x * y) * y
    v_rhs = np.exp(x * y) + 4 * (7 + (np.sin(np.pi * x * y)) ** 2) * np.exp(2 * x * y) * y ** 2 + 16 * np.exp(
        3 * x * y) * x ** 2 - \
            2 * x * np.exp(2 * x * y - x) - 2 * y * np.exp(2 * x * y - y) + (2 + 4 * x * y) * np.sin(
        np.pi * x * y) * np.exp(2 * x * y)
    v_rhs[0, :] = 0
    v_rhs[N - 1, :] = 0
    v_rhs[:, N - 1] = 0
    v_rhs[:, 0] = 0
    u_rhs[0, :] = 0
    u_rhs[N - 1, :] = 0
    u_rhs[:, N - 1] = 0
    u_rhs[:, 0] = 0
    rhs = np.array([u_rhs, v_rhs])
    return rhs
def eq_21_coeff(current):
    """Coefficient planes of the 2x2 quasilinear system 21.

    Each row stacks twelve planes: [a, b, c, d, e, f] of the diagonal
    operator followed by [a, b, c, d, e, f] of the coupling operator, in the
    usual (d2y, d2x, dxdy, dx, dy, identity) order.
    """
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    ###
    a11 = 20 * np.exp(x * y)
    b11 = 7 + (np.cos(np.pi * x * y)) ** 2
    c11 = np.cos(np.pi * x * y)
    d11 = -np.exp(-2 * x * y)
    e11 = -np.exp(-x * y)
    f11 = np.zeros((N, N))
    ###
    a12 = np.zeros((N, N))
    b12 = np.zeros((N, N))
    c12 = np.zeros((N, N))
    d12 = np.zeros((N, N))
    e12 = np.zeros((N, N))
    f12 = -((7 + (np.cos(np.pi * x * y)) ** 2) * y ** 2 + np.cos(np.pi * x * y) * (1 + x * y)) * np.exp(-x * y)
    ###
    a22 = 4 * np.exp(x * y)
    b22 = 7 + (np.sin(np.pi * x * y)) ** 2
    c22 = np.sin(np.pi * x * y)
    d22 = -np.exp(-y)
    e22 = -np.exp(-x)
    f22 = np.zeros((N, N))
    ###
    a21 = np.zeros((N, N))
    b21 = np.zeros((N, N))
    c21 = np.zeros((N, N))
    d21 = np.zeros((N, N))
    e21 = np.zeros((N, N))
    f21 = np.ones((N, N))
    ###
    coeff1 = [a11, b11, c11, d11, e11, f11, a12, b12, c12, d12, e12, f12]
    coeff2 = [a21, b21, c21, d21, e21, f21, a22, b22, c22, d22, e22, f22]
    coeff = np.array([coeff1, coeff2])
    return coeff
################################################################################
def eq_22_bc(current):
    """Write the Dirichlet boundary values of the system-22 exact pair
    (u = exp(x*y), v = exp(x + y)) onto the edges of current[0]/current[1],
    in place, and return current."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    u_exact = np.exp(x * y)
    v_exact = np.exp(x + y)
    ###
    current[0][0, :] = u_exact[0, :]
    current[0][-1, :] = u_exact[-1, :]
    current[0][:, 0] = u_exact[:, 0]
    current[0][:, -1] = u_exact[:, -1]
    ###
    current[1][0, :] = v_exact[0, :]
    current[1][-1, :] = v_exact[-1, :]
    current[1][:, 0] = v_exact[:, 0]
    current[1][:, -1] = v_exact[:, -1]
    ###
    return current
def eq_22_exact(current):
    """Exact solution pair of system 22: u = exp(x*y), v = exp(x + y)."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    u_exact = np.exp(x * y)
    v_exact = np.exp(x + y)
    exact = np.array([u_exact, v_exact])
    return exact
def eq_22_rhs(current):
    """Analytic right-hand sides of system 22 (coefficients in eq_22_coeff) for
    the exact pair u = exp(x*y), v = exp(x + y); boundary entries zeroed."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    u_rhs = ((1 + np.exp(x * y)) * x ** 2 + (2 + np.cos(np.pi * x)) * y ** 2 +
             (1 + x * y) * np.exp(-x * y) + y * np.exp(x) + x * np.exp(y) + np.sin(np.pi * x * y)) * np.exp(x * y) + \
            (4 + np.cos(2 * np.pi * x * y) + 2 + np.sin(np.pi * x * y) + np.exp(-x * y)
             + np.exp(x) + np.exp(y) + np.sin(np.pi * x * y) + 2) * np.exp(x + y)
    v_rhs = (2 + np.log(1 + x) + 4 + np.exp(2 * x * y + 3) / 200 + np.log(1 + x * y) +
             (1 + np.cos(4 * np.pi * x * y)) / 3 + 16 * np.ones((N, N))) * np.exp(x + y) + \
            (20 * np.exp(x * y) * x ** 2 + (7 + (np.cos(np.pi * x * y)) ** 2) * y ** 2 +
             np.cos(np.pi * x * y) * (x * y + 1) - y * np.exp(-2 * x * y) - x * np.exp(-x * y)) * np.exp(x * y)
    v_rhs[0, :] = 0
    v_rhs[N - 1, :] = 0
    v_rhs[:, N - 1] = 0
    v_rhs[:, 0] = 0
    u_rhs[0, :] = 0
    u_rhs[N - 1, :] = 0
    u_rhs[:, N - 1] = 0
    u_rhs[:, 0] = 0
    rhs = np.array([u_rhs, v_rhs])
    return rhs
def eq_22_coeff(current):
    """Coefficient planes of the 2x2 quasilinear system 22.

    Row layout as in eq_21_coeff: [a11..f11, a12..f12] and
    [a21..f21, a22..f22] with slots (d2y, d2x, dxdy, dx, dy, identity).
    The (1,1) block reuses equation 11 and the (1,2) block equation 12.
    """
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    ###
    a11 = 1 + np.exp(x * y)
    b11 = 2 + np.cos(np.pi * x)
    c11 = np.exp(-x * y)
    d11 = np.exp(x)
    e11 = np.exp(y)
    f11 = np.sin(np.pi * x * y)
    ###
    a12 = 4 + np.cos(2 * np.pi * x * y)
    b12 = 2 + np.sin(np.pi * x * y)
    c12 = np.exp(-x * y)
    d12 = np.exp(x)
    e12 = np.exp(y)
    f12 = np.sin(np.pi * x * y) + 2
    ###
    a22 = 2 + np.log(1 + x)
    b22 = 4 * np.ones((N, N))
    c22 = np.exp(2 * x * y + 3) / 200
    d22 = np.log(1 + x * y)
    e22 = (1 + np.cos(4 * np.pi * x * y)) / 3
    f22 = 16 * np.ones((N, N))
    ###
    a21 = 20 * np.exp(x * y)
    b21 = 7 + (np.cos(np.pi * x * y)) ** 2
    c21 = np.cos(np.pi * x * y)
    d21 = -np.exp(-2 * x * y)
    e21 = -np.exp(-x * y)
    f21 = np.zeros((N, N))
    ###
    coeff1 = [a11, b11, c11, d11, e11, f11, a12, b12, c12, d12, e12, f12]
    coeff2 = [a21, b21, c21, d21, e21, f21, a22, b22, c22, d22, e22, f22]
    coeff = np.array([coeff1, coeff2])
    return coeff
################################################################################
def get_quasilinear(dim, number, a=1):
    """
    Build a quasilinear test `equation` object.

    -------------------
    dim = 1, number = 1
    [(1 + exp(x*y))d2y + (2 + cos(pi*x))d2x + exp(-x*y)dxdy + exp(x)dx +
    exp(y)dy + sin(pi*x*y)]u = rhs
    u_exact = exp(x*y), bc and rhs are taken from the operator and exact solution.
    -------------------
    dim = 1, number = 2
    [(4 + cos(2*pi*x*y))d2y + (2 + sin(pi*x*y))d2x + exp(-x*y)dxdy + exp(x)dx +
    exp(y)dy + sin(pi*x*y) + 2]u = rhs
    u_exact = exp(x+y), bc and rhs are taken from the operator and exact solution.
    -------------------

    Parameters
    ----------
    dim: int
        Dimensionality: 2 for a system of two equations, 1 for one equation.
    number: int or str
        For dim == 1: 0, 1, 2, 3, 4 or 'red fox' (the latter uses ``a``).
        For dim == 2: 1 or 2.
    a: float, optional
        Advection strength of the 'red fox' equation (default 1).

    Raises
    ------
    ValueError
        If (dim, number) names no known equation.  The original version fell
        through and raised a confusing UnboundLocalError instead.
    """
    if dim == 1:
        if number == 0:
            return equation(eq_00_coeff, eq_00_rhs, 1, eq_00_bc, eq_00_exact)
        if number == 1:
            return equation(eq_11_coeff, eq_11_rhs, 1, eq_11_bc, eq_11_exact)
        if number == 2:
            return equation(eq_12_coeff, eq_12_rhs, 1, eq_12_bc, eq_12_exact)
        if number == 3:
            return equation(eq_13_coeff, eq_13_rhs, 1, eq_13_bc, eq_13_exact)
        if number == 4:
            return equation(eq_14_coeff, eq_14_rhs, 1, eq_14_bc, eq_14_exact)
        if number == 'red fox':
            # Bind the advection strength ``a`` into the callbacks.
            rhs = lambda x: eq_red_fox_rhs(x, a)
            coeff = lambda x: eq_red_fox_coeff(x, a)
            return equation(coeff, rhs, 1, eq_red_fox_bc, eq_red_fox_exact)
    if dim == 2:
        if number == 1:
            return equation(eq_21_coeff, eq_21_rhs, 2, eq_21_bc, eq_21_exact)
        if number == 2:
            return equation(eq_22_coeff, eq_22_rhs, 2, eq_22_bc, eq_22_exact)
    raise ValueError("unknown quasilinear equation: dim=%r, number=%r" % (dim, number))
################################################################################
def nleq_21_bc(current):
    """Write the Dirichlet boundary values of the nonlinear-system exact pair
    (u = exp(-x*y), v = exp(-2*x*y)) onto the edges of current[0]/current[1],
    in place, and return current."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    u_exact = np.exp(-x * y)
    v_exact = np.exp(-2 * x * y)
    ###
    current[0][0, :] = u_exact[0, :]
    current[0][-1, :] = u_exact[-1, :]
    current[0][:, 0] = u_exact[:, 0]
    current[0][:, -1] = u_exact[:, -1]
    ###
    current[1][0, :] = v_exact[0, :]
    current[1][-1, :] = v_exact[-1, :]
    current[1][:, 0] = v_exact[:, 0]
    current[1][:, -1] = v_exact[:, -1]
    ###
    return current
def nleq_21_exact(current):
    """Exact solution pair of the 2d nonlinear system:
    u = exp(-x*y), v = exp(-2*x*y)."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    u_exact = np.exp(-x * y)
    v_exact = np.exp(-2 * x * y)
    exact = np.array([u_exact, v_exact])
    return exact
def nleq_21_rhs(current):
    """Analytic right-hand sides of the 2d nonlinear system for the exact pair
    u = exp(-x*y), v = exp(-2*x*y); boundary entries zeroed."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    u_rhs = y ** 2 + np.exp(x * y) * x ** 2 + np.exp(-5 * x * y)
    v_rhs = np.exp(-5 * x * y) - 2 * y * np.exp(-2 * x * y) / 7 + \
            4 * np.exp(-2 * x * y) * y ** 2 + 4 * (np.cos(np.pi * x) ** 2 + 1) * np.exp(-2 * x * y) * x ** 2
    v_rhs[0, :] = 0;
    v_rhs[-1, :] = 0;
    v_rhs[:, -1] = 0;
    v_rhs[:, 0] = 0
    u_rhs[0, :] = 0;
    u_rhs[-1, :] = 0;
    u_rhs[:, -1] = 0;
    u_rhs[:, 0] = 0
    rhs = np.array([u_rhs, v_rhs])
    return rhs
def nleq_21_coeff(current):
    """Coefficient planes of the 2d nonlinear system evaluated at the current
    iterate (u, v) = current; the couplings f12 = u*v and f21 = exp(-x*y)*u*v
    carry the nonlinearity.  Row layout as in eq_21_coeff."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    u, v = current
    x, y = np.meshgrid(z, z, indexing='ij')
    ###
    a11 = np.exp(2 * x * y)
    b11 = np.exp(x * y)
    c11 = np.zeros((N, N))
    d11 = np.zeros((N, N))
    e11 = np.zeros((N, N))
    f11 = np.zeros((N, N))
    ###
    a12 = np.zeros((N, N))
    b12 = np.zeros((N, N))
    c12 = np.zeros((N, N))
    d12 = np.zeros((N, N))
    e12 = np.zeros((N, N))
    f12 = u * v
    ###
    a22 = np.cos(np.pi * x) ** 2 + 1
    b22 = np.ones((N, N))
    c22 = np.zeros((N, N))
    d22 = np.ones((N, N)) / 7
    e22 = np.zeros((N, N))
    f22 = np.zeros((N, N))
    ###
    a21 = np.zeros((N, N))
    b21 = np.zeros((N, N))
    c21 = np.zeros((N, N))
    d21 = np.zeros((N, N))
    e21 = np.zeros((N, N))
    f21 = np.exp(-x * y) * u * v
    ###
    coeff1 = [a11, b11, c11, d11, e11, f11, a12, b12, c12, d12, e12, f12]
    coeff2 = [a21, b21, c21, d21, e21, f21, a22, b22, c22, d22, e22, f22]
    coeff = np.array([coeff1, coeff2])
    return coeff
def nleq_21_lcoeff(current):
    """Linearized (Newton) coefficient planes of the 2d nonlinear system at the
    current iterate: the zero-order couplings are the derivatives of the
    nonlinear terms (e.g. d(u*v^2)/du = v**2, d(u*v^2)/dv = 2*u*v)."""
    N, M = current[0].shape
    u, v = current
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    ###
    a11 = np.exp(2 * x * y)
    b11 = np.exp(x * y)
    c11 = np.zeros((N, N))
    d11 = np.zeros((N, N))
    e11 = np.zeros((N, N))
    f11 = v ** 2
    ###
    a12 = np.zeros((N, N))
    b12 = np.zeros((N, N))
    c12 = np.zeros((N, N))
    d12 = np.zeros((N, N))
    e12 = np.zeros((N, N))
    f12 = 2 * u * v
    ###
    a22 = np.cos(np.pi * x) ** 2 + 1
    b22 = np.ones((N, N))
    c22 = np.zeros((N, N))
    d22 = np.ones((N, N)) / 7
    e22 = np.zeros((N, N))
    f22 = np.exp(-x * y) * u ** 2
    ###
    a21 = np.zeros((N, N))
    b21 = np.zeros((N, N))
    c21 = np.zeros((N, N))
    d21 = np.zeros((N, N))
    e21 = np.zeros((N, N))
    f21 = 2 * np.exp(-x * y) * u * v
    ###
    coeff1 = [a11, b11, c11, d11, e11, f11, a12, b12, c12, d12, e12, f12]
    coeff2 = [a21, b21, c21, d21, e21, f21, a22, b22, c22, d22, e22, f22]
    coeff = np.array([coeff1, coeff2])
    return coeff
################################################################################
def nleq1_bc(current):
    """Write the Dirichlet boundary values of u_exact = exp(x*y) onto the four
    edges of current[0], in place, and return current."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    u_exact = np.exp(x * y)
    current[0][0, :] = u_exact[0, :]
    current[0][-1, :] = u_exact[-1, :]
    current[0][:, 0] = u_exact[:, 0]
    current[0][:, -1] = u_exact[:, -1]
    return current
def nleq1_exact(current):
    """Exact solution u(x, y) = exp(x*y) of the 1d nonlinear equation."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    u_exact = np.exp(x * y)
    return np.array([u_exact])
def nleq1_rhs(current):
    """Analytic right-hand side of the 1d nonlinear equation for
    u_exact = exp(x*y); the (4 + exp(x*y)) factor is the solution-dependent
    b-coefficient of nleq1_coeff evaluated at the exact solution."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    u_rhs = np.exp(x * y) * ((4 + np.exp(x * y)) * y ** 2 + (4 + np.exp(-x * y)) * x ** 2 + \
                             (1 + x * y) * np.exp(-2 * x * y) + np.cos(np.pi * x * y) * y + np.sin(
        np.pi * x * y) * x + np.sinh(2 * x * y))
    u_rhs[0, :] = 0;
    u_rhs[N - 1, :] = 0;
    u_rhs[:, N - 1] = 0;
    u_rhs[:, 0] = 0
    rhs = np.array([u_rhs])
    return rhs
def nleq1_l_coeff(current):
    """Linearized (Newton) coefficients of the 1d nonlinear equation at the
    current iterate: identical to nleq1_coeff except that f11 picks up the
    extra llt.d2x(u) term from differentiating the u-dependent b-coefficient."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    ###
    a11 = 4 + np.exp(-x * y)
    b11 = 4 + current[0]
    c11 = np.exp(-2 * x * y)
    d11 = np.cos(np.pi * x * y)
    e11 = np.sin(np.pi * x * y)
    f11 = np.sinh(2 * x * y) + llt.d2x(current[0], N)
    ###
    coeff1 = [a11, b11, c11, d11, e11, f11]
    coeff = np.array([coeff1])
    return coeff
def nleq1_coeff(current):
    """Coefficient planes of the 1d nonlinear equation at the current iterate;
    the d2x coefficient 4 + u depends on the solution itself."""
    n_points = current[0].shape[0]
    grid = np.linspace(0, 1, n_points)
    xx, yy = np.meshgrid(grid, grid, indexing='ij')
    pi_xy = np.pi * xx * yy
    coeff_row = [
        4 + np.exp(-xx * yy),     # d2y
        4 + current[0],           # d2x -- nonlinear: depends on the iterate u
        np.exp(-2 * xx * yy),     # dxdy
        np.cos(pi_xy),            # dx
        np.sin(pi_xy),            # dy
        np.sinh(2 * xx * yy),     # identity
    ]
    return np.array([coeff_row])
################################################################################
def get_nonlinear(dim):
    """
    Build the nonlinear test `equation` for ``dim`` equations (1 or 2).

    Raises
    ------
    ValueError
        If ``dim`` is neither 1 nor 2.  The original used a pointless
        ``global nonlinear`` (leaking module state) and raised NameError
        on an unknown ``dim``.
    """
    if dim == 2:
        return equation(nleq_21_coeff, nleq_21_rhs, 2, nleq_21_bc, nleq_21_exact, nleq_21_lcoeff)
    if dim == 1:
        return equation(nleq1_coeff, nleq1_rhs, 1, nleq1_bc, nleq1_exact, l_coeff=nleq1_l_coeff)
    raise ValueError("dim must be 1 or 2, got %r" % (dim,))
################################################################################
def trivial_harmonic_bc(current):
    """Identity-map boundary condition for the harmonic solvers: clamp the map
    (u, v) to (x, y) on the four edges, in place, and return current."""
    N, M = current[0].shape
    z = np.linspace(0, 1, N)
    x, y = np.meshgrid(z, z, indexing='ij')
    u_exact = x
    v_exact = y
    ###
    current[0][0, :] = u_exact[0, :]
    current[0][-1, :] = u_exact[-1, :]
    current[0][:, 0] = u_exact[:, 0]
    current[0][:, -1] = u_exact[:, -1]
    ###
    current[1][0, :] = v_exact[0, :]
    current[1][-1, :] = v_exact[-1, :]
    current[1][:, 0] = v_exact[:, 0]
    current[1][:, -1] = v_exact[:, -1]
    ###
    return current
def trivial_harmonic_rhs(current):
    """Homogeneous (all-zero) right-hand side with the same shape as ``current``."""
    return np.zeros_like(current)
def basic_harmonic_coeff(current):
    """Coefficient planes of the harmonic-map system with the Euclidean metric
    (frozen-coefficient Picard form): only the second-order terms built from
    the first derivatives of the current map (u, v) are non-zero, and both
    diagonal blocks share them.  Row layout as in eq_21_coeff."""
    N, M = current[0].shape
    u, v = current
    u_x, v_x = llt.dx(u, N), llt.dx(v, N)
    u_y, v_y = llt.dy(u, N), llt.dy(v, N)
    ###
    a11 = u_x ** 2 + v_x ** 2
    b11 = u_y ** 2 + v_y ** 2
    c11 = -2 * (u_x * u_y + v_x * v_y)
    d11 = np.zeros((N, N))
    e11 = np.zeros((N, N))
    f11 = np.zeros((N, N))
    ###
    a12 = np.zeros((N, N))
    b12 = np.zeros((N, N))
    c12 = np.zeros((N, N))
    d12 = np.zeros((N, N))
    e12 = np.zeros((N, N))
    f12 = np.zeros((N, N))
    ###
    a22 = u_x ** 2 + v_x ** 2
    b22 = u_y ** 2 + v_y ** 2
    c22 = -2 * (u_x * u_y + v_x * v_y)
    d22 = np.zeros((N, N))
    e22 = np.zeros((N, N))
    f22 = np.zeros((N, N))
    ###
    a21 = np.zeros((N, N))
    b21 = np.zeros((N, N))
    c21 = np.zeros((N, N))
    d21 = np.zeros((N, N))
    e21 = np.zeros((N, N))
    f21 = np.zeros((N, N))
    ###
    coeff1 = [a11, b11, c11, d11, e11, f11, a12, b12, c12, d12, e12, f12]
    coeff2 = [a21, b21, c21, d21, e21, f21, a22, b22, c22, d22, e22, f22]
    coeff = np.array([coeff1, coeff2])
    return coeff
def diagonal_metrics(current):
    """Euclidean identity metric tensor on the grid of ``current``:
    g[0,0] = g[1,1] = 1 everywhere, off-diagonal components zero."""
    n_points = current[0].shape[0]
    g = np.zeros((2, 2, n_points, n_points))
    g[0, 0] = 1
    g[1, 1] = 1
    return g
def harmonic_coeff(current, metrics=diagonal_metrics):
    """
    Coefficient planes of the harmonic-map system for the map (u, v) in the
    metric produced by ``metrics(current)``.

    Improvements over the original: the unused ``z``/``x``/``y`` meshgrid
    locals are removed and ``sqrt(det g)`` is computed once instead of eight
    times.  The numerical content is otherwise unchanged.
    """
    N, M = current[0].shape
    u, v = current
    u_x, v_x = llt.dx(u, N), llt.dx(v, N)
    u_y, v_y = llt.dy(u, N), llt.dy(v, N)
    g = metrics(current)
    det_g_x = g[0, 0]*g[1, 1] - g[0, 1]*g[1, 0]
    sqrt_det = np.sqrt(det_g_x)
    J_xi = u_x*v_y - u_y*v_x
    R = J_xi**2  # squared Jacobian of the map; scales the first-order terms
    ###
    a11 = g[0, 0]*u_x**2 + g[1, 1]*v_x**2 + 2*g[0, 1]*u_x*v_x
    b11 = g[0, 0]*u_y**2 + g[1, 1]*v_y**2 + 2*g[0, 1]*u_y*v_y
    c11 = -2*(g[0, 0]*u_x*u_y + g[1, 1]*v_x*v_y + g[0, 1]*(v_x*u_y + u_x*v_y))
    d11 = R*llt.dy(g[0, 1]/sqrt_det, N)
    e11 = -R*llt.dx(g[0, 1]/sqrt_det, N)
    f11 = np.zeros((N, N))
    ###
    a12 = np.zeros((N, N))
    b12 = np.zeros((N, N))
    c12 = np.zeros((N, N))
    d12 = R*llt.dy(g[1, 1]/sqrt_det, N)
    e12 = -R*llt.dx(g[1, 1]/sqrt_det, N)
    f12 = np.zeros((N, N))
    ###
    # Second equation shares the second-order part with the first.
    a22 = a11
    b22 = b11
    c22 = c11
    d22 = -R*llt.dy(g[1, 0]/sqrt_det, N)
    e22 = R*llt.dx(g[1, 0]/sqrt_det, N)
    f22 = np.zeros((N, N))
    ###
    a21 = np.zeros((N, N))
    b21 = np.zeros((N, N))
    c21 = np.zeros((N, N))
    d21 = -R*llt.dy(g[0, 0]/sqrt_det, N)
    e21 = R*llt.dx(g[0, 0]/sqrt_det, N)
    f21 = np.zeros((N, N))
    ###
    coeff1 = [a11, b11, c11, d11, e11, f11, a12, b12, c12, d12, e12, f12]
    coeff2 = [a21, b21, c21, d21, e21, f21, a22, b22, c22, d22, e22, f22]
    coeff = np.array([coeff1, coeff2])
    return coeff
def winslow_coeff(current, metrics=diagonal_metrics):
    """
    Coefficient planes of the Winslow-type system for the map (u, v) in the
    metric produced by ``metrics(current)``.  Differs from harmonic_coeff in
    the first-order scaling: R = sqrt(det g)*J^2 and the metric components
    are divided by det g rather than by its square root.

    Improvement over the original: the unused ``z``/``x``/``y`` meshgrid
    locals are removed; the numerics are unchanged.
    """
    N, M = current[0].shape
    u, v = current
    u_x, v_x = llt.dx(u, N), llt.dx(v, N)
    u_y, v_y = llt.dy(u, N), llt.dy(v, N)
    g = metrics(current)
    det_g_x = g[0, 0]*g[1, 1] - g[0, 1]*g[1, 0]
    J_xi = u_x*v_y - u_y*v_x
    R = np.sqrt(det_g_x)*J_xi**2
    ###
    a11 = g[0, 0]*u_x**2 + g[1, 1]*v_x**2 + 2*g[0, 1]*u_x*v_x
    b11 = g[0, 0]*u_y**2 + g[1, 1]*v_y**2 + 2*g[0, 1]*u_y*v_y
    c11 = -2*(g[0, 0]*u_x*u_y + g[1, 1]*v_x*v_y + g[0, 1]*(v_x*u_y + u_x*v_y))
    d11 = R*llt.dy(g[0, 1]/det_g_x, N)
    e11 = -R*llt.dx(g[0, 1]/det_g_x, N)
    f11 = np.zeros((N, N))
    ###
    a12 = np.zeros((N, N))
    b12 = np.zeros((N, N))
    c12 = np.zeros((N, N))
    d12 = R*llt.dy(g[1, 1]/det_g_x, N)
    e12 = -R*llt.dx(g[1, 1]/det_g_x, N)
    f12 = np.zeros((N, N))
    ###
    # Second equation shares the second-order part with the first.
    a22 = a11
    b22 = b11
    c22 = c11
    d22 = -R*llt.dy(g[1, 0]/det_g_x, N)
    e22 = R*llt.dx(g[1, 0]/det_g_x, N)
    f22 = np.zeros((N, N))
    ###
    a21 = np.zeros((N, N))
    b21 = np.zeros((N, N))
    c21 = np.zeros((N, N))
    d21 = -R*llt.dy(g[0, 0]/det_g_x, N)
    e21 = R*llt.dx(g[0, 0]/det_g_x, N)
    f21 = np.zeros((N, N))
    ###
    coeff1 = [a11, b11, c11, d11, e11, f11, a12, b12, c12, d12, e12, f12]
    coeff2 = [a21, b21, c21, d21, e21, f21, a22, b22, c22, d22, e22, f22]
    coeff = np.array([coeff1, coeff2])
    return coeff
def advection_harmonic_rhs(current, metrics=diagonal_metrics):
    """
    Advection part of the harmonic-map right-hand side for the map (u, v)
    under the metric produced by ``metrics(current)``.

    Bug fixes relative to the original:
    * ``metrics`` was called as ``metrics(N)`` although every metric callback
      in this module (e.g. ``diagonal_metrics``) expects ``current``; with
      the default metric the call raised a TypeError.
    * The continuation lines beginning with ``- R*...`` / ``+ R*...`` were
      not joined to their assignments (no backslash or parentheses), so they
      were evaluated as standalone expressions and silently discarded.  They
      are now part of the assigned expressions.
    The unused meshgrid locals are also removed.
    """
    rhs = np.zeros_like(current)
    N, M = current[0].shape
    u, v = current
    u_x, v_x = llt.dx(u, N), llt.dx(v, N)
    u_y, v_y = llt.dy(u, N), llt.dy(v, N)
    g = metrics(current)
    det_g_x = g[0, 0]*g[1, 1] - g[0, 1]*g[1, 0]
    sqrt_det = np.sqrt(det_g_x)
    J_xi = u_x*v_y - u_y*v_x
    R = J_xi**2
    rhs[0] = (-R*llt.dy(g[0, 1]/sqrt_det, N)*u_x + R*llt.dx(g[0, 1]/sqrt_det, N)*u_y
              - R*llt.dy(g[1, 1]/sqrt_det, N)*v_x + R*llt.dx(g[1, 1]/sqrt_det, N)*v_y)
    rhs[1] = (R*llt.dy(g[1, 0]/sqrt_det, N)*v_x - R*llt.dx(g[1, 0]/sqrt_det, N)*v_y
              + R*llt.dy(g[0, 0]/sqrt_det, N)*u_x - R*llt.dx(g[0, 0]/sqrt_det, N)*u_y)
    return rhs
def advection_free_harmonic_coeff(current, metrics=diagonal_metrics):
    """
    Second-order (advection-free) coefficient planes of the harmonic-map
    operator for the map (u, v) under the metric ``metrics(current)``;
    intended to pair with advection_harmonic_rhs.

    Bug fix relative to the original: ``metrics`` was called as
    ``metrics(N)`` although every metric callback in this module expects
    ``current``; with the default metric the call raised a TypeError.
    The unused ``det_g_x``/``J_xi``/``R``/meshgrid locals are also removed.

    NOTE(review): unlike harmonic_coeff, a11 here is built from the
    y-derivatives and b11 from the x-derivatives — confirm this ordering
    against the solver's stencil convention.
    """
    N, M = current[0].shape
    u, v = current
    u_x, v_x = llt.dx(u, N), llt.dx(v, N)
    u_y, v_y = llt.dy(u, N), llt.dy(v, N)
    g = metrics(current)
    ###
    a11 = g[0, 0]*u_y**2 + g[1, 1]*v_y**2 + 2*g[0, 1]*u_y*v_y
    b11 = g[0, 0]*u_x**2 + g[1, 1]*v_x**2 + 2*g[0, 1]*u_x*v_x
    c11 = -2*(g[0, 0]*u_x*u_y + g[1, 1]*v_x*v_y + g[0, 1]*(v_x*u_y + u_x*v_y))
    d11 = np.zeros((N, N))
    e11 = np.zeros((N, N))
    f11 = np.zeros((N, N))
    ###
    a12 = np.zeros((N, N))
    b12 = np.zeros((N, N))
    c12 = np.zeros((N, N))
    d12 = np.zeros((N, N))
    e12 = np.zeros((N, N))
    f12 = np.zeros((N, N))
    ###
    # Second equation shares the second-order part with the first.
    a22 = a11
    b22 = b11
    c22 = c11
    d22 = np.zeros((N, N))
    e22 = np.zeros((N, N))
    f22 = np.zeros((N, N))
    ###
    a21 = np.zeros((N, N))
    b21 = np.zeros((N, N))
    c21 = np.zeros((N, N))
    d21 = np.zeros((N, N))
    e21 = np.zeros((N, N))
    f21 = np.zeros((N, N))
    ###
    coeff1 = [a11, b11, c11, d11, e11, f11, a12, b12, c12, d12, e12, f12]
    coeff2 = [a21, b21, c21, d21, e21, f21, a22, b22, c22, d22, e22, f22]
    coeff = np.array([coeff1, coeff2])
    return coeff
def basic_mixed_harmonic_coeff(current):
    """Harmonic-map coefficients with upwind/downwind mixing: the second-order
    coefficients pair forward and backward first differences (a = f*b products)
    instead of squared central differences; the cross term keeps central
    differences.  Row layout as in eq_21_coeff."""
    N, M = current[0].shape
    u, v = current
    u_x, v_x = llt.dx(u, N), llt.dx(v, N)
    u_y, v_y = llt.dy(u, N), llt.dy(v, N)
    u_x_f, v_x_f = llt.dx_forward(u, N), llt.dx_forward(v, N)
    u_y_f, v_y_f = llt.dy_forward(u, N), llt.dy_forward(v, N)
    u_x_b, v_x_b = llt.dx_backward(u, N), llt.dx_backward(v, N)
    u_y_b, v_y_b = llt.dy_backward(u, N), llt.dy_backward(v, N)
    ###
    a11 = u_x_f * u_x_b + v_x_f * v_x_b
    b11 = u_y_f * u_y_b + v_y_f * v_y_b
    c11 = -2 * (u_x * u_y + v_x * v_y)
    d11 = np.zeros((N, N))
    e11 = np.zeros((N, N))
    f11 = np.zeros((N, N))
    ###
    a12 = np.zeros((N, N))
    b12 = np.zeros((N, N))
    c12 = np.zeros((N, N))
    d12 = np.zeros((N, N))
    e12 = np.zeros((N, N))
    f12 = np.zeros((N, N))
    ###
    a22 = u_x_f * u_x_b + v_x_f * v_x_b
    b22 = u_y_f * u_y_b + v_y_f * v_y_b
    c22 = -2 * (u_x * u_y + v_x * v_y)
    d22 = np.zeros((N, N))
    e22 = np.zeros((N, N))
    f22 = np.zeros((N, N))
    ###
    a21 = np.zeros((N, N))
    b21 = np.zeros((N, N))
    c21 = np.zeros((N, N))
    d21 = np.zeros((N, N))
    e21 = np.zeros((N, N))
    f21 = np.zeros((N, N))
    ###
    coeff1 = [a11, b11, c11, d11, e11, f11, a12, b12, c12, d12, e12, f12]
    coeff2 = [a21, b21, c21, d21, e21, f21, a22, b22, c22, d22, e22, f22]
    coeff = np.array([coeff1, coeff2])
    return coeff
def basic_fair_newton_harmonic_linear_coeff(current):
    """
    Newton linearization of the harmonic-map operator at the current map
    (u, v): second-order coefficients as in basic_harmonic_coeff plus
    first-order terms built from the second derivatives of the iterate.

    Improvement over the original: the unused locals ``u0``/``v0`` are
    removed; the numerics are unchanged.
    """
    N, M = current[0].shape
    u, v = current
    u_x, v_x = llt.dx(u, N), llt.dx(v, N)
    u_y, v_y = llt.dy(u, N), llt.dy(v, N)
    u_xx, v_xx = llt.d2x(u, N), llt.d2x(v, N)
    u_yy, v_yy = llt.d2y(u, N), llt.d2y(v, N)
    u_xy, v_xy = llt.dxdy(u, N), llt.dxdy(v, N)
    ###
    a11 = u_x ** 2 + v_x ** 2
    b11 = u_y ** 2 + v_y ** 2
    c11 = -2 * (u_x * u_y + v_x * v_y)
    d11 = 2 * (u_yy * u_x - u_xy * u_y)
    e11 = 2 * (u_xx * u_y - u_xy * u_x)
    f11 = np.zeros((N, N))
    ###
    a12 = np.zeros((N, N))
    b12 = np.zeros((N, N))
    c12 = np.zeros((N, N))
    d12 = 2 * (u_yy * v_x - u_xy * v_y)
    e12 = 2 * (u_xx * v_y - u_xy * v_x)
    f12 = np.zeros((N, N))
    ###
    a22 = u_x ** 2 + v_x ** 2
    b22 = u_y ** 2 + v_y ** 2
    c22 = -2 * (u_x * u_y + v_x * v_y)
    d22 = 2 * (v_yy * v_x - v_xy * v_y)
    e22 = 2 * (v_xx * v_y - v_xy * v_x)
    f22 = np.zeros((N, N))
    ###
    a21 = np.zeros((N, N))
    b21 = np.zeros((N, N))
    c21 = np.zeros((N, N))
    d21 = 2 * (v_yy * u_x - v_xy * u_y)
    e21 = 2 * (v_xx * u_y - v_xy * u_x)
    f21 = np.zeros((N, N))
    ###
    coeff1 = [a11, b11, c11, d11, e11, f11, a12, b12, c12, d12, e12, f12]
    coeff2 = [a21, b21, c21, d21, e21, f21, a22, b22, c22, d22, e22, f22]
    coeff = np.array([coeff1, coeff2])
    return coeff
################################################################################
def get_harmonic(name):
    """
    Build a harmonic_equation by configuration name.

    Known names: 'Fair Newton', 'Frozen Metric', 'Harmonic Frozen Metric',
    'Winslow Frozen Metric', 'Upwind + Downwind Frozen Metric', plus
    'Harmonic with Frozen Metric' (kept as an alias of
    'Harmonic Frozen Metric' for backward compatibility).

    Raises
    ------
    ValueError
        For an unknown name.  The original used a needless ``global res``
        and fell through with a NameError instead.
    """
    if name == 'Fair Newton':
        return harmonic_equation(basic_harmonic_coeff, trivial_harmonic_rhs, 2, bc=trivial_harmonic_bc,
                                 l_coeff=basic_fair_newton_harmonic_linear_coeff)
    if name == 'Frozen Metric':
        return harmonic_equation(basic_harmonic_coeff, trivial_harmonic_rhs, 2, bc=trivial_harmonic_bc)
    if name in ('Harmonic Frozen Metric', 'Harmonic with Frozen Metric'):
        return harmonic_equation(harmonic_coeff, trivial_harmonic_rhs, 2, bc=trivial_harmonic_bc)
    if name == 'Winslow Frozen Metric':
        return harmonic_equation(winslow_coeff, trivial_harmonic_rhs, 2, bc=trivial_harmonic_bc)
    if name == 'Upwind + Downwind Frozen Metric':
        return harmonic_equation(basic_mixed_harmonic_coeff, trivial_harmonic_rhs, 2, bc=trivial_harmonic_bc)
    raise ValueError("unknown harmonic equation name: %r" % (name,))
| 30.195516
| 118
| 0.478407
| 6,189
| 33,668
| 2.484085
| 0.02666
| 0.026929
| 0.069728
| 0.078444
| 0.865943
| 0.845193
| 0.817744
| 0.792767
| 0.784051
| 0.777612
| 0
| 0.0927
| 0.279405
| 33,668
| 1,114
| 119
| 30.222621
| 0.540992
| 0.022989
| 0
| 0.729761
| 0
| 0
| 0.007187
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.062714
| false
| 0
| 0.004561
| 0.00114
| 0.129989
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
3370b548b14bab05cbf97184fec103126d1eccad
| 3,571
|
py
|
Python
|
src/day20.py
|
StenAL/Advent-of-Code-2021
|
57c1b11c3a83ed75520875f4811f3bb1e9ebc950
|
[
"MIT"
] | 1
|
2021-12-03T20:11:51.000Z
|
2021-12-03T20:11:51.000Z
|
src/day20.py
|
StenAL/Advent-of-Code-2021
|
57c1b11c3a83ed75520875f4811f3bb1e9ebc950
|
[
"MIT"
] | null | null | null |
src/day20.py
|
StenAL/Advent-of-Code-2021
|
57c1b11c3a83ed75520875f4811f3bb1e9ebc950
|
[
"MIT"
] | null | null | null |
from util import *
from collections import *
import copy
from functools import reduce
from math import prod
day = 20
def get_neighbors(p):
    """Return the nine points of the 3x3 square centred on p, row by row
    (top-left first, includes p itself)."""
    px, py = p
    return [(px + dx, py + dy) for dy in (-1, 0, 1) for dx in (-1, 0, 1)]
def task1():
    """Day 20 part 1: parse the enhancement algorithm and image, run two
    enhancement steps, and print the number of lit pixels."""
    data = get_input_for_day(day)
    # data = get_input_for_file("test2")
    mode = "algorithm"
    algorithm = ""
    y = -1
    points = dict()
    for line in data:
        if mode == "algorithm":
            algorithm = line
            mode = "image"
            continue
        if mode == "image":
            # The blank separator line is consumed as an empty image row,
            # which is why y starts at -1: real rows begin at y = 0.
            for x in range(len(line)):
                if line[x] == "#":
                    points[(x, y)] = 1
                else:
                    points[(x, y)] = 0
            y += 1
    iterations = 2
    for i in range(iterations):
        new_points = points.copy()
        seen = set()
        for point in points:
            # Enhance every known pixel and its 8 neighbours (the image grows
            # by one ring per step); `seen` avoids recomputing shared cells.
            neighbors = get_neighbors(point)
            for p in neighbors:
                if p in seen:
                    continue
                neighbors2 = get_neighbors(p)
                index = []
                for n in neighbors2:
                    if n in points and points[n] == 1:
                        index.append("1")
                        continue
                    if n in points and points[n] == 0:
                        index.append("0")
                        continue
                    # Unknown (infinite background) pixels: lit on odd steps.
                    # Assumes algorithm[0] == '#' so the background flickers
                    # each step -- TODO confirm for this puzzle input.
                    if i % 2 == 1:
                        index.append("1")
                        continue
                    index.append("0")
                # The 9 bits read row-by-row form the lookup index.
                index_int = int("".join(index), 2)
                value = algorithm[index_int]
                new_points[p] = 1 if value == "#" else 0
                seen.add(p)
        points = new_points
    lit = [k for k, v in points.items() if v == 1]
    ans = len(lit)
    print(ans)
def task2():
    """Day 20 part 2: same enhancement as task1 but for 50 iterations; prints
    the step counter as progress and finally the number of lit pixels."""
    data = get_input_for_day(day)
    # data = get_input_for_file("test2")
    mode = "algorithm"
    algorithm = ""
    y = -1
    points = dict()
    for line in data:
        if mode == "algorithm":
            algorithm = line
            mode = "image"
            continue
        if mode == "image":
            # The blank separator line is consumed as an empty image row,
            # which is why y starts at -1: real rows begin at y = 0.
            for x in range(len(line)):
                if line[x] == "#":
                    points[(x, y)] = 1
                else:
                    points[(x, y)] = 0
            y += 1
    iterations = 50
    for i in range(iterations):
        new_points = points.copy()
        seen = set()
        for point in points:
            neighbors = get_neighbors(point)
            for p in neighbors:
                if p in seen:
                    continue
                neighbors2 = get_neighbors(p)
                index = []
                for n in neighbors2:
                    if n in points and points[n] == 1:
                        index.append("1")
                        continue
                    if n in points and points[n] == 0:
                        index.append("0")
                        continue
                    # Unknown (infinite background) pixels: lit on odd steps.
                    # Assumes algorithm[0] == '#' -- TODO confirm for input.
                    if i % 2 == 1:
                        index.append("1")
                        continue
                    index.append("0")
                index_int = int("".join(index), 2)
                value = algorithm[index_int]
                new_points[p] = 1 if value == "#" else 0
                seen.add(p)
        points = new_points
        print(i)  # progress indicator: 50 steps over a growing grid is slow
    lit = [k for k, v in points.items() if v == 1]
    ans = len(lit)
    print(ans)
# Script entry point: part 1 is kept commented out; note there is no
# __main__ guard, so importing this module runs part 2 immediately.
# task1()
task2()
| 27.469231
| 56
| 0.423411
| 393
| 3,571
| 3.778626
| 0.16285
| 0.043098
| 0.032323
| 0.040404
| 0.841751
| 0.841751
| 0.841751
| 0.841751
| 0.841751
| 0.841751
| 0
| 0.029286
| 0.474097
| 3,571
| 129
| 57
| 27.682171
| 0.761448
| 0.027163
| 0
| 0.828829
| 0
| 0
| 0.019602
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.027027
| false
| 0
| 0.045045
| 0
| 0.081081
| 0.027027
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
337102171abebaf4acbfbc386e536620a9a1ed3e
| 118
|
py
|
Python
|
boa3_test/test_sc/native_test/stdlib/AtoiTooManyArguments.py
|
OnBlockIO/neo3-boa
|
cb317292a67532a52ed26f2b0f0f7d0b10ac5f5f
|
[
"Apache-2.0"
] | 25
|
2020-07-22T19:37:43.000Z
|
2022-03-08T03:23:55.000Z
|
boa3_test/test_sc/native_test/stdlib/AtoiTooManyArguments.py
|
OnBlockIO/neo3-boa
|
cb317292a67532a52ed26f2b0f0f7d0b10ac5f5f
|
[
"Apache-2.0"
] | 419
|
2020-04-23T17:48:14.000Z
|
2022-03-31T13:17:45.000Z
|
boa3_test/test_sc/native_test/stdlib/AtoiTooManyArguments.py
|
OnBlockIO/neo3-boa
|
cb317292a67532a52ed26f2b0f0f7d0b10ac5f5f
|
[
"Apache-2.0"
] | 15
|
2020-05-21T21:54:24.000Z
|
2021-11-18T06:17:24.000Z
|
# Negative-compilation fixture (AtoiTooManyArguments): StdLib.atoi takes a
# value and a base, so the third argument below is deliberately one too many.
# NOTE(review): presumably the boa3 test suite expects compilation of this
# script to fail with a "too many arguments" error — confirm against the suite.
from boa3.builtin.nativecontract.stdlib import StdLib
def main() -> int:
    # Intentional error: extra third argument to atoi.
    return StdLib.atoi('100', 10, 'extra')
| 19.666667
| 53
| 0.70339
| 16
| 118
| 5.1875
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.06
| 0.152542
| 118
| 5
| 54
| 23.6
| 0.77
| 0
| 0
| 0
| 0
| 0
| 0.067797
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| true
| 0
| 0.333333
| 0.333333
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
683c747de8cb7c8365a153731ec6ab02962b07fe
| 1,328
|
py
|
Python
|
Solutions/ProjectEuler_08.py
|
Yassate/ProjectEuler
|
dd2a8402535f48d9ca5b45c6fbbe7f5359e088de
|
[
"MIT"
] | null | null | null |
Solutions/ProjectEuler_08.py
|
Yassate/ProjectEuler
|
dd2a8402535f48d9ca5b45c6fbbe7f5359e088de
|
[
"MIT"
] | null | null | null |
Solutions/ProjectEuler_08.py
|
Yassate/ProjectEuler
|
dd2a8402535f48d9ca5b45c6fbbe7f5359e088de
|
[
"MIT"
] | null | null | null |
"""Project Euler problem 8: greatest product of 13 adjacent digits
in the fixed 1000-digit number below."""
from math import prod

NUMBER = "7316717653133062491922511967442657474235534919493496983520312774506326239578318016984801869478851843858615607891129494954595017379583319528532088055111254069874715852386305071569329096329522744304355766896648950445244523161731856403098711121722383113622298934233803081353362766142828064444866452387493035890729629049156044077239071381051585930796086670172427121883998797908792274921901699720888093776657273330010533678812202354218097512545405947522435258490771167055601360483958644670632441572215539753697817977846174064955149290862569321978468622482839722413756570560574902614079729686524145351004748216637048440319989000889524345065854122758866688116427171479924442928230863465674813919123162824586178664583591245665294765456828489128831426076900422421902267105562632111110937054421750694165896040807198403850962455444362981230987879927244284909188845801561660979191338754992005240636899125607176060588611646710940507754100225698315520005593572972571636269561882670428252483600823257530420752963450"

# Width of the sliding window of adjacent digits.
WINDOW = 13

# Convert once to ints, then take the max product over every 13-digit window.
digits = [int(digit) for digit in NUMBER]
highest = max(
    prod(digits[i:i + WINDOW]) for i in range(len(digits) - WINDOW + 1)
)
print(highest)
| 69.894737
| 1,011
| 0.899096
| 47
| 1,328
| 25.404255
| 0.425532
| 0.0134
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.818994
| 0.072289
| 1,328
| 18
| 1,012
| 73.777778
| 0.150162
| 0
| 0
| 0.266667
| 0
| 0
| 0.753012
| 0.753012
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.2
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
684f9c7dd67f02f8033d8c514f30eadf5d189ecc
| 159
|
py
|
Python
|
29/00/7.py
|
pylangstudy/201709
|
53d868786d7327a83bfa7f4149549c6f9855a6c6
|
[
"CC0-1.0"
] | null | null | null |
29/00/7.py
|
pylangstudy/201709
|
53d868786d7327a83bfa7f4149549c6f9855a6c6
|
[
"CC0-1.0"
] | 32
|
2017-09-01T00:52:17.000Z
|
2017-10-01T00:30:02.000Z
|
29/00/7.py
|
pylangstudy/201709
|
53d868786d7327a83bfa7f4149549c6f9855a6c6
|
[
"CC0-1.0"
] | null | null | null |
"""Demonstrate Decimal.quantize with two explicit rounding modes."""
from decimal import Decimal, ROUND_DOWN, ROUND_UP

value = Decimal('7.325')
# Truncate toward zero at two decimal places -> 7.32
print(value.quantize(Decimal('.01'), rounding=ROUND_DOWN))
# Round away from zero to a whole number -> 8
print(value.quantize(Decimal('1.'), rounding=ROUND_UP))
| 39.75
| 69
| 0.72956
| 23
| 159
| 4.956522
| 0.565217
| 0.210526
| 0.22807
| 0.280702
| 0.54386
| 0.54386
| 0
| 0
| 0
| 0
| 0
| 0.072848
| 0.050314
| 159
| 3
| 70
| 53
| 0.682119
| 0
| 0
| 0
| 0
| 0
| 0.09434
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0.333333
| 0
| 0.333333
| 0.666667
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 1
|
0
| 8
|
685115484e83978dcb8357bd3b60e2f0ef41eff4
| 150
|
py
|
Python
|
foolbox/zoo/__init__.py
|
Gokkulnath/foolbox
|
74cf9051b56c0a7d31709b4127c6a2ce049f18bd
|
[
"MIT"
] | null | null | null |
foolbox/zoo/__init__.py
|
Gokkulnath/foolbox
|
74cf9051b56c0a7d31709b4127c6a2ce049f18bd
|
[
"MIT"
] | null | null | null |
foolbox/zoo/__init__.py
|
Gokkulnath/foolbox
|
74cf9051b56c0a7d31709b4127c6a2ce049f18bd
|
[
"MIT"
] | null | null | null |
from .zoo import get_model # noqa: F401
from .weights_fetcher import fetch_weights # noqa: F401
from .git_cloner import GitCloneError # noqa: F401
| 37.5
| 56
| 0.78
| 22
| 150
| 5.136364
| 0.590909
| 0.212389
| 0.212389
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.071429
| 0.16
| 150
| 3
| 57
| 50
| 0.825397
| 0.213333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
68747ba6811a3db757e0ba1e61076faf87aca6b7
| 3,805
|
py
|
Python
|
tests/contrib/epidemiology/test_seir.py
|
johannespitz/pyro
|
96d515474cfebe7addfcde575ac468cc64d15470
|
[
"Apache-2.0"
] | null | null | null |
tests/contrib/epidemiology/test_seir.py
|
johannespitz/pyro
|
96d515474cfebe7addfcde575ac468cc64d15470
|
[
"Apache-2.0"
] | null | null | null |
tests/contrib/epidemiology/test_seir.py
|
johannespitz/pyro
|
96d515474cfebe7addfcde575ac468cc64d15470
|
[
"Apache-2.0"
] | null | null | null |
# Copyright Contributors to the Pyro project.
# SPDX-License-Identifier: Apache-2.0
import pytest
import torch
import pyro.distributions as dist
from pyro.contrib.epidemiology import OverdispersedSEIRModel, SimpleSEIRModel
@pytest.mark.parametrize("duration", [3, 7])
@pytest.mark.parametrize("forecast", [0, 7])
@pytest.mark.parametrize("options", [
    {},
    {"dct": 1.},
    {"num_quant_bins": 8},
], ids=str)
def test_simple_smoke(duration, forecast, options):
    """Smoke-test SimpleSEIRModel end to end: simulate, fit briefly, predict."""
    population = 100
    incubation_time = 2.0
    recovery_time = 7.0

    # Simulate observations, retrying until at least one count is nonzero.
    simulator = SimpleSEIRModel(population, incubation_time, recovery_time,
                                [None] * duration)
    data = None
    for _ in range(100):
        data = simulator.generate({"R0": 1.5, "rho": 0.5})["obs"]
        if data.sum():
            break
    assert data.sum() > 0, "failed to generate positive data"

    # Run a deliberately tiny inference on the simulated data.
    fitted = SimpleSEIRModel(population, incubation_time, recovery_time, data)
    num_samples = 5
    fitted.fit(warmup_steps=2, num_samples=num_samples,
               max_tree_depth=2, **options)

    # Each forecasted latent series has one row per posterior sample.
    samples = fitted.predict(forecast=forecast)
    expected_shape = (num_samples, duration + forecast)
    for compartment in ("S", "E", "I"):
        assert samples[compartment].shape == expected_shape
@pytest.mark.parametrize("duration", [3, 7])
@pytest.mark.parametrize("forecast", [0, 7])
@pytest.mark.parametrize("options", [
    {},
    {"dct": 1.},
    {"num_quant_bins": 8},
], ids=str)
def test_overdispersed_smoke(duration, forecast, options):
    """Smoke-test OverdispersedSEIRModel end to end: simulate, fit, predict."""
    population = 100
    incubation_time = 2.0
    recovery_time = 7.0

    # Simulate observations, retrying until at least one count is nonzero.
    simulator = OverdispersedSEIRModel(
        population, incubation_time, recovery_time, [None] * duration)
    data = None
    for _ in range(100):
        data = simulator.generate({"R0": 1.5, "rho": 0.5, "k": 1.0})["obs"]
        if data.sum():
            break
    assert data.sum() > 0, "failed to generate positive data"

    # Run a deliberately tiny inference on the simulated data.
    fitted = OverdispersedSEIRModel(
        population, incubation_time, recovery_time, data)
    num_samples = 5
    fitted.fit(warmup_steps=2, num_samples=num_samples,
               max_tree_depth=2, **options)

    # Each forecasted latent series has one row per posterior sample.
    samples = fitted.predict(forecast=forecast)
    expected_shape = (num_samples, duration + forecast)
    for compartment in ("S", "E", "I"):
        assert samples[compartment].shape == expected_shape
@pytest.mark.parametrize("duration", [3, 7])
@pytest.mark.parametrize("forecast", [0, 7])
def test_coalescent_likelihood_smoke(duration, forecast):
    """Smoke-test OverdispersedSEIRModel with an added coalescent likelihood."""
    population = 100
    incubation_time = 2.0
    recovery_time = 7.0

    # Simulate observations, retrying until at least one count is nonzero.
    simulator = OverdispersedSEIRModel(
        population, incubation_time, recovery_time, [None] * duration)
    data = None
    for _ in range(100):
        data = simulator.generate({"R0": 1.5, "rho": 0.5, "k": 1.0})["obs"]
        if data.sum():
            break
    assert data.sum() > 0, "failed to generate positive data"

    # Random genetic data: leaf times plus shuffled coalescent event times.
    leaf_times = torch.rand(5).pow(0.5) * duration
    coal_times = dist.CoalescentTimes(leaf_times).sample()
    coal_times = coal_times[..., torch.randperm(coal_times.size(-1))]

    # Fit with both the epidemiological and the coalescent likelihood terms.
    fitted = OverdispersedSEIRModel(
        population, incubation_time, recovery_time, data,
        leaf_times=leaf_times, coal_times=coal_times)
    num_samples = 5
    fitted.fit(warmup_steps=2, num_samples=num_samples, max_tree_depth=2)

    # Each forecasted latent series has one row per posterior sample.
    samples = fitted.predict(forecast=forecast)
    expected_shape = (num_samples, duration + forecast)
    for compartment in ("S", "E", "I"):
        assert samples[compartment].shape == expected_shape
| 34.279279
| 77
| 0.661761
| 469
| 3,805
| 5.228145
| 0.196162
| 0.073409
| 0.07708
| 0.084421
| 0.832382
| 0.832382
| 0.832382
| 0.820147
| 0.820147
| 0.781811
| 0
| 0.027357
| 0.202628
| 3,805
| 110
| 78
| 34.590909
| 0.780817
| 0.055453
| 0
| 0.780488
| 0
| 0
| 0.063372
| 0
| 0
| 0
| 0
| 0
| 0.146341
| 1
| 0.036585
| false
| 0
| 0.04878
| 0
| 0.085366
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
68adfc2195cf83e1a0ce7ccc5cbac19ab68e25cb
| 108
|
py
|
Python
|
keras_squeezenet_tf2/__init__.py
|
ktokolwiek/keras_squeezenet2
|
6326d47a2eccf2a558f9847fbe9240e02c41c5ac
|
[
"MIT"
] | 3
|
2020-02-24T11:33:47.000Z
|
2021-05-30T03:23:12.000Z
|
keras_squeezenet_tf2/__init__.py
|
ktokolwiek/keras_squeezenet2
|
6326d47a2eccf2a558f9847fbe9240e02c41c5ac
|
[
"MIT"
] | 6
|
2021-03-19T03:48:24.000Z
|
2022-03-11T23:59:54.000Z
|
keras_squeezenet_tf2/__init__.py
|
ktokolwiek/keras_squeezenet2
|
6326d47a2eccf2a558f9847fbe9240e02c41c5ac
|
[
"MIT"
] | 3
|
2019-12-17T11:38:09.000Z
|
2021-11-25T07:54:01.000Z
|
from keras_squeezenet_tf2.squeezenet import SqueezeNet
from keras_squeezenet_tf2.version import __version__
| 36
| 54
| 0.907407
| 14
| 108
| 6.428571
| 0.428571
| 0.2
| 0.422222
| 0.488889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02
| 0.074074
| 108
| 2
| 55
| 54
| 0.88
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
d7d117c08a130cafc00fa9d3da317131660184e9
| 438
|
py
|
Python
|
odin/fuel/image_data/__init__.py
|
trungnt13/odin-ai
|
9c6986a854e62da39637ea463667841378b7dd84
|
[
"MIT"
] | 7
|
2020-12-29T19:35:58.000Z
|
2022-01-31T21:01:30.000Z
|
odin/fuel/image_data/__init__.py
|
imito/odin-ai
|
9c6986a854e62da39637ea463667841378b7dd84
|
[
"MIT"
] | 3
|
2020-02-06T16:44:17.000Z
|
2020-09-26T05:26:14.000Z
|
odin/fuel/image_data/__init__.py
|
trungnt13/odin-ai
|
9c6986a854e62da39637ea463667841378b7dd84
|
[
"MIT"
] | 6
|
2019-02-14T01:36:28.000Z
|
2020-10-30T13:16:32.000Z
|
from odin.fuel.image_data._base import ImageDataset
from odin.fuel.image_data.all_mnist import *
from odin.fuel.image_data.celeba import *
from odin.fuel.image_data.cifar import *
from odin.fuel.image_data.lego_faces import LegoFaces
from odin.fuel.image_data.shapes import *
# from odin.fuel.image_data.synthesize import YDisentanglement
from odin.fuel.image_data.omniglot import *
from odin.fuel.image_data.toys import *
# TODO: STL10
| 36.5
| 62
| 0.824201
| 70
| 438
| 4.985714
| 0.314286
| 0.206304
| 0.309456
| 0.438395
| 0.627507
| 0.386819
| 0
| 0
| 0
| 0
| 0
| 0.005038
| 0.093607
| 438
| 11
| 63
| 39.818182
| 0.874055
| 0.164384
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
cc0b7ab07b911f17b8852989c8ac498f1abdae3a
| 99
|
py
|
Python
|
app/models/__init__.py
|
boceckts/ideahub
|
fbd48c53a5aaf7252a5461d0c0d2fe9d4eef9aed
|
[
"BSD-3-Clause"
] | null | null | null |
app/models/__init__.py
|
boceckts/ideahub
|
fbd48c53a5aaf7252a5461d0c0d2fe9d4eef9aed
|
[
"BSD-3-Clause"
] | null | null | null |
app/models/__init__.py
|
boceckts/ideahub
|
fbd48c53a5aaf7252a5461d0c0d2fe9d4eef9aed
|
[
"BSD-3-Clause"
] | null | null | null |
from app.models.vote import Vote
from app.models.idea import Idea
from app.models.user import User
| 24.75
| 32
| 0.818182
| 18
| 99
| 4.5
| 0.388889
| 0.259259
| 0.481481
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.121212
| 99
| 3
| 33
| 33
| 0.931034
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
0be39c7699cff1addf16901ddef9976b1a6bed26
| 6,407
|
py
|
Python
|
loldib/getratings/models/NA/na_quinn/na_quinn_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_quinn/na_quinn_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
loldib/getratings/models/NA/na_quinn/na_quinn_top.py
|
koliupy/loldib
|
c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
|
[
"Apache-2.0"
] | null | null | null |
"""Rating classes for every NA Quinn (Top lane) matchup.

Each opposing champion is represented by an empty subclass of ``Ratings``;
the classes carry no behaviour of their own, only distinct names.
"""
from getratings.models.ratings import Ratings

# One empty Ratings subclass per opposing champion, generated in a loop
# instead of 138 hand-written ``class NA_Quinn_Top_X(Ratings): pass`` stanzas.
_MATCHUPS = (
    'Aatrox', 'Ahri', 'Akali', 'Alistar', 'Amumu', 'Anivia', 'Annie',
    'Ashe', 'AurelionSol', 'Azir', 'Bard', 'Blitzcrank', 'Brand', 'Braum',
    'Caitlyn', 'Camille', 'Cassiopeia', 'Chogath', 'Corki', 'Darius',
    'Diana', 'Draven', 'DrMundo', 'Ekko', 'Elise', 'Evelynn', 'Ezreal',
    'Fiddlesticks', 'Fiora', 'Fizz', 'Galio', 'Gangplank', 'Garen', 'Gnar',
    'Gragas', 'Graves', 'Hecarim', 'Heimerdinger', 'Illaoi', 'Irelia',
    'Ivern', 'Janna', 'JarvanIV', 'Jax', 'Jayce', 'Jhin', 'Jinx',
    'Kalista', 'Karma', 'Karthus', 'Kassadin', 'Katarina', 'Kayle', 'Kayn',
    'Kennen', 'Khazix', 'Kindred', 'Kled', 'KogMaw', 'Leblanc', 'LeeSin',
    'Leona', 'Lissandra', 'Lucian', 'Lulu', 'Lux', 'Malphite', 'Malzahar',
    'Maokai', 'MasterYi', 'MissFortune', 'MonkeyKing', 'Mordekaiser',
    'Morgana', 'Nami', 'Nasus', 'Nautilus', 'Nidalee', 'Nocturne', 'Nunu',
    'Olaf', 'Orianna', 'Ornn', 'Pantheon', 'Poppy', 'Quinn', 'Rakan',
    'Rammus', 'RekSai', 'Renekton', 'Rengar', 'Riven', 'Rumble', 'Ryze',
    'Sejuani', 'Shaco', 'Shen', 'Shyvana', 'Singed', 'Sion', 'Sivir',
    'Skarner', 'Sona', 'Soraka', 'Swain', 'Syndra', 'TahmKench', 'Taliyah',
    'Talon', 'Taric', 'Teemo', 'Thresh', 'Tristana', 'Trundle',
    'Tryndamere', 'TwistedFate', 'Twitch', 'Udyr', 'Urgot', 'Varus',
    'Vayne', 'Veigar', 'Velkoz', 'Vi', 'Viktor', 'Vladimir', 'Volibear',
    'Warwick', 'Xayah', 'Xerath', 'XinZhao', 'Yasuo', 'Yorick', 'Zac',
    'Zed', 'Ziggs', 'Zilean', 'Zyra',
)

for _champion in _MATCHUPS:
    _class_name = 'NA_Quinn_Top_' + _champion
    globals()[_class_name] = type(_class_name, (Ratings,), {})
| 15.364508
| 46
| 0.761667
| 972
| 6,407
| 4.59465
| 0.151235
| 0.216301
| 0.370802
| 0.463502
| 0.797582
| 0.797582
| 0
| 0
| 0
| 0
| 0
| 0
| 0.173404
| 6,407
| 416
| 47
| 15.401442
| 0.843278
| 0
| 0
| 0.498195
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.498195
| 0.00361
| 0
| 0.501805
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 1
| 0
|
0
| 7
|
04156e7ba9654d496501be017f39f3fa2aa19d2c
| 3,547
|
py
|
Python
|
tcrdist/tests/test_vdj_funcs.py
|
zozo123/tcrdist3
|
49c6554f16ad7f20f50d7303a8ac75268f5f601f
|
[
"MIT"
] | 26
|
2020-12-28T17:37:01.000Z
|
2022-01-29T01:31:13.000Z
|
tcrdist/tests/test_vdj_funcs.py
|
zozo123/tcrdist3
|
49c6554f16ad7f20f50d7303a8ac75268f5f601f
|
[
"MIT"
] | 31
|
2020-08-17T22:17:57.000Z
|
2022-03-18T23:47:34.000Z
|
tcrdist/tests/test_vdj_funcs.py
|
zozo123/tcrdist3
|
49c6554f16ad7f20f50d7303a8ac75268f5f601f
|
[
"MIT"
] | 7
|
2020-08-18T23:55:40.000Z
|
2021-09-22T18:15:54.000Z
|
import pytest
def test_import_vdjtools_beta_w_validation():
    """import_vdjtools loads a beta-chain VDJtools file with validation and
    yields a DataFrame usable directly as a TCRrep cell_df."""
    import numpy as np
    import os
    from tcrdist.paths import path_to_base
    from tcrdist.vdjtools_funcs import import_vdjtools
    from tcrdist.repertoire import TCRrep

    # Reformat vdj_tools input format for tcrdist3.
    vdj_tools_file_beta = os.path.join(
        path_to_base, 'tcrdist', 'data', 'formats',
        'vdj.M_15_CD8_beta.clonotypes.TRB.txt.gz')
    df_beta = import_vdjtools(
        vdj_tools_file=vdj_tools_file_beta,
        chain='beta',
        organism='human',
        db_file='alphabeta_gammadelta_db.tsv',
        validate=True)
    assert np.all(df_beta.columns == ['count', 'freq', 'cdr3_b_aa', 'v_b_gene',
                                      'j_b_gene', 'cdr3_b_nucseq', 'valid_v',
                                      'valid_j', 'valid_cdr3'])

    # The validated frame can be fed straight into a TCRrep instance;
    # construction succeeding without an exception is the check here
    # (the previous `tr = ...` binding was unused).
    TCRrep(
        cell_df=df_beta[['count', 'freq', 'cdr3_b_aa', 'v_b_gene', 'j_b_gene']],
        chains=['beta'],
        organism='human',
        compute_distances=False)
def test_import_vdjtools_beta_no_validation():
    """Without validation, import_vdjtools keeps invalid rows and flags them
    via the boolean valid_v / valid_j / valid_cdr3 columns."""
    import numpy as np
    import os
    from tcrdist.paths import path_to_base
    from tcrdist.vdjtools_funcs import import_vdjtools

    vdj_tools_file_beta = os.path.join(
        path_to_base, 'tcrdist', 'data', 'formats',
        'vdj.M_15_CD8_beta.clonotypes.TRB.txt.gz')
    df_beta = import_vdjtools(
        vdj_tools_file=vdj_tools_file_beta,
        chain='beta',
        organism='human',
        db_file='alphabeta_gammadelta_db.tsv',
        validate=False)
    assert np.all(df_beta.columns == ['count', 'freq', 'cdr3_b_aa', 'v_b_gene',
                                      'j_b_gene', 'cdr3_b_nucseq', 'valid_v',
                                      'valid_j', 'valid_cdr3'])
    # Bug fix: `False in series` tests pandas *index* membership, not values,
    # so the original asserts passed vacuously. Check the values instead:
    # at least one entry per flag column must be invalid.
    assert not df_beta.valid_cdr3.all()
    assert not df_beta.valid_v.all()
    assert not df_beta.valid_j.all()
def test_import_vdjtools_alpha_w_validation():
    """import_vdjtools loads an alpha-chain VDJtools file with validation and
    returns the expected alpha-chain column layout."""
    import numpy as np
    import os
    from tcrdist.paths import path_to_base
    from tcrdist.vdjtools_funcs import import_vdjtools

    vdj_tools_file_alpha = os.path.join(
        path_to_base, 'tcrdist', 'data', 'formats',
        'vdj.M_15_CD8_alpha.clonotypes.TRA.txt.gz')
    df_alpha = import_vdjtools(
        vdj_tools_file=vdj_tools_file_alpha,
        chain='alpha',
        organism='human',
        db_file='alphabeta_gammadelta_db.tsv',
        validate=True)
    assert np.all(df_alpha.columns == ['count', 'freq', 'cdr3_a_aa', 'v_a_gene',
                                       'j_a_gene', 'cdr3_a_nucseq', 'valid_v',
                                       'valid_j', 'valid_cdr3'])
def test_import_vdjtools_alpha_no_validation():
    """Without validation, the alpha-chain import keeps invalid rows and
    flags them via the boolean valid_v / valid_j / valid_cdr3 columns."""
    import numpy as np
    import os
    from tcrdist.paths import path_to_base
    from tcrdist.vdjtools_funcs import import_vdjtools

    vdj_tools_file_alpha = os.path.join(
        path_to_base, 'tcrdist', 'data', 'formats',
        'vdj.M_15_CD8_alpha.clonotypes.TRA.txt.gz')
    df_alpha = import_vdjtools(
        vdj_tools_file=vdj_tools_file_alpha,
        chain='alpha',
        organism='human',
        db_file='alphabeta_gammadelta_db.tsv',
        validate=False)
    assert np.all(df_alpha.columns == ['count', 'freq', 'cdr3_a_aa', 'v_a_gene',
                                       'j_a_gene', 'cdr3_a_nucseq', 'valid_v',
                                       'valid_j', 'valid_cdr3'])
    # Bug fix: `False in series` tests pandas *index* membership, not values,
    # so the original asserts passed vacuously. Check the values instead.
    assert not df_alpha.valid_cdr3.all()
    assert not df_alpha.valid_v.all()
    assert not df_alpha.valid_j.all()
| 44.898734
| 145
| 0.647871
| 496
| 3,547
| 4.274194
| 0.163306
| 0.049057
| 0.067925
| 0.072642
| 0.910849
| 0.862736
| 0.838679
| 0.838679
| 0.80283
| 0.80283
| 0
| 0.010518
| 0.249507
| 3,547
| 78
| 146
| 45.474359
| 0.785875
| 0.026219
| 0
| 0.753846
| 0
| 0
| 0.203768
| 0.077101
| 0
| 0
| 0
| 0
| 0.153846
| 1
| 0.061538
| false
| 0
| 0.461538
| 0
| 0.523077
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
f0870385c56319ffabf67174a18da7ffcb706c91
| 6,130
|
py
|
Python
|
src/tests/test_dataset_utils.py
|
bowang-lab/Transformer-GCN-QA
|
7b0a5f5a005ebaae75796c200035f7e6ce175971
|
[
"MIT"
] | 11
|
2019-08-30T07:39:55.000Z
|
2022-03-24T09:29:35.000Z
|
src/tests/test_dataset_utils.py
|
berc-uoft/Transformer-GCN-QA
|
7b0a5f5a005ebaae75796c200035f7e6ce175971
|
[
"MIT"
] | 27
|
2019-04-19T14:59:09.000Z
|
2019-06-18T15:51:51.000Z
|
src/tests/test_dataset_utils.py
|
bowang-lab/Transformer-GCN-QA
|
7b0a5f5a005ebaae75796c200035f7e6ce175971
|
[
"MIT"
] | 1
|
2019-04-19T19:20:23.000Z
|
2019-04-19T19:20:23.000Z
|
from ..utils import dataset_utils
class TestDatasetUtils(object):
    """Collects all unit tests for `utils.dataset_utils`."""
    def test_load_wikihop(self, dataset):
        """Asserts that `dataset_utils.load_wikihop()` returns the expected value when `masked == False`.

        The fixture holds two un-masked WikiHop training examples: answers and
        candidates appear as plain text rather than ___MASK*___ tokens.
        """
        # Expected un-masked content of the 'train' split, verbatim.
        expected = {'train': [
            {
                "id": "WH_train_0",
                "query": "participant_of juan rossell",
                "answer": "1996 summer olympics",
                "candidates": [
                    "1996 summer olympics",
                    "olympic games",
                    "sport"
                ],
                "supports": [
                    "Juan Miguel Rossell Milanes ( born December 28 , 1969 in Jiguani , Granma ) is a beach volleyball player from Cuba , who won the gold medal in the men 's beach team competition at the 2003 Pan American Games in Santo Domingo , Dominican Republic , partnering Francisco Alvarez . He represented his native country at the 1996 and the 2004 Summer Olympics ."
                ]
            },
            {
                "id": "WH_train_1",
                "query": "languages_spoken_or_written john osteen",
                "answer": "english",
                "candidates": [
                    "english",
                    "greek",
                    "koine greek",
                    "nahuatl",
                    "spanish"
                ],
                "supports": [
                    "Lakewood Church is a nondenominational charismatic Christian megachurch located in Houston, Texas. It is the largest congregation in the United States, averaging about 52,000 attendees per week. The 16,800-seat Lakewood Church Central Campus, home to four English-language services and two Spanish-language services per week, is located at the former Compaq Center. Joel Osteen is the senior pastor of Lakewood Church with his wife, Victoria, who serves as co-pastor. Lakewood Church is a part of the Word of Faith movement.",
                    "Mexico (, modern Nahuatl ), officially the United Mexican States, is a federal republic in the southern half of North America. It is bordered to the north by the United States; to the south and west by the Pacific Ocean; to the southeast by Guatemala, Belize, and the Caribbean Sea; and to the east by the Gulf of Mexico. Covering almost two million square kilometers (over 760,000\u00a0sq\u00a0mi), Mexico is the sixth largest country in the Americas by total area and the 13th largest independent nation in the world. With an estimated population of over 120 million, it is the eleventh most populous country and the most populous Spanish-speaking country in the world while being the second most populous country in Latin America. Mexico is a federation comprising 31 states and a federal district that is also its capital and most populous city. Other metropolises include Guadalajara, Monterrey, Puebla, Toluca, Tijuana and Le\u00f3n."
                ]
            }
        ]}
        actual = dataset
        assert expected == actual
    def test_load_wikihop_masked(self, masked_dataset):
        """Asserts that `dataset_utils.load_wikihop()` returns the expected value when `masked == True`.

        The masked fixture replaces answers/candidates with ___MASK*___ tokens
        and tokenizes the support passages (spaces around punctuation).
        """
        # Expected masked content of the 'train.masked' split, verbatim.
        expected = {'train.masked': [
            {
                "id": "WH_train_0",
                "query": "participant_of juan rossell",
                "answer": "___MASK3___",
                "candidates": [
                    "___MASK3___",
                    "___MASK63___",
                    "___MASK83___"
                ],
                "supports": [
                    "Juan Miguel Rossell Milanes ( born December 28 , 1969 in Jiguani , Granma ) is a beach volleyball player from Cuba , who won the gold medal in the men ' s beach team competition at the 2003 Pan American Games in Santo Domingo , Dominican Republic , partnering Francisco Alvarez . He represented his native country at the 1996 and the 2004 Summer Olympics ."
                ]
            },
            {
                "id": "WH_train_1",
                "query": "languages_spoken_or_written john osteen",
                "answer": "___MASK46___",
                "candidates": [
                    "___MASK15___",
                    "___MASK25___",
                    "___MASK46___",
                    "___MASK67___",
                    "___MASK85___"
                ],
                "supports": [
                    "Lakewood Church is a nondenominational charismatic Christian megachurch located in Houston , Texas . It is the largest congregation in the United States , averaging about 52 , 000 attendees per week . The 16 , 800 - seat Lakewood Church Central Campus , home to four ___MASK46___ - language services and two Spanish - language services per week , is located at the former Compaq Center . Joel Osteen is the senior pastor of Lakewood Church with his wife , Victoria , who serves as co - pastor . Lakewood Church is a part of the Word of Faith movement .",
                    "Mexico (, modern ___MASK67___ ), officially the United Mexican States , is a federal republic in the southern half of North America . It is bordered to the north by the United States ; to the south and west by the Pacific Ocean ; to the southeast by Guatemala , Belize , and the Caribbean Sea ; and to the east by the Gulf of Mexico . Covering almost two million square kilometers ( over 760 , 000 sq mi ), Mexico is the sixth largest country in the Americas by total area and the 13th largest independent nation in the world . With an estimated population of over 120 million , it is the eleventh most populous country and the most populous ___MASK25___ - speaking country in the world while being the second most populous country in Latin America . Mexico is a federation comprising 31 states and a federal district that is also its capital and most populous city . Other metropolises include Guadalajara , Monterrey , Puebla , Toluca , Tijuana and Le\u00f3n ."
                ]
            }
        ]}
        actual = masked_dataset
        assert expected == actual
| 72.97619
| 984
| 0.629527
| 744
| 6,130
| 5.043011
| 0.307796
| 0.015991
| 0.009595
| 0.018124
| 0.866738
| 0.866738
| 0.866738
| 0.866738
| 0.866738
| 0.866738
| 0
| 0.030726
| 0.309788
| 6,130
| 83
| 985
| 73.855422
| 0.856062
| 0.039967
| 0
| 0.371429
| 0
| 0.085714
| 0.720307
| 0.013481
| 0
| 0
| 0
| 0
| 0.028571
| 1
| 0.028571
| false
| 0
| 0.014286
| 0
| 0.057143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0b0e7a5882ea8502e7ef01bb04dff9ddf72af5b8
| 11,867
|
py
|
Python
|
z3tracker/ruleset/vt8/ganonstower.py
|
compiling/z3-tracker
|
4bf7e4c7bca6f4e0ac51bede2208d8f19022bab0
|
[
"MIT"
] | null | null | null |
z3tracker/ruleset/vt8/ganonstower.py
|
compiling/z3-tracker
|
4bf7e4c7bca6f4e0ac51bede2208d8f19022bab0
|
[
"MIT"
] | null | null | null |
z3tracker/ruleset/vt8/ganonstower.py
|
compiling/z3-tracker
|
4bf7e4c7bca6f4e0ac51bede2208d8f19022bab0
|
[
"MIT"
] | null | null | null |
'''
Ganon's Tower

Location graph for the Ganon's Tower dungeon, consumed by the tracker's
ruleset machinery via the module-level LOCATIONS constant.
'''
__all__ = 'LOCATIONS',

# Each entry maps a location name to a node dict with:
#   'type':    node kind as used elsewhere in the ruleset ('interior', 'area',
#              'dungeonchest', 'dungeonkey', 'dungeonchest_nokey',
#              'dungeonboss')
#   'dungeon': owning dungeon name (absent on the entrance node)
#   'link':    mapping of reachable location -> list of requirement tuples,
#              e.g. ('item', 'hookshot'), ('settings', 'inverted'),
#              ('nosettings', ...), ('bigkey', ...), ('rabbitbarrier', None),
#              or nested ('and'/'or', [...]) combinations; an empty list
#              means the connection has no requirement.
LOCATIONS = {
    "Ganon's Tower Entrance (I)": {
        'type': 'interior',
        'link': {
            'Castle Tower Entrance (E)': [('settings', 'inverted')],
            "Ganon's Tower Entrance (E)": [('nosettings', 'inverted')],
            "Ganon's Tower Lobby": [('and', [
                ('or', [
                    ('settings', 'placement_advanced'),
                    ('and', [
                        ('or', [
                            ('settings', 'swordless'),
                            ('item', 'mastersword')]),
                        ('or', [
                            ('item', 'bottle'), ('item', 'bluemail')])])]),
                ('rabbitbarrier', None)])]}
    },
    "Ganon's Tower Lobby": {
        'type': 'area', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Entrance (I)": [],
            "Ganon's Tower Torch Key Room": [],
            "Ganon's Tower Trap Room": [],
            "Ganon's Tower Ascent 1": [('bigkey', "Ganon's Tower")]}
    },
    "Ganon's Tower Torch Key Room": {
        'type': 'area', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Lobby": [],
            "Ganon's Tower Torch Key": [('item', 'pegasus')],
            "Ganon's Tower Moving Bumper Key": []}
    },
    "Ganon's Tower Torch Key": {
        'type': 'dungeonchest', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Torch Key Room": []}
    },
    "Ganon's Tower Moving Bumper Key": {
        'type': 'dungeonkey', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Torch Key Room": [],
            "Ganon's Tower Pit Room": [('item', 'hammer')]}
    },
    "Ganon's Tower Pit Room": {
        'type': 'area', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Moving Bumper Key": [('item', 'hookshot')],
            "Ganon's Tower Stalfos Room": [('item', 'hookshot')],
            "Ganon's Tower Map": [('item', 'hookshot'), ('item', 'pegasus')]}
    },
    "Ganon's Tower Stalfos Room": {
        'type': 'area', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Pit Room": [
                ('item', 'hookshot'), ('item', 'pegasus')],
            "Ganon's Tower Stalfos Room Chest 1": [],
            "Ganon's Tower Stalfos Room Chest 2": [],
            "Ganon's Tower Stalfos Room Chest 3": [],
            "Ganon's Tower Stalfos Room Chest 4": []}
    },
    "Ganon's Tower Stalfos Room Chest 1": {
        'type': 'dungeonchest', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Stalfos Room": []}
    },
    "Ganon's Tower Stalfos Room Chest 2": {
        'type': 'dungeonchest', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Stalfos Room": []}
    },
    "Ganon's Tower Stalfos Room Chest 3": {
        'type': 'dungeonchest', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Stalfos Room": []}
    },
    "Ganon's Tower Stalfos Room Chest 4": {
        'type': 'dungeonchest', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Stalfos Room": []}
    },
    "Ganon's Tower Map": {
        'type': 'dungeonchest', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Pit Room": [('item', 'hookshot')],
            "Ganon's Tower Switch Key": []}
    },
    "Ganon's Tower Switch Key": {
        'type': 'dungeonkey', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Map": [],
            "Ganon's Tower Winder Room": [('item', 'hookshot')]}
    },
    "Ganon's Tower Winder Room": {
        'type': 'dungeonchest', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Secret Treasure": [('item', 'bombs')],
            "Ganon's Tower Convergence": []}
    },
    "Ganon's Tower Secret Treasure": {
        'type': 'area', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Secret Chest 1": [],
            "Ganon's Tower Secret Chest 2": [],
            "Ganon's Tower Secret Chest 3": [],
            "Ganon's Tower Secret Chest 4": [],
            "Ganon's Tower Convergence": []}
    },
    "Ganon's Tower Secret Chest 1": {
        'type': 'dungeonchest', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Secret Treasure": []}
    },
    "Ganon's Tower Secret Chest 2": {
        'type': 'dungeonchest', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Secret Treasure": []}
    },
    "Ganon's Tower Secret Chest 3": {
        'type': 'dungeonchest', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Secret Treasure": []}
    },
    "Ganon's Tower Secret Chest 4": {
        'type': 'dungeonchest', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Secret Treasure": []}
    },
    "Ganon's Tower Trap Room": {
        'type': 'area', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Lobby": [],
            "Ganon's Tower Trap Chest 1": [],
            "Ganon's Tower Trap Chest 2": [],
            "Ganon's Tower Tile Room": [('item', 'somaria')]}
    },
    "Ganon's Tower Trap Chest 1": {
        'type': 'dungeonchest', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Trap Room": []}
    },
    "Ganon's Tower Trap Chest 2": {
        'type': 'dungeonchest', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Trap Room": []}
    },
    "Ganon's Tower Tile Room": {
        'type': 'dungeonchest', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Trap Room": [],
            "Ganon's Tower Torch Race": []}
    },
    "Ganon's Tower Torch Race": {
        'type': 'area', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Tile Room": [],
            "Ganon's Tower Compass Room": [('item', 'firerod')]}
    },
    "Ganon's Tower Compass Room": {
        'type': 'area', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Torch Race": [],
            "Ganon's Tower Compass Chest 1": [],
            "Ganon's Tower Compass Chest 2": [],
            "Ganon's Tower Compass Chest 3": [],
            "Ganon's Tower Compass Chest 4": [],
            "Ganon's Tower Obstacle Course Key": []}
    },
    "Ganon's Tower Compass Chest 1": {
        'type': 'dungeonchest', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Compass Room": []}
    },
    "Ganon's Tower Compass Chest 2": {
        'type': 'dungeonchest', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Compass Room": []}
    },
    "Ganon's Tower Compass Chest 3": {
        'type': 'dungeonchest', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Compass Room": []}
    },
    "Ganon's Tower Compass Chest 4": {
        'type': 'dungeonchest', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Compass Room": []}
    },
    "Ganon's Tower Obstacle Course Key": {
        'type': 'dungeonkey', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Convergence": []}
    },
    "Ganon's Tower Convergence": {
        'type': 'area', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Anti-Fairy Room": [],
            "Ganon's Tower Treasure": []}
    },
    "Ganon's Tower Anti-Fairy Room": {
        'type': 'dungeonchest', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Convergence": [],
            "Ganon's Tower Armos On Ice": [('item', 'bombs')]}
    },
    "Ganon's Tower Armos On Ice": {
        'type': 'area', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Big Key Room": [],
            "Ganon's Tower Treasure": []}
    },
    "Ganon's Tower Big Key Room": {
        'type': 'area', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Armos On Ice": [],
            "Ganon's Tower Big Key Chest 1": [],
            "Ganon's Tower Big Key Chest 2": [],
            "Ganon's Tower Big Key Chest 3": []}
    },
    "Ganon's Tower Big Key Chest 1": {
        'type': 'dungeonchest', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Big Key Room": []}
    },
    "Ganon's Tower Big Key Chest 2": {
        'type': 'dungeonchest', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Big Key Room": []}
    },
    "Ganon's Tower Big Key Chest 3": {
        'type': 'dungeonchest', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Big Key Room": []}
    },
    "Ganon's Tower Treasure": {
        'type': 'dungeonchest_nokey', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Convergence": [],
            "Ganon's Tower Torch Key Room": []}
    },
    "Ganon's Tower Ascent 1": {
        'type': 'area', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Lobby": [],
            "Ganon's Tower Ascent 2": [('item', 'bow')]}
    },
    "Ganon's Tower Ascent 2": {
        'type': 'area', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Ascent 1": [('item', 'bow')],
            "Ganon's Tower Ascent 3": [
                ('item', 'lantern'), ('item', 'firerod')]}
    },
    "Ganon's Tower Ascent 3": {
        'type': 'area', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Ascent 2": [],
            "Ganon's Tower Helmasaur Key": [],
            "Ganon's Tower Helmasaur Chest 1": [],
            "Ganon's Tower Helmasaur Chest 2": [],
            "Ganon's Tower Ascent 4": []}
    },
    "Ganon's Tower Helmasaur Key": {
        'type': 'dungeonkey', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Ascent 3": []}
    },
    "Ganon's Tower Helmasaur Chest 1": {
        'type': 'dungeonchest_nokey', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Ascent 3": []}
    },
    "Ganon's Tower Helmasaur Chest 2": {
        'type': 'dungeonchest_nokey', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Ascent 3": []}
    },
    "Ganon's Tower Ascent 4": {
        'type': 'area', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Ascent 3": [],
            "Ganon's Tower Rabbit Beam Chest": [],
            "Ganon's Tower Ascent 5": []}
    },
    "Ganon's Tower Rabbit Beam Chest": {
        'type': 'dungeonchest_nokey', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Ascent 4": []}
    },
    "Ganon's Tower Ascent 5": {
        'type': 'area', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Ascent 4": [],
            "Ganon's Tower Ascent 6": [('item', 'hookshot')]}
    },
    "Ganon's Tower Ascent 6": {
        'type': 'area', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Last Chest": [],
            "Ganon's Tower Boss": []}
    },
    "Ganon's Tower Last Chest": {
        'type': 'dungeonchest_nokey', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Ascent 6": []}
    },
    "Ganon's Tower Boss": {
        'type': 'dungeonboss', "dungeon": "Ganon's Tower",
        'link': {
            "Ganon's Tower Boss Item": [
                ('item', 'sword'),
                ('and', [
                    ('settings', 'swordless'), ('item', 'hammer')]),
                ('item', 'bugnet')]}
    },
    "Ganon's Tower Boss Item": {
        'type': 'area', 'dungeon': "Ganon's Tower",
        'link': {
            "Ganon's Tower Reward": []}
    },
    "Ganon's Tower Reward": {
        'type': 'area', 'dungeon': "Ganon's Tower",
        'link': {
            'Pyramid': [('nosettings', 'inverted')],
            'Castle Walls': [('settings', 'inverted')]}
    },
}
| 35.109467
| 77
| 0.466925
| 1,258
| 11,867
| 4.396661
| 0.067568
| 0.220213
| 0.403724
| 0.165974
| 0.910324
| 0.826794
| 0.713072
| 0.67185
| 0.628639
| 0.602965
| 0
| 0.007564
| 0.342715
| 11,867
| 337
| 78
| 35.21365
| 0.701538
| 0.001095
| 0
| 0.443425
| 0
| 0
| 0.530052
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.042813
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9bc4a569e3cdc89fbf79f7c9e61695ff92401f12
| 3,752
|
py
|
Python
|
pyglitch/siOperations.py
|
HazelTheWitch/PyGlitch
|
c1c36de41ffdffec919e7932f4fd5b9759196295
|
[
"MIT"
] | 5
|
2020-01-25T01:04:00.000Z
|
2020-11-18T17:47:42.000Z
|
pyglitch/siOperations.py
|
HazelTheWitch/PyGlitch
|
c1c36de41ffdffec919e7932f4fd5b9759196295
|
[
"MIT"
] | 3
|
2021-06-08T20:51:30.000Z
|
2022-03-12T00:13:32.000Z
|
pyglitch/siOperations.py
|
HazelTheWitch/PyGlitch
|
c1c36de41ffdffec919e7932f4fd5b9759196295
|
[
"MIT"
] | null | null | null |
def siOr(s0, s1):
    '''Performs s0 | s1 where s0 and s1 are lists of sections or intervals'''
    # Flatten both operands into (endpoint, operand-index) events.  The
    # stable sort keeps s0's events ahead of s1's on equal endpoints,
    # matching the sweep's original tie-breaking.
    events = [(p, 0) for sec in s0 for p in sec]
    events += [(p, 1) for sec in s1 for p in sec]
    events.sort(key=lambda ev: ev[0])
    inside = [False, False]  # currently inside s0 / s1
    result = []
    start = None
    for point, which in events:
        before = inside[0] or inside[1]
        inside[which] = not inside[which]
        after = inside[0] or inside[1]
        if before == after:
            continue
        if after:
            # Union just became active: open a new section here.
            start = point
        elif result and result[-1][1] == start:
            # Touches the previous section exactly: extend it instead.
            result[-1] = (result[-1][0], point)
        else:
            result.append((start, point))
    # Drop zero-width sections.
    return [(a, b) for a, b in result if a != b]
def siAnd(s0, s1):
    '''Performs s0 & s1 where s0 and s1 are lists of sections or intervals'''
    # Same sweep as siOr, but a section is open only while BOTH operands
    # are active.
    events = [(p, 0) for sec in s0 for p in sec]
    events += [(p, 1) for sec in s1 for p in sec]
    events.sort(key=lambda ev: ev[0])
    inside = [False, False]  # currently inside s0 / s1
    result = []
    start = None
    for point, which in events:
        before = inside[0] and inside[1]
        inside[which] = not inside[which]
        after = inside[0] and inside[1]
        if before == after:
            continue
        if after:
            start = point
        elif result and result[-1][1] == start:
            # Touches the previous section exactly: extend it instead.
            result[-1] = (result[-1][0], point)
        else:
            result.append((start, point))
    # Drop zero-width sections.
    return [(a, b) for a, b in result if a != b]
def siXor(s0, s1):
    '''Performs s0 ^ s1 where s0 and s1 are lists of sections or intervals'''
    # Same sweep as siOr/siAnd, with "exactly one operand active" as the
    # open condition (!= on two booleans is xor).
    events = [(p, 0) for sec in s0 for p in sec]
    events += [(p, 1) for sec in s1 for p in sec]
    events.sort(key=lambda ev: ev[0])
    inside = [False, False]  # currently inside s0 / s1
    result = []
    start = None
    for point, which in events:
        before = inside[0] != inside[1]
        inside[which] = not inside[which]
        after = inside[0] != inside[1]
        if before == after:
            continue
        if after:
            start = point
        elif result and result[-1][1] == start:
            # Touches the previous section exactly: extend it instead.
            result[-1] = (result[-1][0], point)
        else:
            result.append((start, point))
    # Drop zero-width sections.
    return [(a, b) for a, b in result if a != b]
def siMinus(s0, s1):
    '''Performs s0 - s1 where s0 and s1 are lists of sections or intervals'''
    # Consistency fix: siOr/siAnd/siXor all use the same before/after
    # transition sweep; the original siMinus used a separate `active` flag
    # and post-toggle state checks.  The difference predicate is simply
    # "inside s0 and not inside s1", so the shared pattern applies
    # unchanged and the redundant flag is dropped.  (A toggle can never
    # leave the predicate true-before and true-after, so checking the
    # transition is equivalent to the old post-state checks.)
    actSec = [False, False]
    secs = []
    k0 = [i for s in s0 for i in s]
    k1 = [i for s in s1 for i in s]
    # Stable sort: on ties, s0 events stay ahead of s1 events.
    keyPoints = sorted([(k, 0) for k in k0] + [(k, 1) for k in k1],
                       key=lambda x: x[0])
    X = None
    for k, i in keyPoints:
        a0 = actSec[0] and not actSec[1]
        actSec[i] = not actSec[i]
        a1 = actSec[0] and not actSec[1]
        if a0 != a1:
            if a1:
                # Difference just became active: open a section here.
                X = k
            elif len(secs) > 0 and secs[-1][1] == X:
                # Touches the previous section exactly: extend it.
                secs[-1] = (secs[-1][0], k)
            else:
                secs.append((X, k))
    # Drop zero-width sections.
    secs = [(a, b) for a, b in secs if a != b]
    return secs
def verifySI(s):
    '''Rebuild *s* as a list of sections with overlapping members merged.'''
    # One (position, is-start) event pair per section.  Events are emitted
    # interleaved (start, end, start, end, ...) so that on equal positions
    # the stable sort preserves the input-list order of the sections.
    events = []
    for start, end in s:
        events.append((start, True))
        events.append((end, False))
    events.sort(key=lambda ev: ev[0])
    merged = []
    open_at = None
    depth = 0  # how many input sections currently cover the sweep point
    for pos, is_start in events:
        if is_start:
            if not depth:
                open_at = pos
            depth += 1
        else:
            depth -= 1
            if not depth:
                # Coverage dropped back to zero: close the merged section.
                merged.append((open_at, pos))
    return merged
| 23.898089
| 86
| 0.425107
| 562
| 3,752
| 2.838078
| 0.099644
| 0.022571
| 0.025078
| 0.03511
| 0.810031
| 0.781818
| 0.781818
| 0.781818
| 0.781818
| 0.781818
| 0
| 0.059923
| 0.44403
| 3,752
| 156
| 87
| 24.051282
| 0.704698
| 0.072228
| 0
| 0.719626
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046729
| false
| 0
| 0
| 0
| 0.093458
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
9bf196e781709b2361ed95f00efd35e988d8ac5c
| 23,339
|
py
|
Python
|
pay-api/tests/unit/services/test_payment.py
|
stevenc987/sbc-pay
|
04f02f362f88a30c082b0643583b8d0ebff6063f
|
[
"Apache-2.0"
] | null | null | null |
pay-api/tests/unit/services/test_payment.py
|
stevenc987/sbc-pay
|
04f02f362f88a30c082b0643583b8d0ebff6063f
|
[
"Apache-2.0"
] | null | null | null |
pay-api/tests/unit/services/test_payment.py
|
stevenc987/sbc-pay
|
04f02f362f88a30c082b0643583b8d0ebff6063f
|
[
"Apache-2.0"
] | null | null | null |
# Copyright © 2019 Province of British Columbia
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests to assure the FeeSchedule Service.
Test-Suite to ensure that the FeeSchedule Service is working as expected.
"""
from datetime import datetime
import pytz
from pay_api.models.payment_account import PaymentAccount
from pay_api.services.payment import Payment as Payment_service
from pay_api.utils.enums import InvoiceStatus, InvoiceReferenceStatus, PaymentMethod
from tests.utilities.base_test import (
factory_invoice, factory_payment_line_item, factory_invoice_reference, factory_payment, factory_payment_account)
def test_payment_saved_from_new(session):
    """Assert that the payment is saved to the table."""
    # Build and persist the minimal fixture chain:
    # account -> payment -> invoice -> invoice reference.
    payment_account = factory_payment_account()
    payment = factory_payment()
    payment_account.save()
    payment.save()
    invoice = factory_invoice(payment_account)
    invoice.save()
    factory_invoice_reference(invoice.id).save()
    p = Payment_service.find_by_id(payment.id)
    # The service lookup should return a fully populated payment record.
    assert p is not None
    assert p.id is not None
    assert p.payment_system_code is not None
    assert p.payment_method_code is not None
    assert p.payment_status_code is not None
def test_payment_invalid_lookup(session):
    """Test invalid lookup."""
    # Looking up a non-existent id yields an empty service object
    # (not None) whose id is unset.
    p = Payment_service.find_by_id(999)
    assert p is not None
    assert p.id is None
def test_payment_with_no_active_invoice(session):
    """Assert that a payment whose only invoice is DELETED can still be found."""
    payment_account = factory_payment_account()
    payment = factory_payment()
    payment_account.save()
    payment.save()
    # The invoice is created directly in DELETED state, so the payment has
    # no active invoice attached.
    invoice = factory_invoice(payment_account, InvoiceStatus.DELETED.value)
    invoice.save()
    factory_invoice_reference(invoice.id).save()
    p = Payment_service.find_by_id(payment.id)
    assert p is not None
    assert p.id is not None
def test_search_payment_history(session):
    """Assert that the search payment history is working.

    Exercises search_purchase_history against a single account with a
    series of different search filters, checking the reported total for
    each one.
    """
    payment_account = factory_payment_account()
    payment_account.save()
    invoice = factory_invoice(payment_account)
    invoice.save()
    factory_invoice_reference(invoice.id).save()
    auth_account_id = PaymentAccount.find_by_id(payment_account.id).auth_account_id

    def assert_total(search_filter, expected_total):
        # Shared search + assertion stanza, previously duplicated ~10 times.
        results = Payment_service.search_purchase_history(
            auth_account_id=auth_account_id, search_filter=search_filter,
            limit=1, page=1)
        assert results is not None
        assert results.get('items') is not None
        assert results.get('total') == expected_total

    assert_total({}, 1)

    # Add one more payment
    payment_account.save()
    invoice = factory_invoice(payment_account)
    invoice.save()
    factory_invoice_reference(invoice.id).save()
    assert_total({}, 2)

    # Search by status
    assert_total({'status': 'CREATED'}, 2)
    assert_total({'status': 'COMPLETED'}, 0)
    # Search by folio number
    assert_total({'folioNumber': '1234567890'}, 2)
    # Search by business identifier
    assert_total({'businessIdentifier': invoice.business_identifier}, 2)
    # Search by creation-date range (today .. today)
    today = datetime.now().strftime('%m/%d/%Y')
    assert_total({'dateFilter': {'createdFrom': today, 'createdTo': today}}, 2)
    # Search by a week index with no payments
    assert_total({'weekFilter': {'index': 2}}, 0)
    # Search by current month/year
    assert_total({'monthFilter': {'month': datetime.now().month,
                                  'year': datetime.now().year}}, 2)
    # Search by creator name
    assert_total({'createdBy': invoice.created_name}, 2)
def test_search_payment_history_for_all(session):
    """Assert that searching all payment history caps results at the default size."""
    payment_account = factory_payment_account()
    payment_account.save()
    auth_account_id = PaymentAccount.find_by_id(payment_account.id).auth_account_id
    # Create 20 payment/invoice pairs; the loop index is irrelevant, so use `_`.
    for _ in range(20):
        payment = factory_payment(payment_status_code='CREATED')
        payment.save()
        invoice = factory_invoice(payment_account)
        invoice.save()
        factory_invoice_reference(invoice.id).save()
    results = Payment_service.search_all_purchase_history(auth_account_id=auth_account_id, search_filter={})
    assert results is not None
    assert results.get('items') is not None
    # Returns only the default number if payload is empty
    assert results.get('total') == 10
def test_create_payment_report_csv(session, rest_call_mock):
    """Assert that the create payment report is working for CSV output."""
    payment_account = factory_payment_account()
    payment_account.save()
    auth_account_id = PaymentAccount.find_by_id(payment_account.id).auth_account_id
    # Create 20 payment/invoice pairs; the loop index is irrelevant, so use `_`.
    for _ in range(20):
        payment = factory_payment(payment_status_code='CREATED')
        payment.save()
        invoice = factory_invoice(payment_account)
        invoice.save()
        factory_invoice_reference(invoice.id).save()
    Payment_service.create_payment_report(auth_account_id=auth_account_id, search_filter={},
                                          content_type='text/csv', report_name='test')
    assert True  # If no error, then good
def test_create_payment_report_pdf(session, rest_call_mock):
    """Assert that the create payment report is working for PDF output."""
    payment_account = factory_payment_account()
    payment_account.save()
    auth_account_id = PaymentAccount.find_by_id(payment_account.id).auth_account_id
    # Create 20 payment/invoice pairs; the loop index is irrelevant, so use `_`.
    for _ in range(20):
        payment = factory_payment(payment_status_code='CREATED')
        payment.save()
        invoice = factory_invoice(payment_account)
        invoice.save()
        factory_invoice_reference(invoice.id).save()
    Payment_service.create_payment_report(auth_account_id=auth_account_id, search_filter={},
                                          content_type='application/pdf', report_name='test')
    assert True  # If no error, then good
def test_search_payment_history_with_tz(session):
    """Assert that the search payment history is working."""
    payment_account = factory_payment_account()
    # Create the invoice at local midnight converted to UTC, so the
    # search's date handling is exercised with a timezone-aware timestamp.
    invoice_created_on = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
    invoice_created_on = invoice_created_on.astimezone(pytz.utc)
    payment = factory_payment(payment_status_code='CREATED')
    payment_account.save()
    payment.save()
    invoice = factory_invoice(payment_account, created_on=invoice_created_on)
    invoice.save()
    factory_invoice_reference(invoice.id).save()
    auth_account_id = PaymentAccount.find_by_id(payment_account.id).auth_account_id
    results = Payment_service.search_purchase_history(auth_account_id=auth_account_id,
                                                      search_filter={}, limit=1, page=1)
    assert results is not None
    assert results.get('items') is not None
    assert results.get('total') == 1
    # Add one more payment
    invoice_created_on = datetime.now().replace(hour=0, minute=0, second=0, microsecond=0)
    invoice_created_on = invoice_created_on.astimezone(pytz.utc)
    payment = factory_payment(payment_status_code='CREATED')
    payment_account.save()
    payment.save()
    invoice = factory_invoice(payment_account, created_on=invoice_created_on)
    invoice.save()
    factory_invoice_reference(invoice.id).save()
    results = Payment_service.search_purchase_history(auth_account_id=auth_account_id,
                                                      search_filter={}, limit=1, page=1)
    assert results is not None
    assert results.get('items') is not None
    assert results.get('total') == 2
def test_search_account_payments(session):
    """Assert that the search account payments is working."""
    inv_number = 'REG00001'
    # Factory .save() chains return the persisted model.
    payment_account = factory_payment_account().save()
    invoice_1 = factory_invoice(payment_account)
    invoice_1.save()
    factory_invoice_reference(invoice_1.id, invoice_number=inv_number).save()
    payment_1 = factory_payment(payment_status_code='CREATED',
                                payment_account_id=payment_account.id, invoice_number=inv_number,
                                payment_method_code=PaymentMethod.PAD.value)
    payment_1.save()
    auth_account_id = PaymentAccount.find_by_id(payment_account.id).auth_account_id
    # status=None: no status filter, so the single payment is returned.
    results = Payment_service.search_account_payments(auth_account_id=auth_account_id,
                                                      status=None, limit=1, page=1)
    assert results is not None
    assert results.get('items') is not None
    assert results.get('total') == 1
def test_search_account_failed_payments(session):
    """Assert that the search account payments is working."""
    inv_number_1 = 'REG00001'
    payment_account = factory_payment_account().save()
    invoice_1 = factory_invoice(payment_account)
    invoice_1.save()
    # Keep the saved references; their status is mutated further below.
    inv_ref_1 = factory_invoice_reference(invoice_1.id, invoice_number=inv_number_1).save()
    payment_1 = factory_payment(payment_status_code='FAILED',
                                payment_account_id=payment_account.id, invoice_number=inv_number_1,
                                payment_method_code=PaymentMethod.PAD.value)
    payment_1.save()
    auth_account_id = PaymentAccount.find_by_id(payment_account.id).auth_account_id
    results = Payment_service.search_account_payments(auth_account_id=auth_account_id,
                                                      status='FAILED', limit=1, page=1)
    assert results.get('items')
    assert results.get('total') == 1
    # Create one more payment with failed status.
    inv_number_2 = 'REG00002'
    invoice_2 = factory_invoice(payment_account)
    invoice_2.save()
    inv_ref_2 = factory_invoice_reference(invoice_2.id, invoice_number=inv_number_2).save()
    payment_2 = factory_payment(payment_status_code='FAILED',
                                payment_account_id=payment_account.id, invoice_number=inv_number_2,
                                payment_method_code=PaymentMethod.PAD.value)
    payment_2.save()
    auth_account_id = PaymentAccount.find_by_id(payment_account.id).auth_account_id
    results = Payment_service.search_account_payments(auth_account_id=auth_account_id,
                                                      status='FAILED', limit=1, page=1)
    assert results.get('items')
    assert results.get('total') == 2
    # Now combine both payments into one, by setting status to invoice reference. - NSF payments
    inv_ref_1.status_code = InvoiceReferenceStatus.CANCELLED.value
    inv_ref_2.status_code = InvoiceReferenceStatus.CANCELLED.value
    inv_ref_1.save()
    inv_ref_2.save()
    # Now create new invoice reference for consolidated invoice
    inv_number_3 = 'REG00003'
    factory_invoice_reference(invoice_1.id, invoice_number=inv_number_3).save()
    factory_invoice_reference(invoice_2.id, invoice_number=inv_number_3).save()
    results = Payment_service.search_account_payments(auth_account_id=auth_account_id,
                                                      status='FAILED', limit=1, page=1)
    # Now there are no active failed payments, so it should return zero records
    assert not results.get('items')
    assert results.get('total') == 0
def test_create_account_payments_for_one_failed_payment(session):
    """Assert that the create account payments is working."""
    inv_number_1 = 'REG00001'
    payment_account = factory_payment_account().save()
    invoice_1 = factory_invoice(payment_account)
    invoice_1.save()
    factory_invoice_reference(invoice_1.id, invoice_number=inv_number_1).save()
    payment_1 = factory_payment(payment_status_code='FAILED',
                                payment_account_id=payment_account.id, invoice_number=inv_number_1,
                                payment_method_code=PaymentMethod.PAD.value)
    payment_1.save()
    auth_account_id = PaymentAccount.find_by_id(payment_account.id).auth_account_id
    results = Payment_service.search_account_payments(auth_account_id=auth_account_id,
                                                      status='FAILED', limit=1, page=1)
    assert results.get('total') == 1
    # Retry with a single failed payment: no consolidation should happen.
    new_payment = Payment_service.create_account_payment(auth_account_id=auth_account_id, is_retry_payment=True)
    old_payment = Payment_service.find_by_id(payment_1.id)
    # Assert new payment invoice number is same as old payment as there is only one failed payment.
    assert new_payment.invoice_number == old_payment.invoice_number
def test_create_account_payments_for_multiple_failed_payments(session):
    """Assert that the create account payments is working.

    Two FAILED payments on one account must be consolidated into a single
    new payment covering both invoice amounts.
    """
    payment_account = factory_payment_account().save()

    def _add_failed_payment(inv_number):
        # Persist invoice + line item + reference + a FAILED $100 PAD
        # payment (stanza was previously duplicated per invoice number).
        invoice = factory_invoice(payment_account, total=100)
        invoice.save()
        factory_payment_line_item(invoice_id=invoice.id, fee_schedule_id=1).save()
        factory_invoice_reference(invoice.id, invoice_number=inv_number).save()
        payment = factory_payment(payment_status_code='FAILED',
                                  payment_account_id=payment_account.id,
                                  invoice_number=inv_number,
                                  invoice_amount=100,
                                  payment_method_code=PaymentMethod.PAD.value)
        payment.save()
        return payment

    payment_1 = _add_failed_payment('REG00001')
    # Create one more payment with failed status.
    payment_2 = _add_failed_payment('REG00002')
    auth_account_id = PaymentAccount.find_by_id(payment_account.id).auth_account_id
    results = Payment_service.search_account_payments(auth_account_id=auth_account_id,
                                                      status='FAILED', limit=10, page=1)
    assert results.get('total') == 2
    new_payment = Payment_service.create_account_payment(auth_account_id=auth_account_id, is_retry_payment=True)
    payment_1 = Payment_service.find_by_id(payment_1.id)
    payment_2 = Payment_service.find_by_id(payment_2.id)
    # Assert new payment invoice number is different from old payment as there are more than one failed payments.
    assert new_payment.invoice_number != payment_1.invoice_number
    assert new_payment.invoice_number != payment_2.invoice_number
    assert payment_1.cons_inv_number == new_payment.invoice_number
    assert payment_2.cons_inv_number == new_payment.invoice_number
    assert new_payment.invoice_amount == payment_1.invoice_amount + payment_2.invoice_amount
def test_create_account_payments_after_consolidation(session):
    """Assert creating account payments after consolidation yields same payment record."""
    payment_account = factory_payment_account().save()

    def _add_failed_payment(inv_number):
        # Persist invoice + line item + reference + a FAILED $100 PAD
        # payment (stanza was previously duplicated per invoice number).
        invoice = factory_invoice(payment_account, total=100)
        invoice.save()
        factory_payment_line_item(invoice_id=invoice.id, fee_schedule_id=1).save()
        factory_invoice_reference(invoice.id, invoice_number=inv_number).save()
        payment = factory_payment(payment_status_code='FAILED',
                                  payment_account_id=payment_account.id,
                                  invoice_number=inv_number,
                                  invoice_amount=100,
                                  payment_method_code=PaymentMethod.PAD.value)
        payment.save()
        return payment

    _add_failed_payment('REG00001')
    # Create one more payment with failed status.
    _add_failed_payment('REG00002')
    auth_account_id = PaymentAccount.find_by_id(payment_account.id).auth_account_id
    results = Payment_service.search_account_payments(auth_account_id=auth_account_id,
                                                      status='FAILED', limit=10, page=1)
    assert results.get('total') == 2
    new_payment_1 = Payment_service.create_account_payment(auth_account_id=auth_account_id, is_retry_payment=True)
    # Create account payment again and assert both payments returns same.
    new_payment_2 = Payment_service.create_account_payment(auth_account_id=auth_account_id, is_retry_payment=True)
    assert new_payment_1.id == new_payment_2.id
def test_failed_payment_after_consolidation(session):
    """Assert creating account payments after consolidation works."""
    # Create 2 failed payments, consolidate them, then create a third
    # failed payment; the second consolidation must be a new payment
    # covering all three invoice amounts.
    payment_account = factory_payment_account().save()

    def _add_failed_payment(inv_number):
        # Persist invoice + line item + reference + a FAILED $100 PAD
        # payment (stanza was previously duplicated per invoice number).
        invoice = factory_invoice(payment_account, total=100)
        invoice.save()
        factory_payment_line_item(invoice_id=invoice.id, fee_schedule_id=1).save()
        factory_invoice_reference(invoice.id, invoice_number=inv_number).save()
        payment = factory_payment(payment_status_code='FAILED',
                                  payment_account_id=payment_account.id,
                                  invoice_number=inv_number,
                                  invoice_amount=100,
                                  payment_method_code=PaymentMethod.PAD.value)
        payment.save()
        return payment

    payment_1 = _add_failed_payment('REG00001')
    # Create one more payment with failed status.
    payment_2 = _add_failed_payment('REG00002')
    auth_account_id = PaymentAccount.find_by_id(payment_account.id).auth_account_id
    results = Payment_service.search_account_payments(auth_account_id=auth_account_id,
                                                      status='FAILED', limit=10, page=1)
    assert results.get('total') == 2
    new_payment_1 = Payment_service.create_account_payment(auth_account_id=auth_account_id, is_retry_payment=True)
    # Create another failed payment.
    payment_3 = _add_failed_payment('REG00003')
    new_payment_2 = Payment_service.create_account_payment(auth_account_id=auth_account_id, is_retry_payment=True)
    assert new_payment_1.id != new_payment_2.id
    assert new_payment_2.invoice_amount == payment_1.invoice_amount + payment_2.invoice_amount + \
        payment_3.invoice_amount
| 44.455238
| 116
| 0.687476
| 2,937
| 23,339
| 5.130065
| 0.081716
| 0.069291
| 0.070751
| 0.054424
| 0.857702
| 0.844561
| 0.81934
| 0.79704
| 0.781111
| 0.769762
| 0
| 0.018793
| 0.231672
| 23,339
| 524
| 117
| 44.540076
| 0.821381
| 0.11123
| 0
| 0.761155
| 0
| 0
| 0.029962
| 0
| 0
| 0
| 0
| 0.001908
| 0.188976
| 1
| 0.036745
| false
| 0
| 0.015748
| 0
| 0.052493
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5015412d3e6b3aa674b4a8718fefe12edb6c6983
| 332
|
py
|
Python
|
atividades/exec108.py
|
Fleen66/Python_exercises
|
fd05fdf1181da833a1a1bc9f4a476afc8f467977
|
[
"MIT"
] | null | null | null |
atividades/exec108.py
|
Fleen66/Python_exercises
|
fd05fdf1181da833a1a1bc9f4a476afc8f467977
|
[
"MIT"
] | null | null | null |
atividades/exec108.py
|
Fleen66/Python_exercises
|
fd05fdf1181da833a1a1bc9f4a476afc8f467977
|
[
"MIT"
] | null | null | null |
import ex108

# Read a currency amount from the user and report half, double, +10% and
# -10% of it, all formatted through the ex108 helper module.
valor = int(input('Digite um valor: R$: '))
print(f'A metade de {ex108.moeda(valor)} é {ex108.moeda(ex108.metade(valor))}')
print(f'O dobro de {ex108.moeda(valor)} é {ex108.moeda(ex108.dobro(valor))}')
print(f'O aumento de 10% é {ex108.moeda(ex108.aumentar(valor, 10))}')
print(f'A redução de 10% é {ex108.moeda(ex108.diminuir(valor, 10))}')
| 55.333333
| 73
| 0.683735
| 63
| 332
| 3.603175
| 0.365079
| 0.264317
| 0.193833
| 0.281938
| 0.440529
| 0.440529
| 0.264317
| 0.264317
| 0
| 0
| 0
| 0.160535
| 0.099398
| 332
| 6
| 74
| 55.333333
| 0.598662
| 0
| 0
| 0
| 0
| 0.333333
| 0.768769
| 0.384384
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.166667
| 0
| 0.166667
| 0.666667
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
5026e929d0a693cc66ee3e2850b29d9060b52663
| 47
|
py
|
Python
|
amocrm_asterisk_ng/scenario/impl/__init__.py
|
iqtek/amocrn_asterisk_ng
|
429a8d0823b951c855a49c1d44ab0e05263c54dc
|
[
"MIT"
] | null | null | null |
amocrm_asterisk_ng/scenario/impl/__init__.py
|
iqtek/amocrn_asterisk_ng
|
429a8d0823b951c855a49c1d44ab0e05263c54dc
|
[
"MIT"
] | null | null | null |
amocrm_asterisk_ng/scenario/impl/__init__.py
|
iqtek/amocrn_asterisk_ng
|
429a8d0823b951c855a49c1d44ab0e05263c54dc
|
[
"MIT"
] | null | null | null |
from .scenario_startup import scenario_startup
| 23.5
| 46
| 0.893617
| 6
| 47
| 6.666667
| 0.666667
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085106
| 47
| 1
| 47
| 47
| 0.930233
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
ac958c8de20181a666042a849ea9ee93e25dcae1
| 32,919
|
py
|
Python
|
DQM/EcalMonitorTasks/python/ecalGpuTask_cfi.py
|
PKUfudawei/cmssw
|
8fbb5ce74398269c8a32956d7c7943766770c093
|
[
"Apache-2.0"
] | 1
|
2021-11-30T16:24:46.000Z
|
2021-11-30T16:24:46.000Z
|
DQM/EcalMonitorTasks/python/ecalGpuTask_cfi.py
|
PKUfudawei/cmssw
|
8fbb5ce74398269c8a32956d7c7943766770c093
|
[
"Apache-2.0"
] | 4
|
2021-11-29T13:57:56.000Z
|
2022-03-29T06:28:36.000Z
|
DQM/EcalMonitorTasks/python/ecalGpuTask_cfi.py
|
PKUfudawei/cmssw
|
8fbb5ce74398269c8a32956d7c7943766770c093
|
[
"Apache-2.0"
] | 1
|
2022-02-27T06:12:26.000Z
|
2022-02-27T06:12:26.000Z
|
import FWCore.ParameterSet.Config as cms
# Digi sample indices (1-10) and uncalibrated-rec-hit out-of-time (OOT)
# amplitude indices; each value gets its own plot via the 'multi' mechanism.
digiSamples_ = [1,2,3,4,5,6,7,8,9,10]
uncalibOOTAmps_ = [4,6]

# Directory/name prefix shared by every monitor element booked by this task.
_pathPrefix_ = '%(subdet)s/%(prefix)sGpuTask/%(prefix)sGT '

# Shared tail of the OOT-amplitude descriptions (kept verbatim from the
# original, including its spelling, so GUI text is unchanged).
_ootIndexNote_ = (' Indicies go from 0 to 9, with event BX at index 5.'
                  ' Index 4 == BX-1, index 6 == BX+1, etc.')

def _gpuTaskME_(path, nbins, low, high, title, description, multi = None):
    """Build one TH1F monitor-element PSet for ecalGpuTask.

    path: plot path relative to the common GpuTask prefix.
    nbins, low, high: x-axis binning (nbins is int32, edges are double).
    title: x-axis title.
    description: text shown for the plot in the DQM GUI.
    multi: optional (name, values) pair; when given, a 'multi' sub-PSet is
        added so that one plot is booked per value (e.g. per digi sample).

    All plots share kind 'TH1F', otype 'Ecal2P' and btype 'User', exactly as
    the previous hand-expanded configuration did.
    """
    content = dict(
        path = cms.untracked.string(_pathPrefix_ + path),
        kind = cms.untracked.string('TH1F'),
        otype = cms.untracked.string('Ecal2P'),
        btype = cms.untracked.string('User'),
        xaxis = cms.untracked.PSet(
            nbins = cms.untracked.int32(nbins),
            low = cms.untracked.double(low),
            high = cms.untracked.double(high),
            title = cms.untracked.string(title)
        ),
        description = cms.untracked.string(description)
    )
    if multi is not None:
        multiName, multiValues = multi
        content['multi'] = cms.untracked.PSet(**{multiName: cms.untracked.vint32(multiValues)})
    return cms.untracked.PSet(**content)

ecalGpuTask = cms.untracked.PSet(
    params = cms.untracked.PSet(
        runGpuTask = cms.untracked.bool(False),
        gpuOnlyPlots = cms.untracked.bool(True),
        uncalibOOTAmps = cms.untracked.vint32(uncalibOOTAmps_)
    ),
    MEs = cms.untracked.PSet(
        # CPU Digi
        DigiCpu = _gpuTaskME_('digi nDigis cpu', 100, 0, 5000,
            'Digis per Event', 'Number of CPU Digis per Event'),
        DigiCpuAmplitude = _gpuTaskME_('digi amplitude sample %(sample)s cpu', 100, 0, 4096,
            'ADC Counts', 'CPU digi amplitudes for individual digi samples (1-10)',
            multi = ('sample', digiSamples_)),
        # GPU Digi (optional)
        DigiGpu = _gpuTaskME_('digi nDigis gpu', 100, 0, 5000,
            'Digis per Event', 'Number of GPU Digis per Event'),
        DigiGpuAmplitude = _gpuTaskME_('digi amplitude sample %(sample)s gpu', 100, 0, 4096,
            'ADC Counts', 'GPU digi amplitudes for individual digi samples (1-10)',
            multi = ('sample', digiSamples_)),
        # Digi GPU-CPU Difference
        DigiGpuCpu = _gpuTaskME_('digi nDigis gpu-cpu diff', 100, -500, 500,
            'GPU-CPU Digis per Event', 'GPU-CPU difference of number of Digis per Event'),
        DigiGpuCpuAmplitude = _gpuTaskME_('digi amplitude sample %(sample)s gpu-cpu diff', 100, -100, 100,
            'ADC Counts', 'GPU-CPU difference of digi amplitude for individual digi samples (1-10)',
            multi = ('sample', digiSamples_)),
        # CPU UncalibRecHit
        UncalibCpu = _gpuTaskME_('uncalib rec hit nHits cpu', 100, 0, 5000,
            'Uncalibrated Rec Hits per Event', 'Number of CPU Uncalibrated Rec Hits per Event'),
        UncalibCpuAmp = _gpuTaskME_('uncalib rec hit amplitude cpu', 100, 0, 5000,
            'Amplitude', 'CPU Uncalibrated Rec Hit reconstructed amplitude'),
        UncalibCpuAmpError = _gpuTaskME_('uncalib rec hit amplitudeError cpu', 100, 0, 200,
            'Amplitude Error', 'CPU Uncalibrated Rec Hit reconstructed amplitude uncertainty'),
        UncalibCpuPedestal = _gpuTaskME_('uncalib rec hit pedestal cpu', 100, 0, 1000,
            'Pedestal', 'CPU Uncalibrated Rec Hit reconstructed pedestal'),
        UncalibCpuJitter = _gpuTaskME_('uncalib rec hit jitter cpu', 100, -5, 5,
            'Jitter', 'CPU Uncalibrated Rec Hit reconstructed time jitter'),
        # If you edit the 0.25 upper edge, also change the 10k bin in GpuTask.cc
        UncalibCpuJitterError = _gpuTaskME_('uncalib rec hit jitterError cpu', 25, 0, 0.25,
            'Jitter Error', 'CPU Uncalibrated Rec Hit reconstructed time jitter uncertainty. 10000 is special value, shown in last bin'),
        UncalibCpuChi2 = _gpuTaskME_('uncalib rec hit chi2 cpu', 100, 0, 200,
            'Chi2', 'CPU Uncalibrated Rec Hit chi2 of the pulse'),
        UncalibCpuOOTAmp = _gpuTaskME_('uncalib rec hit OOT amplitude %(OOTAmp)s cpu', 100, 0, 500,
            'OOT Amplitude',
            'CPU Uncalibrated Rec Hit out-of-time reconstructed amplitude.' + _ootIndexNote_,
            multi = ('OOTAmp', uncalibOOTAmps_)),
        UncalibCpuFlags = _gpuTaskME_('uncalib rec hit flags cpu', 64, 0, 64,
            'Flags', 'CPU Uncalibrated Rec Hit flag to be propagated to RecHit'),
        # GPU UncalibRecHit (optional)
        UncalibGpu = _gpuTaskME_('uncalib rec hit nHits gpu', 100, 0, 5000,
            'Uncalibrated Rec Hits per Event', 'Number of GPU Uncalibrated Rec Hits per Event'),
        UncalibGpuAmp = _gpuTaskME_('uncalib rec hit amplitude gpu', 100, 0, 5000,
            'Amplitude', 'GPU Uncalibrated Rec Hit reconstructed amplitude'),
        UncalibGpuAmpError = _gpuTaskME_('uncalib rec hit amplitudeError gpu', 100, 0, 200,
            'Amplitude Error', 'GPU Uncalibrated Rec Hit reconstructed amplitude uncertainty'),
        UncalibGpuPedestal = _gpuTaskME_('uncalib rec hit pedestal gpu', 100, 0, 1000,
            'Pedestal', 'GPU Uncalibrated Rec Hit reconstructed pedestal'),
        UncalibGpuJitter = _gpuTaskME_('uncalib rec hit jitter gpu', 100, -5, 5,
            'Jitter', 'GPU Uncalibrated Rec Hit reconstructed time jitter'),
        # If you edit the 0.25 upper edge, also change the 10k bin in GpuTask.cc
        UncalibGpuJitterError = _gpuTaskME_('uncalib rec hit jitterError gpu', 25, 0, 0.25,
            'Jitter Error', 'GPU Uncalibrated Rec Hit reconstructed time jitter uncertainty. 10000 is special value, shown in last bin'),
        UncalibGpuChi2 = _gpuTaskME_('uncalib rec hit chi2 gpu', 100, 0, 200,
            'Chi2', 'GPU Uncalibrated Rec Hit chi2 of the pulse'),
        UncalibGpuOOTAmp = _gpuTaskME_('uncalib rec hit OOT amplitude %(OOTAmp)s gpu', 100, 0, 500,
            'OOT Amplitude',
            'GPU Uncalibrated Rec Hit out-of-time reconstructed amplitude.' + _ootIndexNote_,
            multi = ('OOTAmp', uncalibOOTAmps_)),
        UncalibGpuFlags = _gpuTaskME_('uncalib rec hit flags gpu', 64, 0, 64,
            'Flags', 'GPU Uncalibrated Rec Hit flag to be propagated to RecHit'),
        # UncalibRecHit GPU-CPU Difference
        UncalibGpuCpu = _gpuTaskME_('uncalib rec hit nHits gpu-cpu diff', 100, -500, 500,
            'GPU-CPU Uncalibrated Rec Hits per Event', 'GPU-CPU difference of number of Uncalibrated Rec Hits per Event'),
        UncalibGpuCpuAmp = _gpuTaskME_('uncalib rec hit amplitude gpu-cpu diff', 100, -100, 100,
            'GPU-CPU Amplitude', 'GPU-CPU difference of Uncalibrated Rec Hit reconstructed amplitude'),
        UncalibGpuCpuAmpError = _gpuTaskME_('uncalib rec hit amplitudeError gpu-cpu diff', 100, -50, 50,
            'GPU-CPU Amplitude Error', 'GPU-CPU difference of Uncalibrated Rec Hit reconstructed amplitude uncertainty'),
        UncalibGpuCpuPedestal = _gpuTaskME_('uncalib rec hit pedestal gpu-cpu diff', 100, -50, 50,
            'GPU-CPU Pedestal', 'GPU-CPU difference of Uncalibrated Rec Hit reconstructed pedestal'),
        UncalibGpuCpuJitter = _gpuTaskME_('uncalib rec hit jitter gpu-cpu diff', 100, -1, 1,
            'GPU-CPU Jitter', 'GPU-CPU difference of Uncalibrated Rec Hit reconstructed time jitter'),
        UncalibGpuCpuJitterError = _gpuTaskME_('uncalib rec hit jitterError gpu-cpu diff', 100, -0.03, 0.03,
            'GPU-CPU Jitter Error', 'GPU-CPU difference of Uncalibrated Rec Hit reconstructed time jitter uncertainty. 10000 is special value, shown in last bin'),
        UncalibGpuCpuChi2 = _gpuTaskME_('uncalib rec hit chi2 gpu-cpu diff', 100, -20, 20,
            'GPU-CPU Chi2', 'GPU-CPU difference of Uncalibrated Rec Hit chi2 of the pulse'),
        UncalibGpuCpuOOTAmp = _gpuTaskME_('uncalib rec hit OOT amplitude %(OOTAmp)s gpu-cpu diff', 100, -50, 50,
            'GPU-CPU OOT Amplitude',
            'GPU-CPU difference of Uncalibrated Rec Hit out-of-time reconstructed amplitude.' + _ootIndexNote_,
            multi = ('OOTAmp', uncalibOOTAmps_)),
        UncalibGpuCpuFlags = _gpuTaskME_('uncalib rec hit flags gpu-cpu diff', 128, -64, 64,
            'GPU-CPU Flags', 'GPU-CPU difference of Uncalibrated Rec Hit flag to be propagated to RecHit'),
        # CPU RecHit
        RecHitCpu = _gpuTaskME_('rec hit nHits cpu', 100, 0, 5000,
            'Rec Hits per Event', 'Number of CPU Rec Hits per Event'),
        RecHitCpuEnergy = _gpuTaskME_('rec hit energy cpu', 100, 0, 5.0,
            'Energy (Gev)', 'CPU Rec Hit Energy (GeV)'),
        RecHitCpuTime = _gpuTaskME_('rec hit time cpu', 100, -25.0, 25.0,
            'Time (ns)', 'CPU Rec Hit Time'),
        RecHitCpuFlags = _gpuTaskME_('rec hit flags cpu', 100, 0, 1024,
            'Flags', 'CPU Rec Hit Flags'),
        # GPU RecHit (optional)
        RecHitGpu = _gpuTaskME_('rec hit nHits gpu', 100, 0, 5000,
            'Rec Hits per Event', 'Number of GPU Rec Hits per Event'),
        RecHitGpuEnergy = _gpuTaskME_('rec hit energy gpu', 100, 0, 5.0,
            'Energy (Gev)', 'GPU Rec Hit Energy (GeV)'),
        RecHitGpuTime = _gpuTaskME_('rec hit time gpu', 100, -25.0, 25.0,
            'Time (ns)', 'GPU Rec Hit Time'),
        RecHitGpuFlags = _gpuTaskME_('rec hit flags gpu', 100, 0, 1024,
            'Flags', 'GPU Rec Hit Flags'),
        # RecHit GPU-CPU Difference
        RecHitGpuCpu = _gpuTaskME_('rec hit nHits gpu-cpu diff', 100, -500, 500,
            'GPU-CPU Rec Hits per Event', 'GPU-CPU difference of number of total Rec Hits per Event'),
        RecHitGpuCpuEnergy = _gpuTaskME_('rec hit energy gpu-cpu diff', 100, -1.0, 1.0,
            'GPU-CPU Energy (GeV)', 'GPU-CPU difference of Rec Hit Energy (GeV)'),
        RecHitGpuCpuTime = _gpuTaskME_('rec hit time gpu-cpu diff', 100, -2.5, 2.5,
            'GPU-CPU Time (ns)', 'GPU-CPU difference of Rec Hit Time'),
        # Description typo fixed here: 'differnece' -> 'difference'.
        RecHitGpuCpuFlags = _gpuTaskME_('rec hit flags gpu-cpu diff', 100, -1024, 1024,
            'GPU-CPU Flags', 'GPU-CPU difference of Rec Hit Flags')
    )
)
| 52.502392
| 219
| 0.564841
| 3,496
| 32,919
| 5.316076
| 0.05349
| 0.331235
| 0.261501
| 0.048426
| 0.929298
| 0.918698
| 0.90869
| 0.898036
| 0.890557
| 0.876782
| 0
| 0.026425
| 0.306814
| 32,919
| 626
| 220
| 52.586262
| 0.788028
| 0.008961
| 0
| 0.720325
| 0
| 0.013008
| 0.215902
| 0.056574
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.001626
| 0
| 0.001626
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
4a21ff1f809e8a338db7ed6d1960549ba1adae4a
| 135
|
py
|
Python
|
utils/__init__.py
|
tamnguyenvan/lipreading
|
37f7fc4840cacad9767beba0452cfcc194a2ba1f
|
[
"Apache-2.0"
] | null | null | null |
utils/__init__.py
|
tamnguyenvan/lipreading
|
37f7fc4840cacad9767beba0452cfcc194a2ba1f
|
[
"Apache-2.0"
] | null | null | null |
utils/__init__.py
|
tamnguyenvan/lipreading
|
37f7fc4840cacad9767beba0452cfcc194a2ba1f
|
[
"Apache-2.0"
] | null | null | null |
from .dataset import LRWDataset
from .dataset_lrw1000 import LRW1000_Dataset
from .dataset import AVDataset
from .cvtransforms import *
| 33.75
| 44
| 0.851852
| 17
| 135
| 6.647059
| 0.411765
| 0.292035
| 0.300885
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.066667
| 0.111111
| 135
| 4
| 45
| 33.75
| 0.875
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
4a26c8d1c7e68ea2366f00c6869f1d0e1b77976c
| 107
|
py
|
Python
|
mkdocs/utils/filters.py
|
davidhrbac/mkdocs
|
3c8a1fccca29272ce327e89c398a55771a7f5635
|
[
"BSD-2-Clause"
] | 57
|
2016-09-28T01:19:35.000Z
|
2022-01-07T13:59:21.000Z
|
mkdocs/utils/filters.py
|
hufyhang/mkdocs
|
4c4ef7fa7224713e17d479742c2df1b2fc78edcb
|
[
"BSD-2-Clause"
] | 16
|
2017-02-06T15:48:03.000Z
|
2018-02-28T21:40:10.000Z
|
mkdocs/utils/filters.py
|
hufyhang/mkdocs
|
4c4ef7fa7224713e17d479742c2df1b2fc78edcb
|
[
"BSD-2-Clause"
] | 81
|
2016-09-06T04:21:06.000Z
|
2022-03-10T06:32:45.000Z
|
import json
import jinja2
def tojson(obj, **kwargs):
    """Serialize *obj* to JSON and wrap the result as Jinja2 markup.

    All keyword arguments are forwarded unchanged to ``json.dumps``.
    NOTE(review): the JSON is marked safe without HTML-escaping — presumably
    it is only embedded in <script> blocks with trusted data; confirm.
    """
    serialized = json.dumps(obj, **kwargs)
    return jinja2.Markup(serialized)
| 15.285714
| 51
| 0.71028
| 15
| 107
| 5.066667
| 0.666667
| 0.236842
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.021978
| 0.149533
| 107
| 6
| 52
| 17.833333
| 0.813187
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| false
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
c59e9c0cf4e6261484033fb6cd57eb7fbd031ddb
| 92,811
|
py
|
Python
|
RestPy/ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/openflowchannel.py
|
ralfjon/IxNetwork
|
c0c834fbc465af69c12fd6b7cee4628baba7fff1
|
[
"MIT"
] | null | null | null |
RestPy/ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/openflowchannel.py
|
ralfjon/IxNetwork
|
c0c834fbc465af69c12fd6b7cee4628baba7fff1
|
[
"MIT"
] | null | null | null |
RestPy/ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/openflowchannel.py
|
ralfjon/IxNetwork
|
c0c834fbc465af69c12fd6b7cee4628baba7fff1
|
[
"MIT"
] | null | null | null |
# Copyright 1997 - 2018 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class OpenFlowChannel(Base):
"""The OpenFlowChannel class encapsulates a user managed openFlowChannel node in the ixnetwork hierarchy.
An instance of the class can be obtained by accessing the OpenFlowChannel property from a parent instance.
The internal properties list will be empty when the property is accessed and is populated from the server using the find method.
The internal properties list can be managed by the user by using the add and remove methods.
"""
_SDM_NAME = 'openFlowChannel'
    def __init__(self, parent):
        """Create a proxy for the openFlowChannel node under *parent*.

        Args:
            parent: The parent Base-derived instance this node hangs off of.
        """
        super(OpenFlowChannel, self).__init__(parent)
@property
def Groups(self):
"""An instance of the Groups class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.groups.Groups)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.groups import Groups
return Groups(self)
@property
def Meters(self):
"""An instance of the Meters class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.meters.Meters)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.meters import Meters
return Meters(self)
@property
def Tables(self):
"""An instance of the Tables class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.tables.Tables)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.tables import Tables
return Tables(self)
    # The accessors below are auto-generated one-line wrappers around
    # Base._get_attribute / Base._set_attribute; the quoted string in each call
    # is the server-side attribute name and must stay exactly as written.
    @property
    def Active(self):
        """Activate/Deactivate Configuration
        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('active')
    @property
    def CalcFlowRate(self):
        """If selected, the statistics on the rate of transmission of flows per second by the controller is published.
        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('calcFlowRate')
    @property
    def CalcFlowRateWithBarrier(self):
        """If selected, statistics on the rate of transmission of flows per second by the controller, along with Barrier Request messages is published.
        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('calcFlowRateWithBarrier')
    @property
    def ConnectedVia(self):
        """List of layers this layer used to connect to the wire
        Returns:
            list(str[None|/api/v1/sessions/1/ixnetwork/topology?deepchild=*])
        """
        return self._get_attribute('connectedVia')
    @ConnectedVia.setter
    def ConnectedVia(self, value):
        self._set_attribute('connectedVia', value)
    @property
    def ControllerIndex(self):
        """Parent Controller Index
        Returns:
            list(str)
        """
        return self._get_attribute('controllerIndex')
    @property
    def ControllerName(self):
        """Parent Controller Name
        Returns:
            str
        """
        return self._get_attribute('controllerName')
    @property
    def Count(self):
        """Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group
        Returns:
            number
        """
        return self._get_attribute('count')
    @property
    def DatapathId(self):
        """The Datapath ID of the OF Channel.
        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('datapathId')
    @property
    def DatapathIdHex(self):
        """The Datapath ID in hexadecimal format.
        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('datapathIdHex')
    @property
    def DescriptiveName(self):
        """Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but maybe offers more context
        Returns:
            str
        """
        return self._get_attribute('descriptiveName')
    @property
    def EnableHelloElement(self):
        """If selected, the Controller sends a hello message consisting of an OpenFlow header and a set of variable size hello elements to inform the initial handshake of the connection.
        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('enableHelloElement')
    @property
    def Errors(self):
        """A list of errors that have occurred
        Returns:
            list(dict(arg1:str[None|/api/v1/sessions/1/ixnetwork/?deepchild=*],arg2:list[str]))
        """
        return self._get_attribute('errors')
    @property
    def FlowTxBurstSize(self):
        """Specify the number of Flow transmitting packets that can be sent in a single burst within the time frame specified by the Inter Flow Burst Gap value.
        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('flowTxBurstSize')
    @property
    def GroupsPerChannel(self):
        """Number of Groups per Channel
        Returns:
            number
        """
        return self._get_attribute('groupsPerChannel')
    @GroupsPerChannel.setter
    def GroupsPerChannel(self, value):
        self._set_attribute('groupsPerChannel', value)
    @property
    def InterFlowBurstGap(self):
        """Specify the duration (in milliseconds) for which the controller waits between successive flow advertisements.
        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('interFlowBurstGap')
    @property
    def LocalIp(self):
        """The local IP address of the interface. This field is auto-populated and cannot be changed.
        Returns:
            list(str)
        """
        return self._get_attribute('localIp')
    @property
    def MaxFlowsAtATime(self):
        """The Max Number of Flows Processed at a Time is the size of an internal buffer maintained by the Ixiacontroller, which prevents it from sending more flows than the Openflow switch can consume at a time.
        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('maxFlowsAtATime')
    @property
    def MetersPerChannel(self):
        """Number of Meters per Channel
        Returns:
            number
        """
        return self._get_attribute('metersPerChannel')
    @MetersPerChannel.setter
    def MetersPerChannel(self, value):
        self._set_attribute('metersPerChannel', value)
    @property
    def Multiplier(self):
        """Number of layer instances per parent instance (multiplier)
        Returns:
            number
        """
        return self._get_attribute('multiplier')
    @Multiplier.setter
    def Multiplier(self, value):
        self._set_attribute('multiplier', value)
    @property
    def Name(self):
        """Name of NGPF element, guaranteed to be unique in Scenario
        Returns:
            str
        """
        return self._get_attribute('name')
    @Name.setter
    def Name(self, value):
        self._set_attribute('name', value)
    @property
    def RemoteIp(self):
        """The IP address of the DUT at the other end of the OF Channel.
        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('remoteIp')
    @property
    def SendRoleRequest(self):
        """If selected, the controller sends a Role Request message after the connection is established; to change its role according to the Role Request option selected.
        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('sendRoleRequest')
    @property
    def SessionStatus(self):
        """Current state of protocol session: Not Started - session negotiation not started, the session is not active yet. Down - actively trying to bring up a protocol session, but negotiation didn't successfully complete (yet). Up - session came up successfully.
        Returns:
            list(str[down|notStarted|up])
        """
        return self._get_attribute('sessionStatus')
    @property
    def StackedLayers(self):
        """List of secondary (many to one) child layer protocols
        Returns:
            list(str[None|/api/v1/sessions/1/ixnetwork/topology?deepchild=*])
        """
        return self._get_attribute('stackedLayers')
    @StackedLayers.setter
    def StackedLayers(self, value):
        self._set_attribute('stackedLayers', value)
    @property
    def StartupGenerationId(self):
        """A 64-bit sequence number field that identifies a given mastership view.
        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('startupGenerationId')
    @property
    def StartupRoleRequest(self):
        """This defines role of the controller.Options include: 1) No Change 2) Equal 3) Master 4) Slave
        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('startupRoleRequest')
    @property
    def StateCounts(self):
        """A list of values that indicates the total number of sessions, the number of sessions not started, the number of sessions down and the number of sessions that are up
        Returns:
            dict(total:number,notStarted:number,down:number,up:number)
        """
        return self._get_attribute('stateCounts')
    @property
    def Status(self):
        """Running status of associated network element. Once in Started state, protocol sessions will begin to negotiate.
        Returns:
            str(configured|error|mixed|notStarted|started|starting|stopping)
        """
        return self._get_attribute('status')
    @property
    def TablesPerChannel(self):
        """Number of Tables per Channel
        Returns:
            number
        """
        return self._get_attribute('tablesPerChannel')
    @TablesPerChannel.setter
    def TablesPerChannel(self, value):
        self._set_attribute('tablesPerChannel', value)
    @property
    def UseDatapathID(self):
        """If selected, the Datapath ID and IP address are used as the OF Channel identifier.
        Returns:
            obj(ixnetwork_restpy.multivalue.Multivalue)
        """
        return self._get_attribute('useDatapathID')
    def add(self, ConnectedVia=None, GroupsPerChannel=None, MetersPerChannel=None, Multiplier=None, Name=None, StackedLayers=None, TablesPerChannel=None):
        """Adds a new openFlowChannel node on the server and retrieves it in this instance.
        Args:
            ConnectedVia (list(str[None|/api/v1/sessions/1/ixnetwork/topology?deepchild=*])): List of layers this layer used to connect to the wire
            GroupsPerChannel (number): Number of Groups per Channel
            MetersPerChannel (number): Number of Meters per Channel
            Multiplier (number): Number of layer instances per parent instance (multiplier)
            Name (str): Name of NGPF element, guaranteed to be unique in Scenario
            StackedLayers (list(str[None|/api/v1/sessions/1/ixnetwork/topology?deepchild=*])): List of secondary (many to one) child layer protocols
            TablesPerChannel (number): Number of Tables per Channel
        Returns:
            self: This instance with all currently retrieved openFlowChannel data using find and the newly added openFlowChannel data available through an iterator or index
        Raises:
            ServerError: The server has encountered an uncategorized error condition
        """
        # locals() is sent verbatim as the create payload, so every parameter
        # name above is a server-side attribute name and no additional local
        # variables may be introduced before this call.
        return self._create(locals())
    def remove(self):
        """Deletes all the openFlowChannel data in this instance from server.
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        # Delegates to the Base framework; note this returns None, not self.
        self._delete()
    def find(self, ConnectedVia=None, ControllerIndex=None, ControllerName=None, Count=None, DescriptiveName=None, Errors=None, GroupsPerChannel=None, LocalIp=None, MetersPerChannel=None, Multiplier=None, Name=None, SessionStatus=None, StackedLayers=None, StateCounts=None, Status=None, TablesPerChannel=None):
        """Finds and retrieves openFlowChannel data from the server.
        All named parameters support regex and can be used to selectively retrieve openFlowChannel data from the server.
        By default the find method takes no parameters and will retrieve all openFlowChannel data from the server.
        Args:
            ConnectedVia (list(str[None|/api/v1/sessions/1/ixnetwork/topology?deepchild=*])): List of layers this layer used to connect to the wire
            ControllerIndex (list(str)): Parent Controller Index
            ControllerName (str): Parent Controller Name
            Count (number): Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group
            DescriptiveName (str): Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but maybe offers more context
            Errors (list(dict(arg1:str[None|/api/v1/sessions/1/ixnetwork/?deepchild=*],arg2:list[str]))): A list of errors that have occurred
            GroupsPerChannel (number): Number of Groups per Channel
            LocalIp (list(str)): The local IP address of the interface. This field is auto-populated and cannot be changed.
            MetersPerChannel (number): Number of Meters per Channel
            Multiplier (number): Number of layer instances per parent instance (multiplier)
            Name (str): Name of NGPF element, guaranteed to be unique in Scenario
            SessionStatus (list(str[down|notStarted|up])): Current state of protocol session: Not Started - session negotiation not started, the session is not active yet. Down - actively trying to bring up a protocol session, but negotiation didn't successfully complete (yet). Up - session came up successfully.
            StackedLayers (list(str[None|/api/v1/sessions/1/ixnetwork/topology?deepchild=*])): List of secondary (many to one) child layer protocols
            StateCounts (dict(total:number,notStarted:number,down:number,up:number)): A list of values that indicates the total number of sessions, the number of sessions not started, the number of sessions down and the number of sessions that are up
            Status (str(configured|error|mixed|notStarted|started|starting|stopping)): Running status of associated network element. Once in Started state, protocol sessions will begin to negotiate.
            TablesPerChannel (number): Number of Tables per Channel
        Returns:
            self: This instance with matching openFlowChannel data retrieved from the server available through an iterator or index
        Raises:
            ServerError: The server has encountered an uncategorized error condition
        """
        # locals() is sent verbatim as the select payload, so parameter names
        # are server-side attribute names; do not add locals before this call.
        return self._select(locals())
    def read(self, href):
        """Retrieves a single instance of openFlowChannel data from the server.
        Args:
            href (str): An href to the instance to be retrieved
        Returns:
            self: This instance with the openFlowChannel data from the server available through an iterator or index
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        return self._read(href)
    # NOTE(review): the four defs below all bind the same name; Python has no
    # method overloading, so each re-definition replaces the previous one and
    # only the final signature (taking Arg2) is actually callable.  The bodies
    # build their wire payload from locals(), so the local names Arg1 /
    # SessionIndices / Arg2 are part of the protocol and must not be renamed.
    def GetAsynchronousConfiguration(self):
        """Executes the getAsynchronousConfiguration operation on the server.
        Get Asynchronous Configuration
        Args:
            Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self
        return self._execute('GetAsynchronousConfiguration', payload=locals(), response_object=None)
    def GetAsynchronousConfiguration(self, SessionIndices):
        """Executes the getAsynchronousConfiguration operation on the server.
        Get Asynchronous Configuration
        Args:
            Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
            SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self
        return self._execute('GetAsynchronousConfiguration', payload=locals(), response_object=None)
    def GetAsynchronousConfiguration(self, SessionIndices):
        """Executes the getAsynchronousConfiguration operation on the server.
        Get Asynchronous Configuration
        Args:
            Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
            SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self
        return self._execute('GetAsynchronousConfiguration', payload=locals(), response_object=None)
    def GetAsynchronousConfiguration(self, Arg2):
        """Executes the getAsynchronousConfiguration operation on the server.
        Get Asynchronous Message configuration from Switch.
        Args:
            Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
            Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
        Returns:
            list(str): ID to associate each async action invocation
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self.href
        return self._execute('GetAsynchronousConfiguration', payload=locals(), response_object=None)
    def InvokeSendRoleRequest(self, Arg2):
        """Executes the invokeSendRoleRequest operation on the server.
        Sends a Role Request for selected Channel.
        Args:
            Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
            Arg2 (list(number)): List of indices in channel grid
        Returns:
            list(str): ID to associate each async action invocation
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        # locals() (Arg1, Arg2) forms the wire payload; do not rename locals.
        Arg1 = self.href
        return self._execute('InvokeSendRoleRequest', payload=locals(), response_object=None)
    # NOTE(review): four defs of one name below -- only the last (taking Arg2)
    # survives; Python has no overloading. Payloads come from locals(), so
    # local names are part of the protocol and must not be renamed.
    def PauseEchoReply(self):
        """Executes the pauseEchoReply operation on the server.
        Pause Sending Echo Reply Messages
        Args:
            Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self
        return self._execute('PauseEchoReply', payload=locals(), response_object=None)
    def PauseEchoReply(self, SessionIndices):
        """Executes the pauseEchoReply operation on the server.
        Pause Sending Echo Reply Messages
        Args:
            Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
            SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self
        return self._execute('PauseEchoReply', payload=locals(), response_object=None)
    def PauseEchoReply(self, SessionIndices):
        """Executes the pauseEchoReply operation on the server.
        Pause Sending Echo Reply Messages
        Args:
            Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
            SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self
        return self._execute('PauseEchoReply', payload=locals(), response_object=None)
    def PauseEchoReply(self, Arg2):
        """Executes the pauseEchoReply operation on the server.
        Pause Sending Echo Reply Messages.
        Args:
            Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
            Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
        Returns:
            list(str): ID to associate each async action invocation
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self.href
        return self._execute('PauseEchoReply', payload=locals(), response_object=None)
    # NOTE(review): four defs of one name below -- only the last (taking Arg2)
    # survives; Python has no overloading. Payloads come from locals(), so
    # local names are part of the protocol and must not be renamed.
    def PauseEchoRequest(self):
        """Executes the pauseEchoRequest operation on the server.
        Pause Sending Echo Request Messages
        Args:
            Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self
        return self._execute('PauseEchoRequest', payload=locals(), response_object=None)
    def PauseEchoRequest(self, SessionIndices):
        """Executes the pauseEchoRequest operation on the server.
        Pause Sending Echo Request Messages
        Args:
            Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
            SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self
        return self._execute('PauseEchoRequest', payload=locals(), response_object=None)
    def PauseEchoRequest(self, SessionIndices):
        """Executes the pauseEchoRequest operation on the server.
        Pause Sending Echo Request Messages
        Args:
            Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
            SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self
        return self._execute('PauseEchoRequest', payload=locals(), response_object=None)
    def PauseEchoRequest(self, Arg2):
        """Executes the pauseEchoRequest operation on the server.
        Pause Sending Echo Request Messages.
        Args:
            Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
            Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
        Returns:
            list(str): ID to associate each async action invocation
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self.href
        return self._execute('PauseEchoRequest', payload=locals(), response_object=None)
    # NOTE(review): three defs of one name below -- only the last (taking
    # SessionIndices as a str) survives; Python has no overloading. Payloads
    # come from locals(), so local names must not be renamed.
    def RestartDown(self):
        """Executes the restartDown operation on the server.
        Stop and start interfaces and sessions that are in Down state.
        Args:
            Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self
        return self._execute('RestartDown', payload=locals(), response_object=None)
    def RestartDown(self, SessionIndices):
        """Executes the restartDown operation on the server.
        Stop and start interfaces and sessions that are in Down state.
        Args:
            Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
            SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self
        return self._execute('RestartDown', payload=locals(), response_object=None)
    def RestartDown(self, SessionIndices):
        """Executes the restartDown operation on the server.
        Stop and start interfaces and sessions that are in Down state.
        Args:
            Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
            SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self
        return self._execute('RestartDown', payload=locals(), response_object=None)
    # NOTE(review): four defs of one name below -- only the last (taking Arg2)
    # survives; Python has no overloading. Payloads come from locals(), so
    # local names are part of the protocol and must not be renamed.
    def ResumeEchoReply(self):
        """Executes the resumeEchoReply operation on the server.
        Resume Sending Echo Reply Messages
        Args:
            Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self
        return self._execute('ResumeEchoReply', payload=locals(), response_object=None)
    def ResumeEchoReply(self, SessionIndices):
        """Executes the resumeEchoReply operation on the server.
        Resume Sending Echo Reply Messages
        Args:
            Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
            SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self
        return self._execute('ResumeEchoReply', payload=locals(), response_object=None)
    def ResumeEchoReply(self, SessionIndices):
        """Executes the resumeEchoReply operation on the server.
        Resume Sending Echo Reply Messages
        Args:
            Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
            SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self
        return self._execute('ResumeEchoReply', payload=locals(), response_object=None)
    def ResumeEchoReply(self, Arg2):
        """Executes the resumeEchoReply operation on the server.
        Resume Sending Echo Reply Messages.
        Args:
            Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
            Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
        Returns:
            list(str): ID to associate each async action invocation
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self.href
        return self._execute('ResumeEchoReply', payload=locals(), response_object=None)
    # NOTE(review): four defs of one name below -- only the last (taking Arg2)
    # survives; Python has no overloading. Payloads come from locals(), so
    # local names are part of the protocol and must not be renamed.
    def ResumeEchoRequest(self):
        """Executes the resumeEchoRequest operation on the server.
        Resume Sending Echo Request Messages
        Args:
            Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self
        return self._execute('ResumeEchoRequest', payload=locals(), response_object=None)
    def ResumeEchoRequest(self, SessionIndices):
        """Executes the resumeEchoRequest operation on the server.
        Resume Sending Echo Request Messages
        Args:
            Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
            SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self
        return self._execute('ResumeEchoRequest', payload=locals(), response_object=None)
    def ResumeEchoRequest(self, SessionIndices):
        """Executes the resumeEchoRequest operation on the server.
        Resume Sending Echo Request Messages
        Args:
            Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
            SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self
        return self._execute('ResumeEchoRequest', payload=locals(), response_object=None)
    def ResumeEchoRequest(self, Arg2):
        """Executes the resumeEchoRequest operation on the server.
        Resume Sending Echo Request Messages.
        Args:
            Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
            Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
        Returns:
            list(str): ID to associate each async action invocation
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self.href
        return self._execute('ResumeEchoRequest', payload=locals(), response_object=None)
    # NOTE(review): four defs of one name below -- only the last (taking Arg2)
    # survives; Python has no overloading. Payloads come from locals(), so
    # local names are part of the protocol and must not be renamed.
    def SendBarrierRequest(self):
        """Executes the sendBarrierRequest operation on the server.
        Send Barrier Request to Switch
        Args:
            Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self
        return self._execute('SendBarrierRequest', payload=locals(), response_object=None)
    def SendBarrierRequest(self, SessionIndices):
        """Executes the sendBarrierRequest operation on the server.
        Send Barrier Request to Switch
        Args:
            Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
            SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self
        return self._execute('SendBarrierRequest', payload=locals(), response_object=None)
    def SendBarrierRequest(self, SessionIndices):
        """Executes the sendBarrierRequest operation on the server.
        Send Barrier Request to Switch
        Args:
            Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
            SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self
        return self._execute('SendBarrierRequest', payload=locals(), response_object=None)
    def SendBarrierRequest(self, Arg2):
        """Executes the sendBarrierRequest operation on the server.
        Send Barrier Request to Switch.
        Args:
            Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
            Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
        Returns:
            list(str): ID to associate each async action invocation
        Raises:
            NotFoundError: The requested resource does not exist on the server
            ServerError: The server has encountered an uncategorized error condition
        """
        Arg1 = self.href
        return self._execute('SendBarrierRequest', payload=locals(), response_object=None)
def SendConfigRequest(self):
"""Executes the sendConfigRequest operation on the server.
Send Config Request to Switch
Args:
Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
Arg1 = self
return self._execute('SendConfigRequest', payload=locals(), response_object=None)
def SendConfigRequest(self, SessionIndices):
"""Executes the sendConfigRequest operation on the server.
Send Config Request to Switch
Args:
Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
Arg1 = self
return self._execute('SendConfigRequest', payload=locals(), response_object=None)
def SendConfigRequest(self, SessionIndices):
    """Executes the sendConfigRequest operation on the server.
    Send Config Request to Switch
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() forwards Arg1 and SessionIndices (and 'self') to the server.
    Arg1 = self
    return self._execute('SendConfigRequest', payload=locals(), response_object=None)
def SendConfigRequest(self, Arg2):
    """Executes the sendConfigRequest operation on the server.
    Send Config Request to Switch.
    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
    Returns:
        list(str): ID to associate each async action invocation
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): several same-name defs precede this one; Python binds only this
    # last SendConfigRequest definition — the earlier overload-style defs are shadowed.
    Arg1 = self.href
    return self._execute('SendConfigRequest', payload=locals(), response_object=None)
def SendDescriptionStatRequest(self):
    """Executes the sendDescriptionStatRequest operation on the server.
    Send Description Stat Request
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # Arg1 must be a local: payload=locals() forwards Arg1 (and 'self') to the server.
    Arg1 = self
    return self._execute('SendDescriptionStatRequest', payload=locals(), response_object=None)
def SendDescriptionStatRequest(self, SessionIndices):
    """Executes the sendDescriptionStatRequest operation on the server.
    Send Description Stat Request
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() forwards Arg1 and SessionIndices (and 'self') to the server.
    Arg1 = self
    return self._execute('SendDescriptionStatRequest', payload=locals(), response_object=None)
def SendDescriptionStatRequest(self, SessionIndices):
    """Executes the sendDescriptionStatRequest operation on the server.
    Send Description Stat Request
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() forwards Arg1 and SessionIndices (and 'self') to the server.
    Arg1 = self
    return self._execute('SendDescriptionStatRequest', payload=locals(), response_object=None)
def SendDescriptionStatRequest(self, Arg2):
    """Executes the sendDescriptionStatRequest operation on the server.
    Send Description Stats Request to Switch.
    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
    Returns:
        list(str): ID to associate each async action invocation
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): several same-name defs precede this one; Python binds only this
    # last SendDescriptionStatRequest definition — the earlier ones are shadowed.
    Arg1 = self.href
    return self._execute('SendDescriptionStatRequest', payload=locals(), response_object=None)
def SendEchoRequest(self, EnableEchoTimeout, EchoTimeoutVal):
    """Executes the sendEchoRequest operation on the server.
    Send Echo Request
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        EnableEchoTimeout (bool): This parameter requires a enableEchoTimeout of type kBool
        EchoTimeoutVal (number): This parameter requires a echoTimeoutVal of type kInteger
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() forwards Arg1 and both parameters (and 'self') to the server.
    Arg1 = self
    return self._execute('SendEchoRequest', payload=locals(), response_object=None)
def SendEchoRequest(self, EnableEchoTimeout, EchoTimeoutVal, SessionIndices):
    """Executes the sendEchoRequest operation on the server.
    Send Echo Request
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        EnableEchoTimeout (bool): This parameter requires a enableEchoTimeout of type kBool
        EchoTimeoutVal (number): This parameter requires a echoTimeoutVal of type kInteger
        SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() forwards Arg1 and all parameters (and 'self') to the server.
    Arg1 = self
    return self._execute('SendEchoRequest', payload=locals(), response_object=None)
def SendEchoRequest(self, SessionIndices, EnableEchoTimeout, EchoTimeoutVal):
    """Executes the sendEchoRequest operation on the server.
    Send Echo Request
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
        EnableEchoTimeout (bool): This parameter requires a enableEchoTimeout of type kBool
        EchoTimeoutVal (number): This parameter requires a echoTimeoutVal of type kInteger
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): the generated docstring had the three parameter descriptions
    # rotated by one (SessionIndices described as enableEchoTimeout, etc.);
    # realigned above to match the sibling overloads.
    Arg1 = self
    return self._execute('SendEchoRequest', payload=locals(), response_object=None)
def SendEchoRequest(self, Arg2, Arg3, Arg4):
    """Executes the sendEchoRequest operation on the server.
    Send Echo Request Messages.
    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
        Arg3 (bool): Enable Echo Timeout
        Arg4 (number): Echo Timeout Value
    Returns:
        list(str): ID to associate each async action invocation
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): several same-name defs precede this one; Python binds only this
    # last SendEchoRequest definition — the earlier ones are shadowed.
    Arg1 = self.href
    return self._execute('SendEchoRequest', payload=locals(), response_object=None)
def SendExperimenterMessage(self, Arg2, Arg3, Arg4, Arg5, Arg6):
    """Executes the sendExperimenterMessage operation on the server.
    Send Experimenter Message.
    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
        Arg3 (number): Experimenter Data Length.
        Arg4 (number): Experimenter ID.
        Arg5 (number): Experimenter ID
        Arg6 (str): Experimenter Data in Hex.
    Returns:
        list(str): ID to associate each async action invocation
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): Arg5's description duplicates Arg4's ("Experimenter ID");
    # presumably Arg5 is the experimenter type/subtype — confirm against the server model.
    Arg1 = self.href
    return self._execute('SendExperimenterMessage', payload=locals(), response_object=None)
def SendExperimenterStatRequest(self, Arg2, Arg3, Arg4, Arg5, Arg6):
    """Executes the sendExperimenterStatRequest operation on the server.
    Send Experimenter Stats Request.
    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
        Arg3 (number): Experimenter Data Length.
        Arg4 (number): Experimenter ID.
        Arg5 (number): Experimenter ID
        Arg6 (str): Experimenter Data in Hex.
    Returns:
        list(str): ID to associate each async action invocation
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): Arg5's description duplicates Arg4's ("Experimenter ID");
    # presumably Arg5 is the experimenter type/subtype — confirm against the server model.
    Arg1 = self.href
    return self._execute('SendExperimenterStatRequest', payload=locals(), response_object=None)
def SendFeatureRequest(self):
    """Executes the sendFeatureRequest operation on the server.
    Send Feature Request to Switch
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # Arg1 must be a local: payload=locals() forwards Arg1 (and 'self') to the server.
    Arg1 = self
    return self._execute('SendFeatureRequest', payload=locals(), response_object=None)
def SendFeatureRequest(self, SessionIndices):
    """Executes the sendFeatureRequest operation on the server.
    Send Feature Request to Switch
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() forwards Arg1 and SessionIndices (and 'self') to the server.
    Arg1 = self
    return self._execute('SendFeatureRequest', payload=locals(), response_object=None)
def SendFeatureRequest(self, SessionIndices):
    """Executes the sendFeatureRequest operation on the server.
    Send Feature Request to Switch
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() forwards Arg1 and SessionIndices (and 'self') to the server.
    Arg1 = self
    return self._execute('SendFeatureRequest', payload=locals(), response_object=None)
def SendFeatureRequest(self, Arg2):
    """Executes the sendFeatureRequest operation on the server.
    Send Feature Request to Switch.
    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
    Returns:
        list(str): ID to associate each async action invocation
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): several same-name defs precede this one; Python binds only this
    # last SendFeatureRequest definition — the earlier ones are shadowed.
    Arg1 = self.href
    return self._execute('SendFeatureRequest', payload=locals(), response_object=None)
def SendGetQueueConfigRequest(self, OutputPortType, ErrorUnsupportedTypeFormat):
    """Executes the sendGetQueueConfigRequest operation on the server.
    Send Queue Stats Request
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        OutputPortType (str(enumOpt-MANUAL|enumOpt-OFPP_ANY)): This parameter requires a outputPortType of type kEnumValue=enumOpt-MANUAL,enumOpt-OFPP_ANY
        ErrorUnsupportedTypeFormat (null): This parameter requires a errorUnsupportedTypeFormat of type kVoid
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() forwards Arg1 and both parameters (and 'self') to the server.
    Arg1 = self
    return self._execute('SendGetQueueConfigRequest', payload=locals(), response_object=None)
def SendGetQueueConfigRequest(self, OutputPortType, ErrorUnsupportedTypeFormat, SessionIndices):
    """Executes the sendGetQueueConfigRequest operation on the server.
    Send Queue Stats Request
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        OutputPortType (str(enumOpt-MANUAL|enumOpt-OFPP_ANY)): This parameter requires a outputPortType of type kEnumValue=enumOpt-MANUAL,enumOpt-OFPP_ANY
        ErrorUnsupportedTypeFormat (null): This parameter requires a errorUnsupportedTypeFormat of type kVoid
        SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() forwards Arg1 and all parameters (and 'self') to the server.
    Arg1 = self
    return self._execute('SendGetQueueConfigRequest', payload=locals(), response_object=None)
def SendGetQueueConfigRequest(self, SessionIndices, OutputPortType, ErrorUnsupportedTypeFormat):
    """Executes the sendGetQueueConfigRequest operation on the server.
    Send Queue Stats Request
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
        OutputPortType (str(enumOpt-MANUAL|enumOpt-OFPP_ANY)): This parameter requires a outputPortType of type kEnumValue=enumOpt-MANUAL,enumOpt-OFPP_ANY
        ErrorUnsupportedTypeFormat (null): This parameter requires a errorUnsupportedTypeFormat of type kVoid
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): the generated docstring had the three parameter descriptions
    # rotated by one; realigned above to match the sibling overloads.
    Arg1 = self
    return self._execute('SendGetQueueConfigRequest', payload=locals(), response_object=None)
def SendGetQueueConfigRequest(self, Arg2, Arg3, Arg4):
    """Executes the sendGetQueueConfigRequest operation on the server.
    Send Queue Config Request to Switch.
    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
        Arg3 (str(mANUAL|oFPP_ALL|oFPP_ANY|oFPP_CONTROLLER|oFPP_FLOOD|oFPP_IN_PORT|oFPP_LOCAL|oFPP_NONE|oFPP_NORMAL|oFPP_TABLE)): Output Port Type
        Arg4 (number): Port ID
    Returns:
        list(str): ID to associate each async action invocation
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): several same-name defs precede this one; Python binds only this
    # last SendGetQueueConfigRequest definition — the earlier ones are shadowed.
    Arg1 = self.href
    return self._execute('SendGetQueueConfigRequest', payload=locals(), response_object=None)
def SendGroupDescriptionRequest(self):
    """Executes the sendGroupDescriptionRequest operation on the server.
    Send Group Description Request to Switch
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # Arg1 must be a local: payload=locals() forwards Arg1 (and 'self') to the server.
    Arg1 = self
    return self._execute('SendGroupDescriptionRequest', payload=locals(), response_object=None)
def SendGroupDescriptionRequest(self, SessionIndices):
    """Executes the sendGroupDescriptionRequest operation on the server.
    Send Group Description Request to Switch
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() forwards Arg1 and SessionIndices (and 'self') to the server.
    Arg1 = self
    return self._execute('SendGroupDescriptionRequest', payload=locals(), response_object=None)
def SendGroupDescriptionRequest(self, SessionIndices):
    """Executes the sendGroupDescriptionRequest operation on the server.
    Send Group Description Request to Switch
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() forwards Arg1 and SessionIndices (and 'self') to the server.
    Arg1 = self
    return self._execute('SendGroupDescriptionRequest', payload=locals(), response_object=None)
def SendGroupDescriptionRequest(self, Arg2):
    """Executes the sendGroupDescriptionRequest operation on the server.
    Send Group Description Request to Switch.
    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
    Returns:
        list(str): ID to associate each async action invocation
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): several same-name defs precede this one; Python binds only this
    # last SendGroupDescriptionRequest definition — the earlier ones are shadowed.
    Arg1 = self.href
    return self._execute('SendGroupDescriptionRequest', payload=locals(), response_object=None)
def SendGroupFeaturesRequest(self):
    """Executes the sendGroupFeaturesRequest operation on the server.
    Send Group Features Request to Switch
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # Arg1 must be a local: payload=locals() forwards Arg1 (and 'self') to the server.
    Arg1 = self
    return self._execute('SendGroupFeaturesRequest', payload=locals(), response_object=None)
def SendGroupFeaturesRequest(self, SessionIndices):
    """Executes the sendGroupFeaturesRequest operation on the server.
    Send Group Features Request to Switch
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() forwards Arg1 and SessionIndices (and 'self') to the server.
    Arg1 = self
    return self._execute('SendGroupFeaturesRequest', payload=locals(), response_object=None)
def SendGroupFeaturesRequest(self, SessionIndices):
    """Executes the sendGroupFeaturesRequest operation on the server.
    Send Group Features Request to Switch
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() forwards Arg1 and SessionIndices (and 'self') to the server.
    Arg1 = self
    return self._execute('SendGroupFeaturesRequest', payload=locals(), response_object=None)
def SendGroupFeaturesRequest(self, Arg2):
    """Executes the sendGroupFeaturesRequest operation on the server.
    Send Group Features Request to Switch.
    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
    Returns:
        list(str): ID to associate each async action invocation
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): several same-name defs precede this one; Python binds only this
    # last SendGroupFeaturesRequest definition — the earlier ones are shadowed.
    Arg1 = self.href
    return self._execute('SendGroupFeaturesRequest', payload=locals(), response_object=None)
def SendGroupStatsRequest(self, GroupIDType, GroupID):
    """Executes the sendGroupStatsRequest operation on the server.
    Send Group Stats Request to Switch
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        GroupIDType (str(enumOpt-Manual|enumOpt-OFPG_ALL|enumOpt-OFPG_ANY)): This parameter requires a groupIDType of type kEnumValue=enumOpt-Manual,enumOpt-OFPG_ALL,enumOpt-OFPG_ANY
        GroupID (number): This parameter requires a groupID of type kInteger
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() forwards Arg1 and both parameters (and 'self') to the server.
    Arg1 = self
    return self._execute('SendGroupStatsRequest', payload=locals(), response_object=None)
def SendGroupStatsRequest(self, GroupIDType, GroupID, SessionIndices):
    """Executes the sendGroupStatsRequest operation on the server.
    Send Group Stats Request to Switch
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        GroupIDType (str(enumOpt-Manual|enumOpt-OFPG_ALL|enumOpt-OFPG_ANY)): This parameter requires a groupIDType of type kEnumValue=enumOpt-Manual,enumOpt-OFPG_ALL,enumOpt-OFPG_ANY
        GroupID (number): This parameter requires a groupID of type kInteger
        SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() forwards Arg1 and all parameters (and 'self') to the server.
    Arg1 = self
    return self._execute('SendGroupStatsRequest', payload=locals(), response_object=None)
def SendGroupStatsRequest(self, SessionIndices, GroupIDType, GroupID):
    """Executes the sendGroupStatsRequest operation on the server.
    Send Group Stats Request to Switch
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
        GroupIDType (str(enumOpt-Manual|enumOpt-OFPG_ALL|enumOpt-OFPG_ANY)): This parameter requires a groupIDType of type kEnumValue=enumOpt-Manual,enumOpt-OFPG_ALL,enumOpt-OFPG_ANY
        GroupID (number): This parameter requires a groupID of type kInteger
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): the generated docstring had the three parameter descriptions
    # rotated by one; realigned above to match the sibling overloads.
    Arg1 = self
    return self._execute('SendGroupStatsRequest', payload=locals(), response_object=None)
def SendGroupStatsRequest(self, Arg2, Arg3, Arg4):
    """Executes the sendGroupStatsRequest operation on the server.
    Send Group Stats Request to Switch.
    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
        Arg3 (str(manual|oFPG_ALL|oFPG_ANY)): Group ID Type
        Arg4 (number): Group ID
    Returns:
        list(str): ID to associate each async action invocation
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): several same-name defs precede this one; Python binds only this
    # last SendGroupStatsRequest definition — the earlier ones are shadowed.
    Arg1 = self.href
    return self._execute('SendGroupStatsRequest', payload=locals(), response_object=None)
def SendMeterConfigRequest(self, MeterIDType, ErrorUnsupportedTypeFormat):
    """Executes the sendMeterConfigRequest operation on the server.
    Send Meter Config Request
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        MeterIDType (str(enumOpt-ALL|enumOpt-MANUAL|enumOpt-OFPM_CONTROLLER|enumOpt-OFPM_SLOWPATH)): This parameter requires a meterIDType of type kEnumValue=enumOpt-ALL,enumOpt-MANUAL,enumOpt-OFPM_CONTROLLER,enumOpt-OFPM_SLOWPATH
        ErrorUnsupportedTypeFormat (null): This parameter requires a errorUnsupportedTypeFormat of type kVoid
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() forwards Arg1 and both parameters (and 'self') to the server.
    Arg1 = self
    return self._execute('SendMeterConfigRequest', payload=locals(), response_object=None)
def SendMeterConfigRequest(self, MeterIDType, ErrorUnsupportedTypeFormat, SessionIndices):
    """Executes the sendMeterConfigRequest operation on the server.
    Send Meter Config Request
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        MeterIDType (str(enumOpt-ALL|enumOpt-MANUAL|enumOpt-OFPM_CONTROLLER|enumOpt-OFPM_SLOWPATH)): This parameter requires a meterIDType of type kEnumValue=enumOpt-ALL,enumOpt-MANUAL,enumOpt-OFPM_CONTROLLER,enumOpt-OFPM_SLOWPATH
        ErrorUnsupportedTypeFormat (null): This parameter requires a errorUnsupportedTypeFormat of type kVoid
        SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() forwards Arg1 and all parameters (and 'self') to the server.
    Arg1 = self
    return self._execute('SendMeterConfigRequest', payload=locals(), response_object=None)
def SendMeterConfigRequest(self, SessionIndices, MeterIDType, ErrorUnsupportedTypeFormat):
    """Executes the sendMeterConfigRequest operation on the server.
    Send Meter Config Request
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
        MeterIDType (str(enumOpt-ALL|enumOpt-MANUAL|enumOpt-OFPM_CONTROLLER|enumOpt-OFPM_SLOWPATH)): This parameter requires a meterIDType of type kEnumValue=enumOpt-ALL,enumOpt-MANUAL,enumOpt-OFPM_CONTROLLER,enumOpt-OFPM_SLOWPATH
        ErrorUnsupportedTypeFormat (null): This parameter requires a errorUnsupportedTypeFormat of type kVoid
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): the generated docstring had the three parameter descriptions
    # rotated by one; realigned above to match the sibling overloads.
    Arg1 = self
    return self._execute('SendMeterConfigRequest', payload=locals(), response_object=None)
def SendMeterConfigRequest(self, Arg2, Arg3, Arg4):
    """Executes the sendMeterConfigRequest operation on the server.
    Send Meter Config Request to Switch.
    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
        Arg3 (str(all|manual|oFPM_CONTROLLER|oFPM_SLOWPATH)): Meter ID Type
        Arg4 (number): Meter ID
    Returns:
        list(str): ID to associate each async action invocation
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): several same-name defs precede this one; Python binds only this
    # last SendMeterConfigRequest definition — the earlier ones are shadowed.
    Arg1 = self.href
    return self._execute('SendMeterConfigRequest', payload=locals(), response_object=None)
def SendMeterFeaturesRequest(self):
    """Executes the sendMeterFeaturesRequest operation on the server.
    Send Meter Features Request to Switch
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # Arg1 must be a local: payload=locals() forwards Arg1 (and 'self') to the server.
    Arg1 = self
    return self._execute('SendMeterFeaturesRequest', payload=locals(), response_object=None)
def SendMeterFeaturesRequest(self, SessionIndices):
    """Executes the sendMeterFeaturesRequest operation on the server.
    Send Meter Features Request to Switch
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() forwards Arg1 and SessionIndices (and 'self') to the server.
    Arg1 = self
    return self._execute('SendMeterFeaturesRequest', payload=locals(), response_object=None)
def SendMeterFeaturesRequest(self, SessionIndices):
    """Executes the sendMeterFeaturesRequest operation on the server.
    Send Meter Features Request to Switch
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() forwards Arg1 and SessionIndices (and 'self') to the server.
    Arg1 = self
    return self._execute('SendMeterFeaturesRequest', payload=locals(), response_object=None)
def SendMeterFeaturesRequest(self, Arg2):
    """Executes the sendMeterFeaturesRequest operation on the server.
    Send Meter Features Request to Switch.
    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
    Returns:
        list(str): ID to associate each async action invocation
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): several same-name defs precede this one; Python binds only this
    # last SendMeterFeaturesRequest definition — the earlier ones are shadowed.
    Arg1 = self.href
    return self._execute('SendMeterFeaturesRequest', payload=locals(), response_object=None)
def SendMeterStatRequest(self, Arg2, Arg3, Arg4):
    """Executes the sendMeterStatRequest operation on the server.
    Send Meter Stat Request to Switch.
    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
        Arg3 (str(all|manual|oFPM_CONTROLLER|oFPM_SLOWPATH)): Meter ID Type
        Arg4 (number): Meter ID
    Returns:
        list(str): ID to associate each async action invocation
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() forwards Arg1 and all parameters (and 'self') to the server.
    Arg1 = self.href
    return self._execute('SendMeterStatRequest', payload=locals(), response_object=None)
def SendMeterStatsRequest(self, MeterIDType, ErrorUnsupportedTypeFormat):
    """Executes the sendMeterStatsRequest operation on the server.
    Send Meter Stats Request
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        MeterIDType (str(enumOpt-ALL|enumOpt-MANUAL|enumOpt-OFPM_CONTROLLER|enumOpt-OFPM_SLOWPATH)): This parameter requires a meterIDType of type kEnumValue=enumOpt-ALL,enumOpt-MANUAL,enumOpt-OFPM_CONTROLLER,enumOpt-OFPM_SLOWPATH
        ErrorUnsupportedTypeFormat (null): This parameter requires a errorUnsupportedTypeFormat of type kVoid
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() forwards Arg1 and both parameters (and 'self') to the server.
    Arg1 = self
    return self._execute('SendMeterStatsRequest', payload=locals(), response_object=None)
def SendMeterStatsRequest(self, MeterIDType, ErrorUnsupportedTypeFormat, SessionIndices):
    """Executes the sendMeterStatsRequest operation on the server.
    Send Meter Stats Request
    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        MeterIDType (str(enumOpt-ALL|enumOpt-MANUAL|enumOpt-OFPM_CONTROLLER|enumOpt-OFPM_SLOWPATH)): This parameter requires a meterIDType of type kEnumValue=enumOpt-ALL,enumOpt-MANUAL,enumOpt-OFPM_CONTROLLER,enumOpt-OFPM_SLOWPATH
        ErrorUnsupportedTypeFormat (null): This parameter requires a errorUnsupportedTypeFormat of type kVoid
        SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3
    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # payload=locals() forwards Arg1 and all parameters (and 'self') to the server.
    Arg1 = self
    return self._execute('SendMeterStatsRequest', payload=locals(), response_object=None)
def SendMeterStatsRequest(self, SessionIndices, MeterIDType, ErrorUnsupportedTypeFormat):
    """Executes the sendMeterStatsRequest operation on the server.

    Send Meter Stats Request

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
        MeterIDType (str(enumOpt-ALL|enumOpt-MANUAL|enumOpt-OFPM_CONTROLLER|enumOpt-OFPM_SLOWPATH)): This parameter requires a meterIDType of type kEnumValue=enumOpt-ALL,enumOpt-MANUAL,enumOpt-OFPM_CONTROLLER,enumOpt-OFPM_SLOWPATH
        ErrorUnsupportedTypeFormat (null): This parameter requires a errorUnsupportedTypeFormat of type kVoid

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # DOC FIX: the original docstring's parameter descriptions were rotated
    # (SessionIndices described as meterIDType etc.); realigned above.
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('SendMeterStatsRequest', payload=locals(), response_object=None)
def SendPortDescription(self):
    """Executes the sendPortDescription operation on the server.

    Send Port Description

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): this name is defined multiple times in this class; Python
    # keeps only the last binding (generated-overload pattern — confirm intent).
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('SendPortDescription', payload=locals(), response_object=None)
def SendPortDescription(self, SessionIndices):
    """Executes the sendPortDescription operation on the server.

    Send Port Description

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): this name is defined multiple times in this class; Python
    # keeps only the last binding (generated-overload pattern — confirm intent).
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('SendPortDescription', payload=locals(), response_object=None)
def SendPortDescription(self, SessionIndices):
    """Executes the sendPortDescription operation on the server.

    Send Port Description

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): this name is defined multiple times in this class; Python
    # keeps only the last binding (generated-overload pattern — confirm intent).
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('SendPortDescription', payload=locals(), response_object=None)
def SendPortDescription(self, Arg2):
    """Executes the sendPortDescription operation on the server.

    Send Port Descrption to Switch.

    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.

    Returns:
        list(str): ID to associate each async action invocation

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # This per-instance overload sends the single href rather than a list.
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self.href
    return self._execute('SendPortDescription', payload=locals(), response_object=None)
def SendPortStatsRequest(self, OutputPortType, ErrorUnsupportedTypeFormat):
    """Executes the sendPortStatsRequest operation on the server.

    Send Port Stats Request

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        OutputPortType (str(enumOpt-MANUAL|enumOpt-OFPP_ANY|enumOpt-OFPP_NONE)): This parameter requires a outputPortType of type kEnumValue=enumOpt-MANUAL,enumOpt-OFPP_ANY,enumOpt-OFPP_NONE
        ErrorUnsupportedTypeFormat (null): This parameter requires a errorUnsupportedTypeFormat of type kVoid

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): this name is defined multiple times in this class; Python
    # keeps only the last binding (generated-overload pattern — confirm intent).
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('SendPortStatsRequest', payload=locals(), response_object=None)
def SendPortStatsRequest(self, OutputPortType, ErrorUnsupportedTypeFormat, SessionIndices):
    """Executes the sendPortStatsRequest operation on the server.

    Send Port Stats Request

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        OutputPortType (str(enumOpt-MANUAL|enumOpt-OFPP_ANY|enumOpt-OFPP_NONE)): This parameter requires a outputPortType of type kEnumValue=enumOpt-MANUAL,enumOpt-OFPP_ANY,enumOpt-OFPP_NONE
        ErrorUnsupportedTypeFormat (null): This parameter requires a errorUnsupportedTypeFormat of type kVoid
        SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): this name is defined multiple times in this class; Python
    # keeps only the last binding (generated-overload pattern — confirm intent).
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('SendPortStatsRequest', payload=locals(), response_object=None)
def SendPortStatsRequest(self, SessionIndices, OutputPortType, ErrorUnsupportedTypeFormat):
    """Executes the sendPortStatsRequest operation on the server.

    Send Port Stats Request

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
        OutputPortType (str(enumOpt-MANUAL|enumOpt-OFPP_ANY|enumOpt-OFPP_NONE)): This parameter requires a outputPortType of type kEnumValue=enumOpt-MANUAL,enumOpt-OFPP_ANY,enumOpt-OFPP_NONE
        ErrorUnsupportedTypeFormat (null): This parameter requires a errorUnsupportedTypeFormat of type kVoid

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # DOC FIX: the original docstring's parameter descriptions were rotated
    # (SessionIndices described as outputPortType etc.); realigned above.
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('SendPortStatsRequest', payload=locals(), response_object=None)
def SendPortStatsRequest(self, Arg2, Arg3, Arg4):
    """Executes the sendPortStatsRequest operation on the server.

    Send Port Stats Request to Switch.

    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
        Arg3 (str(mANUAL|oFPP_ALL|oFPP_ANY|oFPP_CONTROLLER|oFPP_FLOOD|oFPP_IN_PORT|oFPP_LOCAL|oFPP_NONE|oFPP_NORMAL|oFPP_TABLE)): Output Port Type
        Arg4 (number): Port ID

    Returns:
        list(str): ID to associate each async action invocation

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # This per-instance overload sends the single href rather than a list.
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self.href
    return self._execute('SendPortStatsRequest', payload=locals(), response_object=None)
def SendQueueStatsRequest(self, Arg2, Arg3, Arg4, Arg5, Arg6):
    """Executes the sendQueueStatsRequest operation on the server.

    Send Queue Stat Request to Switch.

    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
        Arg3 (str(mANUAL|oFPP_ALL|oFPP_ANY|oFPP_CONTROLLER|oFPP_FLOOD|oFPP_IN_PORT|oFPP_LOCAL|oFPP_NONE|oFPP_NORMAL|oFPP_TABLE)): Output Port Type
        Arg4 (number): Port ID
        Arg5 (str(manual|oFPQ_ALL)): Queue Type
        Arg6 (number): Queue ID

    Returns:
        list(str): ID to associate each async action invocation

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self.href
    return self._execute('SendQueueStatsRequest', payload=locals(), response_object=None)
def SendTableModRequest(self, TableIdType, TableId, ErrorUnsupportedTypeFormat):
    """Executes the sendTableModRequest operation on the server.

    Send Table Mod Request

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        TableIdType (str(enumOpt-ALL_TABLE|enumOpt-MANUAL)): This parameter requires a tableIdType of type kEnumValue=enumOpt-ALL_TABLE,enumOpt-MANUAL
        TableId (number): This parameter requires a tableId of type kInteger
        ErrorUnsupportedTypeFormat (null): This parameter requires a errorUnsupportedTypeFormat of type kVoid

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): this name is defined multiple times in this class; Python
    # keeps only the last binding (generated-overload pattern — confirm intent).
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('SendTableModRequest', payload=locals(), response_object=None)
def SendTableModRequest(self, TableIdType, TableId, ErrorUnsupportedTypeFormat, SessionIndices):
    """Executes the sendTableModRequest operation on the server.

    Send Table Mod Request

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        TableIdType (str(enumOpt-ALL_TABLE|enumOpt-MANUAL)): This parameter requires a tableIdType of type kEnumValue=enumOpt-ALL_TABLE,enumOpt-MANUAL
        TableId (number): This parameter requires a tableId of type kInteger
        ErrorUnsupportedTypeFormat (null): This parameter requires a errorUnsupportedTypeFormat of type kVoid
        SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): this name is defined multiple times in this class; Python
    # keeps only the last binding (generated-overload pattern — confirm intent).
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('SendTableModRequest', payload=locals(), response_object=None)
def SendTableModRequest(self, SessionIndices, TableIdType, TableId, ErrorUnsupportedTypeFormat):
    """Executes the sendTableModRequest operation on the server.

    Send Table Mod Request

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12
        TableIdType (str(enumOpt-ALL_TABLE|enumOpt-MANUAL)): This parameter requires a tableIdType of type kEnumValue=enumOpt-ALL_TABLE,enumOpt-MANUAL
        TableId (number): This parameter requires a tableId of type kInteger
        ErrorUnsupportedTypeFormat (null): This parameter requires a errorUnsupportedTypeFormat of type kVoid

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # DOC FIX: the original docstring's parameter descriptions were rotated
    # (SessionIndices described as tableIdType etc.); realigned above.
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('SendTableModRequest', payload=locals(), response_object=None)
def SendTableModRequest(self, Arg2, Arg3, Arg4, Arg5):
    """Executes the sendTableModRequest operation on the server.

    Modify Behaviour of a Flow Table.

    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.
        Arg3 (str(aLL_TABLE|manual)): Table ID Type
        Arg4 (number): Table ID
        Arg5 (number): Table Config

    Returns:
        list(str): ID to associate each async action invocation

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # This per-instance overload sends the single href rather than a list.
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self.href
    return self._execute('SendTableModRequest', payload=locals(), response_object=None)
def SendTableStatsRequest(self):
    """Executes the sendTableStatsRequest operation on the server.

    Send Table Stats Request to Switch

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): this name is defined multiple times in this class; Python
    # keeps only the last binding (generated-overload pattern — confirm intent).
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('SendTableStatsRequest', payload=locals(), response_object=None)
def SendTableStatsRequest(self, SessionIndices):
    """Executes the sendTableStatsRequest operation on the server.

    Send Table Stats Request to Switch

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): this name is defined multiple times in this class; Python
    # keeps only the last binding (generated-overload pattern — confirm intent).
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('SendTableStatsRequest', payload=locals(), response_object=None)
def SendTableStatsRequest(self, SessionIndices):
    """Executes the sendTableStatsRequest operation on the server.

    Send Table Stats Request to Switch

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): this name is defined multiple times in this class; Python
    # keeps only the last binding (generated-overload pattern — confirm intent).
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('SendTableStatsRequest', payload=locals(), response_object=None)
def SendTableStatsRequest(self, Arg2):
    """Executes the sendTableStatsRequest operation on the server.

    Send Table Stats Request to Switch.

    Args:
        Arg1 (str(None|/api/v1/sessions/1/ixnetwork/topology)): The method internally sets Arg1 to the current href for this instance
        Arg2 (list(number)): List of indices into the protocol plugin. An empty list indicates all instances in the plugin.

    Returns:
        list(str): ID to associate each async action invocation

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # DOC FIX: the original summary wrongly said "Send Meter Features Request";
    # this method executes sendTableStatsRequest.
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self.href
    return self._execute('SendTableStatsRequest', payload=locals(), response_object=None)
def Start(self):
    """Executes the start operation on the server.

    Start selected protocols.

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): this name is defined multiple times in this class; Python
    # keeps only the last binding (generated-overload pattern — confirm intent).
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('Start', payload=locals(), response_object=None)
def Start(self, SessionIndices):
    """Executes the start operation on the server.

    Start selected protocols.

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): this name is defined multiple times in this class; Python
    # keeps only the last binding (generated-overload pattern — confirm intent).
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('Start', payload=locals(), response_object=None)
def Start(self, SessionIndices):
    """Executes the start operation on the server.

    Start selected protocols.

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): this name is defined multiple times in this class; Python
    # keeps only the last binding (generated-overload pattern — confirm intent).
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('Start', payload=locals(), response_object=None)
def StartChannel(self):
    """Executes the startChannel operation on the server.

    Start OpenFlow Channel

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): this name is defined multiple times in this class; Python
    # keeps only the last binding (generated-overload pattern — confirm intent).
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('StartChannel', payload=locals(), response_object=None)
def StartChannel(self, SessionIndices):
    """Executes the startChannel operation on the server.

    Start OpenFlow Channel

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): this name is defined multiple times in this class; Python
    # keeps only the last binding (generated-overload pattern — confirm intent).
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('StartChannel', payload=locals(), response_object=None)
def StartChannel(self, SessionIndices):
    """Executes the startChannel operation on the server.

    Start OpenFlow Channel

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): this name is defined multiple times in this class; Python
    # keeps only the last binding (generated-overload pattern — confirm intent).
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('StartChannel', payload=locals(), response_object=None)
def Stop(self):
    """Executes the stop operation on the server.

    Stop selected protocols.

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): this name is defined multiple times in this class; Python
    # keeps only the last binding (generated-overload pattern — confirm intent).
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('Stop', payload=locals(), response_object=None)
def Stop(self, SessionIndices):
    """Executes the stop operation on the server.

    Stop selected protocols.

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): this name is defined multiple times in this class; Python
    # keeps only the last binding (generated-overload pattern — confirm intent).
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('Stop', payload=locals(), response_object=None)
def Stop(self, SessionIndices):
    """Executes the stop operation on the server.

    Stop selected protocols.

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): this name is defined multiple times in this class; Python
    # keeps only the last binding (generated-overload pattern — confirm intent).
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('Stop', payload=locals(), response_object=None)
def StopChannel(self):
    """Executes the stopChannel operation on the server.

    Stop OpenFlow Channel

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): this name is defined multiple times in this class; Python
    # keeps only the last binding (generated-overload pattern — confirm intent).
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('StopChannel', payload=locals(), response_object=None)
def StopChannel(self, SessionIndices):
    """Executes the stopChannel operation on the server.

    Stop OpenFlow Channel

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (list(number)): This parameter requires an array of session numbers 0 1 2 3

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): this name is defined multiple times in this class; Python
    # keeps only the last binding (generated-overload pattern — confirm intent).
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('StopChannel', payload=locals(), response_object=None)
def StopChannel(self, SessionIndices):
    """Executes the stopChannel operation on the server.

    Stop OpenFlow Channel

    Args:
        Arg1 (list(str[None|/api/v1/sessions/1/ixnetwork/topology])): The method internally sets Arg1 to the encapsulated list of hrefs for this instance
        SessionIndices (str): This parameter requires a string of session numbers 1-4;6;7-12

    Raises:
        NotFoundError: The requested resource does not exist on the server
        ServerError: The server has encountered an uncategorized error condition
    """
    # NOTE(review): this name is defined multiple times in this class; Python
    # keeps only the last binding (generated-overload pattern — confirm intent).
    # locals() is sent as the request payload, so local names are significant.
    Arg1 = self
    return self._execute('StopChannel', payload=locals(), response_object=None)
| 41.88222
| 308
| 0.754339
| 11,867
| 92,811
| 5.864751
| 0.043145
| 0.042545
| 0.033507
| 0.019139
| 0.913531
| 0.907007
| 0.897122
| 0.890426
| 0.876173
| 0.867394
| 0
| 0.011911
| 0.174074
| 92,811
| 2,215
| 309
| 41.901129
| 0.896015
| 0.763002
| 0
| 0.609865
| 0
| 0
| 0.116801
| 0.050867
| 0
| 0
| 0
| 0
| 0
| 1
| 0.331839
| false
| 0
| 0.011211
| 0
| 0.659193
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 8
|
c5ce3f3c6180532213516c37152af69624315e9a
| 8,667
|
py
|
Python
|
tests/gcloud/test_GCloudStorage.py
|
urbandataanalytics/SwissKnife
|
46e2266744ff2a6e95c05817182b80d864e422a9
|
[
"MIT"
] | 3
|
2020-04-27T15:28:40.000Z
|
2020-05-27T10:33:16.000Z
|
tests/gcloud/test_GCloudStorage.py
|
urbandataanalytics/SwissKnife
|
46e2266744ff2a6e95c05817182b80d864e422a9
|
[
"MIT"
] | 7
|
2020-04-30T09:47:14.000Z
|
2021-04-05T13:07:12.000Z
|
tests/gcloud/test_GCloudStorage.py
|
urbandataanalytics/SwissKnife
|
46e2266744ff2a6e95c05817182b80d864e422a9
|
[
"MIT"
] | null | null | null |
import os
import imp
import unittest
import SwissKnife
import tests.test_utils as test_utils
from unittest.mock import MagicMock
from unittest import mock
from SwissKnife.gcloud.GCloudStorage import GCloudStorage
class TestGCloudStorage(unittest.TestCase):
    """Unit tests for GCloudStorage path building, uploads and blob listing.

    The ``gcloud`` client is mocked in every test, so no real GCS access
    takes place.
    """

    def setUp(self):
        # Env-driven bucket configuration, used by GCloudStorage() with no args.
        self.bucket_path_env_value = 'gs://fancy-bucket/files-should/have-this-prefix'
        self.bucket_name = 'fancy-bucket'
        self.bucket_path_prefix = 'files-should/have-this-prefix'
        # Explicit bucket configuration, passed via the `bucket` parameter.
        self.bucket_path_param = 'gs://not-so-fancy-bucket/path/prefix-param'
        self.bucket_name_param = 'not-so-fancy-bucket'
        self.bucket_path_prefix_param = 'path/prefix-param'
        test_utils.set_env_variable('BUCKET_PATH', self.bucket_path_env_value)
        # NOTE(review): `imp` is deprecated and removed in Python 3.12;
        # importlib.reload is the drop-in replacement — confirm before migrating.
        imp.reload(SwissKnife.gcloud.GCloudStorage)  # needed to reload BUCKET_PATH

    def test_GCloudStorage_raises_an_exception_when_bucket_path_is_not_defined(self):
        # BUG FIX: this method was missing the `test_` prefix, so unittest
        # discovery silently skipped it.
        test_utils.set_env_variable('BUCKET_PATH', None)
        imp.reload(SwissKnife.gcloud.GCloudStorage)  # needed to reload BUCKET_PATH
        with self.assertRaises(RuntimeError) as ex:
            GCloudStorage()
        self.assertIn("invalid BUCKET PATH", str(ex.exception))

    @mock.patch('SwissKnife.gcloud.GCloudStorage.gcloud')
    def test_path_complete_without_bucket_with_prefix(self, mock_gcloud):
        file_path = 'random/path'
        file_name = 'blah.gif'
        expected = f'gs://{self.bucket_path_prefix}/{file_path}/{file_name}'
        gcs = GCloudStorage()
        result = gcs.get_storage_complete_file_path(file_name, file_path)
        self.assertEqual(expected, result)

    @mock.patch('SwissKnife.gcloud.GCloudStorage.gcloud')
    def test_path_complete_with_bucket_with_prefix(self, mock_gcloud):
        file_path = 'random/path'
        file_name = 'blah.gif'
        expected = f'gs://fancy-bucket/{self.bucket_path_prefix}/{file_path}/{file_name}'
        gcs = GCloudStorage()
        result = gcs.get_storage_complete_file_path(file_name, file_path, with_bucket=True)
        self.assertEqual(expected, result)

    @mock.patch('SwissKnife.gcloud.GCloudStorage.gcloud')
    def test_path_complete_with_bucket_without_prefix(self, mock_gcloud):
        file_path = 'random/path'
        file_name = 'blah.gif'
        expected = f'gs://fancy-bucket/{file_path}/{file_name}'
        gcs = GCloudStorage()
        result = gcs.get_storage_complete_file_path(file_name,
                                                    file_path,
                                                    with_bucket=True,
                                                    with_prefix=False)
        self.assertEqual(expected, result)

    @mock.patch('SwissKnife.gcloud.GCloudStorage.gcloud')
    def test_path_without_file_name(self, mock_gcloud):
        file_path = 'random/path'
        expected = f'gs://fancy-bucket/{file_path}/'
        gcs = GCloudStorage()
        result = gcs.get_storage_complete_file_path(
            file_path=file_path,
            with_bucket=True,
            with_prefix=False
        )
        self.assertEqual(expected, result)

    @mock.patch('SwissKnife.gcloud.GCloudStorage.gcloud')
    def test_correct_data_type_to_upload(self, mock_gcloud):
        gc = GCloudStorage()
        file_path = 'random/path'
        file_name = 'blah.gif'
        expected = f'gs://fancy-bucket/{self.bucket_path_prefix}/{file_path}/{file_name}'
        data_types = ["str", "file"]
        metadata = {'key': 'test'}
        for dt in data_types:
            self.assertEqual(expected, gc.save_to_storage("data",
                                                          None,
                                                          dt,
                                                          file_path,
                                                          file_name,
                                                          metadata=metadata))
            # Metadata must have been attached to the blob before upload.
            self.assertEqual(metadata, mock_gcloud.Client().get_bucket().blob().metadata)

    @mock.patch('SwissKnife.gcloud.GCloudStorage.gcloud')
    def test_correct_data_type_to_upload_with_bucket_param(self, mock_gcloud):
        gc = GCloudStorage(bucket=self.bucket_path_param)
        file_path = 'random/path'
        file_name = 'blah.gif'
        expected = f'gs://{self.bucket_name_param}/{self.bucket_path_prefix_param}/{file_path}/{file_name}'
        data_types = ["str", "file"]
        metadata = {'key': 'test'}
        for dt in data_types:
            self.assertEqual(expected, gc.save_to_storage("data",
                                                          None,
                                                          dt,
                                                          file_path,
                                                          file_name,
                                                          metadata=metadata))
            self.assertEqual(metadata, mock_gcloud.Client().get_bucket().blob().metadata)

    @mock.patch('SwissKnife.gcloud.GCloudStorage.gcloud')
    def test_incorrect_data_type_to_upload(self, mock_gcloud):
        gc = GCloudStorage()
        file_path = 'random/path'
        file_name = 'blah.gif'
        wrong_data_types = ["string", "files", None, "gzip", "image"]
        # BUG FIX: assertRaises previously wrapped the whole loop, so only the
        # first wrong type was actually verified; now each type is checked.
        for dt in wrong_data_types:
            with self.assertRaises(NotImplementedError):
                gc.save_to_storage("data",
                                   None,
                                   dt,
                                   file_path,
                                   file_name)

    @mock.patch('SwissKnife.gcloud.GCloudStorage.gcloud')
    def test_incorrect_data_type_to_upload_with_bucket_param(self, mock_gcloud):
        gc = GCloudStorage(bucket=self.bucket_path_param)
        file_path = 'random/path'
        file_name = 'blah.gif'
        wrong_data_types = ["string", "files", None, "gzip", "image"]
        # BUG FIX: same as above — assertRaises is now applied per data type.
        for dt in wrong_data_types:
            with self.assertRaises(NotImplementedError):
                gc.save_to_storage("data",
                                   None,
                                   dt,
                                   file_path,
                                   file_name)

    @mock.patch('SwissKnife.gcloud.GCloudStorage.gcloud')
    def test_valid_path_in_list_blobs_with_prefix(self, mock_gcloud):
        storage_path = 'random_path'
        expected = f'{self.bucket_path_prefix}/{storage_path}/'

        def assert_path_in_list_blobs(prefix):
            self.assertEqual(prefix, expected)

        mocked_bucket = MagicMock()
        mocked_bucket.list_blobs = assert_path_in_list_blobs
        gc = GCloudStorage()
        gc.bucket = mocked_bucket
        gc.list_blobs(storage_path)

    @mock.patch('SwissKnife.gcloud.GCloudStorage.gcloud')
    def test_valid_path_in_list_blobs_with_prefix_with_bucket_param(self, mock_gcloud):
        storage_path = 'random_path'
        expected = f'{self.bucket_path_prefix_param}/{storage_path}/'

        def assert_path_in_list_blobs(prefix):
            self.assertEqual(prefix, expected)

        mocked_bucket = MagicMock()
        mocked_bucket.list_blobs = assert_path_in_list_blobs
        gc = GCloudStorage(bucket=self.bucket_path_param)
        gc.bucket = mocked_bucket
        gc.list_blobs(storage_path)

    @mock.patch('SwissKnife.gcloud.GCloudStorage.gcloud')
    def test_valid_path_in_list_blobs_without_prefix(self, mock_gcloud):
        storage_path = 'random_path'
        expected = f'{storage_path}/'

        def assert_path_in_list_blobs(prefix):
            self.assertEqual(prefix, expected)

        mocked_bucket = MagicMock()
        mocked_bucket.list_blobs = assert_path_in_list_blobs
        gc = GCloudStorage()
        gc.bucket = mocked_bucket
        gc.list_blobs(storage_path, with_prefix=False)

    @mock.patch('SwissKnife.gcloud.GCloudStorage.gcloud')
    def test_valid_path_in_list_blobs_without_prefix_with_bucket_param(self, mock_gcloud):
        storage_path = 'random_path'
        expected = f'{storage_path}/'

        def assert_path_in_list_blobs(prefix):
            self.assertEqual(prefix, expected)

        mocked_bucket = MagicMock()
        mocked_bucket.list_blobs = assert_path_in_list_blobs
        gc = GCloudStorage(bucket=self.bucket_path_param)
        gc.bucket = mocked_bucket
        gc.list_blobs(storage_path, with_prefix=False)
"""
The rest of methods are not tested in this class, as they are mere
rewrittings of code provided by Google. This tests would require,
as well, to have a working internet connection and a configured SA.
"""
| 39.940092
| 107
| 0.611169
| 968
| 8,667
| 5.149793
| 0.121901
| 0.04333
| 0.045737
| 0.060181
| 0.861184
| 0.843932
| 0.831494
| 0.791775
| 0.791775
| 0.791775
| 0
| 0
| 0.298719
| 8,667
| 216
| 108
| 40.125
| 0.820171
| 0.006577
| 0
| 0.713415
| 0
| 0
| 0.166906
| 0.120191
| 0
| 0
| 0
| 0
| 0.146341
| 1
| 0.109756
| false
| 0
| 0.04878
| 0
| 0.164634
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
764d33752a0c10e1a5835a028ea67466c05963df
| 200
|
py
|
Python
|
1_3.py
|
JeffreyAsuncion/PCEP_training_2020_12
|
7477fb57a526ca0efdd156811aa72fae6129b062
|
[
"MIT"
] | null | null | null |
1_3.py
|
JeffreyAsuncion/PCEP_training_2020_12
|
7477fb57a526ca0efdd156811aa72fae6129b062
|
[
"MIT"
] | null | null | null |
1_3.py
|
JeffreyAsuncion/PCEP_training_2020_12
|
7477fb57a526ca0efdd156811aa72fae6129b062
|
[
"MIT"
] | null | null | null |
# PCEP practice: exponentiation, division, floor division and modulo.
# Values are printed in exactly the same order as the original statements.
for value in (
    2 ** 3,       # 8
    2 ** 3.,      # 8.0 — a float operand promotes the result
    2. ** 3,      # 8.0
    2. ** 3.,     # 8.0
    5 // 2,       # 2 — floor division
    2 ** 2 ** 3,  # 256 — ** is right-associative: 2 ** (2 ** 3)
    2 * 4,        # 8
    2 ** 4,       # 16
    2. * 4,       # 8.0
    2 ** 4.,      # 16.0
    2 / 4,        # 0.5 — / always yields a float
    2 // 4,       # 0
    -2 / 4,       # -0.5
    -2 // 4,      # -1 — floor division rounds toward negative infinity
    2 % 4,        # 2
    2 % -4,       # -2 — the result's sign follows the divisor
):
    print(value)
| 10.526316
| 14
| 0.565
| 49
| 200
| 2.306122
| 0.102041
| 0.79646
| 0.619469
| 0.955752
| 0.911504
| 0.911504
| 0.911504
| 0.911504
| 0.911504
| 0.911504
| 0
| 0.181319
| 0.09
| 200
| 19
| 15
| 10.526316
| 0.43956
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 12
|
765a0b4d9fcfa1d7cf208ebd477dd2f46f1b5325
| 119
|
py
|
Python
|
tests/parser/filter.2.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/filter.2.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/filter.2.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
input = """
p(2) | f.
-p(1) :- true.
true.
:- f.
"""
output = """
p(2) | f.
-p(1) :- true.
true.
:- f.
"""
| 7.933333
| 15
| 0.327731
| 18
| 119
| 2.166667
| 0.388889
| 0.102564
| 0.153846
| 0.205128
| 0.717949
| 0.717949
| 0.717949
| 0.717949
| 0
| 0
| 0
| 0.049383
| 0.319328
| 119
| 14
| 16
| 8.5
| 0.432099
| 0
| 0
| 0.833333
| 0
| 0
| 0.715596
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7666f33fbe7bd96d12e2aed6199ba1620c19ccbd
| 3,151
|
py
|
Python
|
tests/test_utils/test_get_labels_from_catalog.py
|
delmalih/few-shots-classification
|
8b06ff673882fd0d8b99cd705e5e5fab0ec93fb3
|
[
"MIT"
] | null | null | null |
tests/test_utils/test_get_labels_from_catalog.py
|
delmalih/few-shots-classification
|
8b06ff673882fd0d8b99cd705e5e5fab0ec93fb3
|
[
"MIT"
] | null | null | null |
tests/test_utils/test_get_labels_from_catalog.py
|
delmalih/few-shots-classification
|
8b06ff673882fd0d8b99cd705e5e5fab0ec93fb3
|
[
"MIT"
] | null | null | null |
##########################
# Imports
##########################
import os
import shutil
from few_shots_clf.utils import get_labels_from_catalog
from tests import empty_dir
from tests import build_catalog
from tests import delete_catalog
from tests.test_utils import TEST_DIRECTORY_PATH
##########################
# Function
##########################
def test_empty_folder():
    """A catalog whose only label folder is empty yields no labels."""
    # Start from a clean test directory.
    empty_dir(TEST_DIRECTORY_PATH)
    # Create an empty catalog folder.
    catalog_path = os.path.join(TEST_DIRECTORY_PATH, "catalog")
    os.makedirs(catalog_path)
    # Create a single label folder, but leave it empty (no images).
    label_path = os.path.join(catalog_path, "label")
    os.makedirs(label_path)
    # Collect labels from the catalog.
    labels = get_labels_from_catalog(catalog_path)
    # An empty label folder must not be reported as a label.
    assert len(labels) == 0
    # Clean up everything that was created.
    shutil.rmtree(label_path)
    shutil.rmtree(catalog_path)
    shutil.rmtree(TEST_DIRECTORY_PATH)
def test_folder_with_no_images():
    """A label folder containing only a non-image file yields no labels."""
    # Start from a clean test directory.
    empty_dir(TEST_DIRECTORY_PATH)
    # Create the catalog folder.
    catalog_path = os.path.join(TEST_DIRECTORY_PATH, "catalog")
    os.makedirs(catalog_path)
    # Create a single label folder.
    label_path = os.path.join(catalog_path, "label")
    os.makedirs(label_path)
    # Drop a plain text file (not an image) into the label folder.
    file_path = os.path.join(label_path, "tmp.txt")
    with open(file_path, "w") as handle:
        handle.write("test")
    # Collect labels from the catalog.
    labels = get_labels_from_catalog(catalog_path)
    # A folder with no images must not be reported as a label.
    assert len(labels) == 0
    # Clean up everything that was created.
    os.remove(file_path)
    shutil.rmtree(label_path)
    shutil.rmtree(catalog_path)
    shutil.rmtree(TEST_DIRECTORY_PATH)
def test_folder_with_one_label():
    """A catalog built with one label folder yields exactly that label."""
    # Start from a clean test directory.
    empty_dir(TEST_DIRECTORY_PATH)
    # Build a catalog with a single label folder (and its images).
    nb_labels = 1
    catalog_path, label_paths, img_paths = build_catalog(TEST_DIRECTORY_PATH,
                                                         nb_labels=nb_labels)
    # Collect labels from the catalog.
    labels = get_labels_from_catalog(catalog_path)
    # Each label folder's name must appear in the returned labels.
    # os.path.basename replaces split("/")[-1] so the test is not tied
    # to POSIX path separators and also passes on Windows.
    assert len(labels) == nb_labels
    for label_path in label_paths:
        label_name = os.path.basename(label_path)
        assert label_name in labels
    # Clean up everything build_catalog created.
    delete_catalog(TEST_DIRECTORY_PATH,
                   catalog_path,
                   label_paths,
                   img_paths)
def test_folder_with_multiple_labels():
    """A catalog built with several label folders yields all of them."""
    # Start from a clean test directory.
    empty_dir(TEST_DIRECTORY_PATH)
    # Build a catalog with ten label folders (and their images).
    nb_labels = 10
    catalog_path, label_paths, img_paths = build_catalog(TEST_DIRECTORY_PATH,
                                                         nb_labels=nb_labels)
    # Collect labels from the catalog.
    labels = get_labels_from_catalog(catalog_path)
    # Each label folder's name must appear in the returned labels.
    # os.path.basename replaces split("/")[-1] so the test is not tied
    # to POSIX path separators and also passes on Windows.
    assert len(labels) == nb_labels
    for label_path in label_paths:
        label_name = os.path.basename(label_path)
        assert label_name in labels
    # Clean up everything build_catalog created.
    delete_catalog(TEST_DIRECTORY_PATH,
                   catalog_path,
                   label_paths,
                   img_paths)
| 23.691729
| 77
| 0.628372
| 387
| 3,151
| 4.777778
| 0.144703
| 0.107085
| 0.119524
| 0.054083
| 0.801514
| 0.801514
| 0.801514
| 0.801514
| 0.801514
| 0.801514
| 0
| 0.003009
| 0.261822
| 3,151
| 132
| 78
| 23.871212
| 0.791917
| 0.123136
| 0
| 0.730159
| 0
| 0
| 0.014537
| 0
| 0
| 0
| 0
| 0
| 0.095238
| 1
| 0.063492
| false
| 0
| 0.111111
| 0
| 0.174603
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
766b8db3208ca66807b4fbb1ce0a9c90b9e5ca3c
| 57,082
|
py
|
Python
|
sdk/python/pulumi_okta/deprecated/saml_idp.py
|
pulumi/pulumi-okta
|
83f7617a85b3d05213901773fa4e6a151ab6076b
|
[
"ECL-2.0",
"Apache-2.0"
] | 5
|
2019-10-29T21:59:22.000Z
|
2021-11-08T12:00:24.000Z
|
sdk/python/pulumi_okta/deprecated/saml_idp.py
|
pulumi/pulumi-okta
|
83f7617a85b3d05213901773fa4e6a151ab6076b
|
[
"ECL-2.0",
"Apache-2.0"
] | 109
|
2020-01-06T10:28:09.000Z
|
2022-03-25T19:52:40.000Z
|
sdk/python/pulumi_okta/deprecated/saml_idp.py
|
pulumi/pulumi-okta
|
83f7617a85b3d05213901773fa4e6a151ab6076b
|
[
"ECL-2.0",
"Apache-2.0"
] | 2
|
2020-09-11T16:31:04.000Z
|
2020-11-24T12:23:17.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['SamlIdpArgs', 'SamlIdp']
# NOTE(review): this class is emitted by the Pulumi Terraform Bridge (tfgen),
# per the file header — fix issues in the upstream schema/generator rather
# than editing by hand. Each input property below is stored via pulumi.set
# and exposed through a generated getter/setter pair.
@pulumi.input_type
class SamlIdpArgs:
    def __init__(__self__, *,
                 issuer: pulumi.Input[str],
                 kid: pulumi.Input[str],
                 sso_url: pulumi.Input[str],
                 account_link_action: Optional[pulumi.Input[str]] = None,
                 account_link_group_includes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 acs_binding: Optional[pulumi.Input[str]] = None,
                 acs_type: Optional[pulumi.Input[str]] = None,
                 deprovisioned_action: Optional[pulumi.Input[str]] = None,
                 groups_action: Optional[pulumi.Input[str]] = None,
                 groups_assignments: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 groups_attribute: Optional[pulumi.Input[str]] = None,
                 groups_filters: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 issuer_mode: Optional[pulumi.Input[str]] = None,
                 max_clock_skew: Optional[pulumi.Input[int]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 name_format: Optional[pulumi.Input[str]] = None,
                 profile_master: Optional[pulumi.Input[bool]] = None,
                 provisioning_action: Optional[pulumi.Input[str]] = None,
                 request_signature_algorithm: Optional[pulumi.Input[str]] = None,
                 request_signature_scope: Optional[pulumi.Input[str]] = None,
                 response_signature_algorithm: Optional[pulumi.Input[str]] = None,
                 response_signature_scope: Optional[pulumi.Input[str]] = None,
                 sso_binding: Optional[pulumi.Input[str]] = None,
                 sso_destination: Optional[pulumi.Input[str]] = None,
                 status: Optional[pulumi.Input[str]] = None,
                 subject_filter: Optional[pulumi.Input[str]] = None,
                 subject_formats: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 subject_match_attribute: Optional[pulumi.Input[str]] = None,
                 subject_match_type: Optional[pulumi.Input[str]] = None,
                 suspended_action: Optional[pulumi.Input[str]] = None,
                 username_template: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a SamlIdp resource.
        :param pulumi.Input[str] issuer_mode: Indicates whether Okta uses the original Okta org domain URL, or a custom domain URL
        :param pulumi.Input[str] name: name of idp
        :param pulumi.Input[str] request_signature_algorithm: algorithm to use to sign requests
        :param pulumi.Input[str] request_signature_scope: algorithm to use to sign response
        :param pulumi.Input[str] response_signature_algorithm: algorithm to use to sign requests
        :param pulumi.Input[str] response_signature_scope: algorithm to use to sign response
        """
        # NOTE(review): the *_scope params are described as "algorithm to use to
        # sign response" in the generated docstring — presumably a schema
        # copy/paste issue upstream; verify against the Okta provider docs.
        # Required inputs are stored unconditionally.
        pulumi.set(__self__, "issuer", issuer)
        pulumi.set(__self__, "kid", kid)
        pulumi.set(__self__, "sso_url", sso_url)
        # Optional inputs are stored only when supplied.
        if account_link_action is not None:
            pulumi.set(__self__, "account_link_action", account_link_action)
        if account_link_group_includes is not None:
            pulumi.set(__self__, "account_link_group_includes", account_link_group_includes)
        # acs_binding is deprecated: warn first, then store it like any other
        # optional input (the duplicated `is not None` check is generator output).
        if acs_binding is not None:
            warnings.warn("""This property will be removed in the future, as it can only be set to 'HTTP-POST'""", DeprecationWarning)
            pulumi.log.warn("""acs_binding is deprecated: This property will be removed in the future, as it can only be set to 'HTTP-POST'""")
        if acs_binding is not None:
            pulumi.set(__self__, "acs_binding", acs_binding)
        if acs_type is not None:
            pulumi.set(__self__, "acs_type", acs_type)
        if deprovisioned_action is not None:
            pulumi.set(__self__, "deprovisioned_action", deprovisioned_action)
        if groups_action is not None:
            pulumi.set(__self__, "groups_action", groups_action)
        if groups_assignments is not None:
            pulumi.set(__self__, "groups_assignments", groups_assignments)
        if groups_attribute is not None:
            pulumi.set(__self__, "groups_attribute", groups_attribute)
        if groups_filters is not None:
            pulumi.set(__self__, "groups_filters", groups_filters)
        if issuer_mode is not None:
            pulumi.set(__self__, "issuer_mode", issuer_mode)
        if max_clock_skew is not None:
            pulumi.set(__self__, "max_clock_skew", max_clock_skew)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if name_format is not None:
            pulumi.set(__self__, "name_format", name_format)
        if profile_master is not None:
            pulumi.set(__self__, "profile_master", profile_master)
        if provisioning_action is not None:
            pulumi.set(__self__, "provisioning_action", provisioning_action)
        if request_signature_algorithm is not None:
            pulumi.set(__self__, "request_signature_algorithm", request_signature_algorithm)
        if request_signature_scope is not None:
            pulumi.set(__self__, "request_signature_scope", request_signature_scope)
        if response_signature_algorithm is not None:
            pulumi.set(__self__, "response_signature_algorithm", response_signature_algorithm)
        if response_signature_scope is not None:
            pulumi.set(__self__, "response_signature_scope", response_signature_scope)
        if sso_binding is not None:
            pulumi.set(__self__, "sso_binding", sso_binding)
        if sso_destination is not None:
            pulumi.set(__self__, "sso_destination", sso_destination)
        if status is not None:
            pulumi.set(__self__, "status", status)
        if subject_filter is not None:
            pulumi.set(__self__, "subject_filter", subject_filter)
        if subject_formats is not None:
            pulumi.set(__self__, "subject_formats", subject_formats)
        if subject_match_attribute is not None:
            pulumi.set(__self__, "subject_match_attribute", subject_match_attribute)
        if subject_match_type is not None:
            pulumi.set(__self__, "subject_match_type", subject_match_type)
        if suspended_action is not None:
            pulumi.set(__self__, "suspended_action", suspended_action)
        if username_template is not None:
            pulumi.set(__self__, "username_template", username_template)
    # --- Generated getter/setter pairs: each property delegates to
    # --- pulumi.get/pulumi.set under the snake_case key; `name=` on
    # --- @pulumi.getter maps it to the camelCase wire name.
    @property
    @pulumi.getter
    def issuer(self) -> pulumi.Input[str]:
        return pulumi.get(self, "issuer")
    @issuer.setter
    def issuer(self, value: pulumi.Input[str]):
        pulumi.set(self, "issuer", value)
    @property
    @pulumi.getter
    def kid(self) -> pulumi.Input[str]:
        return pulumi.get(self, "kid")
    @kid.setter
    def kid(self, value: pulumi.Input[str]):
        pulumi.set(self, "kid", value)
    @property
    @pulumi.getter(name="ssoUrl")
    def sso_url(self) -> pulumi.Input[str]:
        return pulumi.get(self, "sso_url")
    @sso_url.setter
    def sso_url(self, value: pulumi.Input[str]):
        pulumi.set(self, "sso_url", value)
    @property
    @pulumi.getter(name="accountLinkAction")
    def account_link_action(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "account_link_action")
    @account_link_action.setter
    def account_link_action(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "account_link_action", value)
    @property
    @pulumi.getter(name="accountLinkGroupIncludes")
    def account_link_group_includes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        return pulumi.get(self, "account_link_group_includes")
    @account_link_group_includes.setter
    def account_link_group_includes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "account_link_group_includes", value)
    @property
    @pulumi.getter(name="acsBinding")
    def acs_binding(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "acs_binding")
    @acs_binding.setter
    def acs_binding(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "acs_binding", value)
    @property
    @pulumi.getter(name="acsType")
    def acs_type(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "acs_type")
    @acs_type.setter
    def acs_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "acs_type", value)
    @property
    @pulumi.getter(name="deprovisionedAction")
    def deprovisioned_action(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "deprovisioned_action")
    @deprovisioned_action.setter
    def deprovisioned_action(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "deprovisioned_action", value)
    @property
    @pulumi.getter(name="groupsAction")
    def groups_action(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "groups_action")
    @groups_action.setter
    def groups_action(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "groups_action", value)
    @property
    @pulumi.getter(name="groupsAssignments")
    def groups_assignments(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        return pulumi.get(self, "groups_assignments")
    @groups_assignments.setter
    def groups_assignments(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "groups_assignments", value)
    @property
    @pulumi.getter(name="groupsAttribute")
    def groups_attribute(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "groups_attribute")
    @groups_attribute.setter
    def groups_attribute(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "groups_attribute", value)
    @property
    @pulumi.getter(name="groupsFilters")
    def groups_filters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        return pulumi.get(self, "groups_filters")
    @groups_filters.setter
    def groups_filters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "groups_filters", value)
    @property
    @pulumi.getter(name="issuerMode")
    def issuer_mode(self) -> Optional[pulumi.Input[str]]:
        """
        Indicates whether Okta uses the original Okta org domain URL, or a custom domain URL
        """
        return pulumi.get(self, "issuer_mode")
    @issuer_mode.setter
    def issuer_mode(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "issuer_mode", value)
    @property
    @pulumi.getter(name="maxClockSkew")
    def max_clock_skew(self) -> Optional[pulumi.Input[int]]:
        return pulumi.get(self, "max_clock_skew")
    @max_clock_skew.setter
    def max_clock_skew(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "max_clock_skew", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        name of idp
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="nameFormat")
    def name_format(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "name_format")
    @name_format.setter
    def name_format(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name_format", value)
    @property
    @pulumi.getter(name="profileMaster")
    def profile_master(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "profile_master")
    @profile_master.setter
    def profile_master(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "profile_master", value)
    @property
    @pulumi.getter(name="provisioningAction")
    def provisioning_action(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "provisioning_action")
    @provisioning_action.setter
    def provisioning_action(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "provisioning_action", value)
    @property
    @pulumi.getter(name="requestSignatureAlgorithm")
    def request_signature_algorithm(self) -> Optional[pulumi.Input[str]]:
        """
        algorithm to use to sign requests
        """
        return pulumi.get(self, "request_signature_algorithm")
    @request_signature_algorithm.setter
    def request_signature_algorithm(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "request_signature_algorithm", value)
    @property
    @pulumi.getter(name="requestSignatureScope")
    def request_signature_scope(self) -> Optional[pulumi.Input[str]]:
        """
        algorithm to use to sign response
        """
        return pulumi.get(self, "request_signature_scope")
    @request_signature_scope.setter
    def request_signature_scope(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "request_signature_scope", value)
    @property
    @pulumi.getter(name="responseSignatureAlgorithm")
    def response_signature_algorithm(self) -> Optional[pulumi.Input[str]]:
        """
        algorithm to use to sign requests
        """
        return pulumi.get(self, "response_signature_algorithm")
    @response_signature_algorithm.setter
    def response_signature_algorithm(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "response_signature_algorithm", value)
    @property
    @pulumi.getter(name="responseSignatureScope")
    def response_signature_scope(self) -> Optional[pulumi.Input[str]]:
        """
        algorithm to use to sign response
        """
        return pulumi.get(self, "response_signature_scope")
    @response_signature_scope.setter
    def response_signature_scope(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "response_signature_scope", value)
    @property
    @pulumi.getter(name="ssoBinding")
    def sso_binding(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "sso_binding")
    @sso_binding.setter
    def sso_binding(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sso_binding", value)
    @property
    @pulumi.getter(name="ssoDestination")
    def sso_destination(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "sso_destination")
    @sso_destination.setter
    def sso_destination(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sso_destination", value)
    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "status")
    @status.setter
    def status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "status", value)
    @property
    @pulumi.getter(name="subjectFilter")
    def subject_filter(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "subject_filter")
    @subject_filter.setter
    def subject_filter(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "subject_filter", value)
    @property
    @pulumi.getter(name="subjectFormats")
    def subject_formats(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        return pulumi.get(self, "subject_formats")
    @subject_formats.setter
    def subject_formats(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "subject_formats", value)
    @property
    @pulumi.getter(name="subjectMatchAttribute")
    def subject_match_attribute(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "subject_match_attribute")
    @subject_match_attribute.setter
    def subject_match_attribute(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "subject_match_attribute", value)
    @property
    @pulumi.getter(name="subjectMatchType")
    def subject_match_type(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "subject_match_type")
    @subject_match_type.setter
    def subject_match_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "subject_match_type", value)
    @property
    @pulumi.getter(name="suspendedAction")
    def suspended_action(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "suspended_action")
    @suspended_action.setter
    def suspended_action(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "suspended_action", value)
    @property
    @pulumi.getter(name="usernameTemplate")
    def username_template(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "username_template")
    @username_template.setter
    def username_template(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "username_template", value)
# NOTE(review): generated state container (tfgen) — all properties are
# optional because state lookups may be partial; do not edit by hand.
@pulumi.input_type
class _SamlIdpState:
    def __init__(__self__, *,
                 account_link_action: Optional[pulumi.Input[str]] = None,
                 account_link_group_includes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 acs_binding: Optional[pulumi.Input[str]] = None,
                 acs_type: Optional[pulumi.Input[str]] = None,
                 audience: Optional[pulumi.Input[str]] = None,
                 deprovisioned_action: Optional[pulumi.Input[str]] = None,
                 groups_action: Optional[pulumi.Input[str]] = None,
                 groups_assignments: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 groups_attribute: Optional[pulumi.Input[str]] = None,
                 groups_filters: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 issuer: Optional[pulumi.Input[str]] = None,
                 issuer_mode: Optional[pulumi.Input[str]] = None,
                 kid: Optional[pulumi.Input[str]] = None,
                 max_clock_skew: Optional[pulumi.Input[int]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 name_format: Optional[pulumi.Input[str]] = None,
                 profile_master: Optional[pulumi.Input[bool]] = None,
                 provisioning_action: Optional[pulumi.Input[str]] = None,
                 request_signature_algorithm: Optional[pulumi.Input[str]] = None,
                 request_signature_scope: Optional[pulumi.Input[str]] = None,
                 response_signature_algorithm: Optional[pulumi.Input[str]] = None,
                 response_signature_scope: Optional[pulumi.Input[str]] = None,
                 sso_binding: Optional[pulumi.Input[str]] = None,
                 sso_destination: Optional[pulumi.Input[str]] = None,
                 sso_url: Optional[pulumi.Input[str]] = None,
                 status: Optional[pulumi.Input[str]] = None,
                 subject_filter: Optional[pulumi.Input[str]] = None,
                 subject_formats: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 subject_match_attribute: Optional[pulumi.Input[str]] = None,
                 subject_match_type: Optional[pulumi.Input[str]] = None,
                 suspended_action: Optional[pulumi.Input[str]] = None,
                 type: Optional[pulumi.Input[str]] = None,
                 username_template: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering SamlIdp resources.
        :param pulumi.Input[str] issuer_mode: Indicates whether Okta uses the original Okta org domain URL, or a custom domain URL
        :param pulumi.Input[str] name: name of idp
        :param pulumi.Input[str] request_signature_algorithm: algorithm to use to sign requests
        :param pulumi.Input[str] request_signature_scope: algorithm to use to sign response
        :param pulumi.Input[str] response_signature_algorithm: algorithm to use to sign requests
        :param pulumi.Input[str] response_signature_scope: algorithm to use to sign response
        """
        # Every property is optional: store only the ones that were supplied.
        if account_link_action is not None:
            pulumi.set(__self__, "account_link_action", account_link_action)
        if account_link_group_includes is not None:
            pulumi.set(__self__, "account_link_group_includes", account_link_group_includes)
        # acs_binding is deprecated: warn first, then store it like any other
        # optional input (the duplicated `is not None` check is generator output).
        if acs_binding is not None:
            warnings.warn("""This property will be removed in the future, as it can only be set to 'HTTP-POST'""", DeprecationWarning)
            pulumi.log.warn("""acs_binding is deprecated: This property will be removed in the future, as it can only be set to 'HTTP-POST'""")
        if acs_binding is not None:
            pulumi.set(__self__, "acs_binding", acs_binding)
        if acs_type is not None:
            pulumi.set(__self__, "acs_type", acs_type)
        if audience is not None:
            pulumi.set(__self__, "audience", audience)
        if deprovisioned_action is not None:
            pulumi.set(__self__, "deprovisioned_action", deprovisioned_action)
        if groups_action is not None:
            pulumi.set(__self__, "groups_action", groups_action)
        if groups_assignments is not None:
            pulumi.set(__self__, "groups_assignments", groups_assignments)
        if groups_attribute is not None:
            pulumi.set(__self__, "groups_attribute", groups_attribute)
        if groups_filters is not None:
            pulumi.set(__self__, "groups_filters", groups_filters)
        if issuer is not None:
            pulumi.set(__self__, "issuer", issuer)
        if issuer_mode is not None:
            pulumi.set(__self__, "issuer_mode", issuer_mode)
        if kid is not None:
            pulumi.set(__self__, "kid", kid)
        if max_clock_skew is not None:
            pulumi.set(__self__, "max_clock_skew", max_clock_skew)
        if name is not None:
            pulumi.set(__self__, "name", name)
        if name_format is not None:
            pulumi.set(__self__, "name_format", name_format)
        if profile_master is not None:
            pulumi.set(__self__, "profile_master", profile_master)
        if provisioning_action is not None:
            pulumi.set(__self__, "provisioning_action", provisioning_action)
        if request_signature_algorithm is not None:
            pulumi.set(__self__, "request_signature_algorithm", request_signature_algorithm)
        if request_signature_scope is not None:
            pulumi.set(__self__, "request_signature_scope", request_signature_scope)
        if response_signature_algorithm is not None:
            pulumi.set(__self__, "response_signature_algorithm", response_signature_algorithm)
        if response_signature_scope is not None:
            pulumi.set(__self__, "response_signature_scope", response_signature_scope)
        if sso_binding is not None:
            pulumi.set(__self__, "sso_binding", sso_binding)
        if sso_destination is not None:
            pulumi.set(__self__, "sso_destination", sso_destination)
        if sso_url is not None:
            pulumi.set(__self__, "sso_url", sso_url)
        if status is not None:
            pulumi.set(__self__, "status", status)
        if subject_filter is not None:
            pulumi.set(__self__, "subject_filter", subject_filter)
        if subject_formats is not None:
            pulumi.set(__self__, "subject_formats", subject_formats)
        if subject_match_attribute is not None:
            pulumi.set(__self__, "subject_match_attribute", subject_match_attribute)
        if subject_match_type is not None:
            pulumi.set(__self__, "subject_match_type", subject_match_type)
        if suspended_action is not None:
            pulumi.set(__self__, "suspended_action", suspended_action)
        if type is not None:
            pulumi.set(__self__, "type", type)
        if username_template is not None:
            pulumi.set(__self__, "username_template", username_template)
    # --- Generated getter/setter pairs: each property delegates to
    # --- pulumi.get/pulumi.set under the snake_case key; `name=` on
    # --- @pulumi.getter maps it to the camelCase wire name.
    @property
    @pulumi.getter(name="accountLinkAction")
    def account_link_action(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "account_link_action")
    @account_link_action.setter
    def account_link_action(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "account_link_action", value)
    @property
    @pulumi.getter(name="accountLinkGroupIncludes")
    def account_link_group_includes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        return pulumi.get(self, "account_link_group_includes")
    @account_link_group_includes.setter
    def account_link_group_includes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "account_link_group_includes", value)
    @property
    @pulumi.getter(name="acsBinding")
    def acs_binding(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "acs_binding")
    @acs_binding.setter
    def acs_binding(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "acs_binding", value)
    @property
    @pulumi.getter(name="acsType")
    def acs_type(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "acs_type")
    @acs_type.setter
    def acs_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "acs_type", value)
    @property
    @pulumi.getter
    def audience(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "audience")
    @audience.setter
    def audience(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "audience", value)
    @property
    @pulumi.getter(name="deprovisionedAction")
    def deprovisioned_action(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "deprovisioned_action")
    @deprovisioned_action.setter
    def deprovisioned_action(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "deprovisioned_action", value)
    @property
    @pulumi.getter(name="groupsAction")
    def groups_action(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "groups_action")
    @groups_action.setter
    def groups_action(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "groups_action", value)
    @property
    @pulumi.getter(name="groupsAssignments")
    def groups_assignments(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        return pulumi.get(self, "groups_assignments")
    @groups_assignments.setter
    def groups_assignments(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "groups_assignments", value)
    @property
    @pulumi.getter(name="groupsAttribute")
    def groups_attribute(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "groups_attribute")
    @groups_attribute.setter
    def groups_attribute(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "groups_attribute", value)
    @property
    @pulumi.getter(name="groupsFilters")
    def groups_filters(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        return pulumi.get(self, "groups_filters")
    @groups_filters.setter
    def groups_filters(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "groups_filters", value)
    @property
    @pulumi.getter
    def issuer(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "issuer")
    @issuer.setter
    def issuer(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "issuer", value)
    @property
    @pulumi.getter(name="issuerMode")
    def issuer_mode(self) -> Optional[pulumi.Input[str]]:
        """
        Indicates whether Okta uses the original Okta org domain URL, or a custom domain URL
        """
        return pulumi.get(self, "issuer_mode")
    @issuer_mode.setter
    def issuer_mode(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "issuer_mode", value)
    @property
    @pulumi.getter
    def kid(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "kid")
    @kid.setter
    def kid(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "kid", value)
    @property
    @pulumi.getter(name="maxClockSkew")
    def max_clock_skew(self) -> Optional[pulumi.Input[int]]:
        return pulumi.get(self, "max_clock_skew")
    @max_clock_skew.setter
    def max_clock_skew(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "max_clock_skew", value)
    @property
    @pulumi.getter
    def name(self) -> Optional[pulumi.Input[str]]:
        """
        name of idp
        """
        return pulumi.get(self, "name")
    @name.setter
    def name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name", value)
    @property
    @pulumi.getter(name="nameFormat")
    def name_format(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "name_format")
    @name_format.setter
    def name_format(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "name_format", value)
    @property
    @pulumi.getter(name="profileMaster")
    def profile_master(self) -> Optional[pulumi.Input[bool]]:
        return pulumi.get(self, "profile_master")
    @profile_master.setter
    def profile_master(self, value: Optional[pulumi.Input[bool]]):
        pulumi.set(self, "profile_master", value)
    @property
    @pulumi.getter(name="provisioningAction")
    def provisioning_action(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "provisioning_action")
    @provisioning_action.setter
    def provisioning_action(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "provisioning_action", value)
    @property
    @pulumi.getter(name="requestSignatureAlgorithm")
    def request_signature_algorithm(self) -> Optional[pulumi.Input[str]]:
        """
        algorithm to use to sign requests
        """
        return pulumi.get(self, "request_signature_algorithm")
    @request_signature_algorithm.setter
    def request_signature_algorithm(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "request_signature_algorithm", value)
    @property
    @pulumi.getter(name="requestSignatureScope")
    def request_signature_scope(self) -> Optional[pulumi.Input[str]]:
        """
        algorithm to use to sign response
        """
        return pulumi.get(self, "request_signature_scope")
    @request_signature_scope.setter
    def request_signature_scope(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "request_signature_scope", value)
    @property
    @pulumi.getter(name="responseSignatureAlgorithm")
    def response_signature_algorithm(self) -> Optional[pulumi.Input[str]]:
        """
        algorithm to use to sign requests
        """
        return pulumi.get(self, "response_signature_algorithm")
    @response_signature_algorithm.setter
    def response_signature_algorithm(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "response_signature_algorithm", value)
    @property
    @pulumi.getter(name="responseSignatureScope")
    def response_signature_scope(self) -> Optional[pulumi.Input[str]]:
        """
        algorithm to use to sign response
        """
        return pulumi.get(self, "response_signature_scope")
    @response_signature_scope.setter
    def response_signature_scope(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "response_signature_scope", value)
    @property
    @pulumi.getter(name="ssoBinding")
    def sso_binding(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "sso_binding")
    @sso_binding.setter
    def sso_binding(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sso_binding", value)
    @property
    @pulumi.getter(name="ssoDestination")
    def sso_destination(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "sso_destination")
    @sso_destination.setter
    def sso_destination(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sso_destination", value)
    @property
    @pulumi.getter(name="ssoUrl")
    def sso_url(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "sso_url")
    @sso_url.setter
    def sso_url(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "sso_url", value)
    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "status")
    @status.setter
    def status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "status", value)
    @property
    @pulumi.getter(name="subjectFilter")
    def subject_filter(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "subject_filter")
    @subject_filter.setter
    def subject_filter(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "subject_filter", value)
    @property
    @pulumi.getter(name="subjectFormats")
    def subject_formats(self) -> Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]:
        return pulumi.get(self, "subject_formats")
    @subject_formats.setter
    def subject_formats(self, value: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]]):
        pulumi.set(self, "subject_formats", value)
    @property
    @pulumi.getter(name="subjectMatchAttribute")
    def subject_match_attribute(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "subject_match_attribute")
    @subject_match_attribute.setter
    def subject_match_attribute(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "subject_match_attribute", value)
    @property
    @pulumi.getter(name="subjectMatchType")
    def subject_match_type(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "subject_match_type")
    @subject_match_type.setter
    def subject_match_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "subject_match_type", value)
    @property
    @pulumi.getter(name="suspendedAction")
    def suspended_action(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "suspended_action")
    @suspended_action.setter
    def suspended_action(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "suspended_action", value)
    @property
    @pulumi.getter
    def type(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "type")
    @type.setter
    def type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "type", value)
    @property
    @pulumi.getter(name="usernameTemplate")
    def username_template(self) -> Optional[pulumi.Input[str]]:
        return pulumi.get(self, "username_template")
    @username_template.setter
    def username_template(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "username_template", value)
class SamlIdp(pulumi.CustomResource):
    # Auto-generated Pulumi resource for the (deprecated) okta SAML identity
    # provider: 'okta:deprecated/samlIdp:SamlIdp'.
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 account_link_action: Optional[pulumi.Input[str]] = None,
                 account_link_group_includes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 acs_binding: Optional[pulumi.Input[str]] = None,
                 acs_type: Optional[pulumi.Input[str]] = None,
                 deprovisioned_action: Optional[pulumi.Input[str]] = None,
                 groups_action: Optional[pulumi.Input[str]] = None,
                 groups_assignments: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 groups_attribute: Optional[pulumi.Input[str]] = None,
                 groups_filters: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 issuer: Optional[pulumi.Input[str]] = None,
                 issuer_mode: Optional[pulumi.Input[str]] = None,
                 kid: Optional[pulumi.Input[str]] = None,
                 max_clock_skew: Optional[pulumi.Input[int]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 name_format: Optional[pulumi.Input[str]] = None,
                 profile_master: Optional[pulumi.Input[bool]] = None,
                 provisioning_action: Optional[pulumi.Input[str]] = None,
                 request_signature_algorithm: Optional[pulumi.Input[str]] = None,
                 request_signature_scope: Optional[pulumi.Input[str]] = None,
                 response_signature_algorithm: Optional[pulumi.Input[str]] = None,
                 response_signature_scope: Optional[pulumi.Input[str]] = None,
                 sso_binding: Optional[pulumi.Input[str]] = None,
                 sso_destination: Optional[pulumi.Input[str]] = None,
                 sso_url: Optional[pulumi.Input[str]] = None,
                 status: Optional[pulumi.Input[str]] = None,
                 subject_filter: Optional[pulumi.Input[str]] = None,
                 subject_formats: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 subject_match_attribute: Optional[pulumi.Input[str]] = None,
                 subject_match_type: Optional[pulumi.Input[str]] = None,
                 suspended_action: Optional[pulumi.Input[str]] = None,
                 username_template: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Create a SamlIdp resource with the given unique name, props, and options.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] issuer_mode: Indicates whether Okta uses the original Okta org domain URL, or a custom domain URL
        :param pulumi.Input[str] name: name of idp
        :param pulumi.Input[str] request_signature_algorithm: algorithm to use to sign requests
        :param pulumi.Input[str] request_signature_scope: scope of request signing — NOTE(review): upstream text said "algorithm to use to sign response"; confirm against the provider schema
        :param pulumi.Input[str] response_signature_algorithm: algorithm applied to responses — NOTE(review): upstream text said "requests"; confirm against the provider schema
        :param pulumi.Input[str] response_signature_scope: scope of response signing
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: SamlIdpArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Create a SamlIdp resource with the given unique name, props, and options.

        :param str resource_name: The name of the resource.
        :param SamlIdpArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: either a single
        # SamlIdpArgs bundle, or individual keyword arguments. Both paths
        # end up in _internal_init().
        resource_args, opts = _utilities.get_resource_args_opts(SamlIdpArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)
    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 account_link_action: Optional[pulumi.Input[str]] = None,
                 account_link_group_includes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 acs_binding: Optional[pulumi.Input[str]] = None,
                 acs_type: Optional[pulumi.Input[str]] = None,
                 deprovisioned_action: Optional[pulumi.Input[str]] = None,
                 groups_action: Optional[pulumi.Input[str]] = None,
                 groups_assignments: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 groups_attribute: Optional[pulumi.Input[str]] = None,
                 groups_filters: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 issuer: Optional[pulumi.Input[str]] = None,
                 issuer_mode: Optional[pulumi.Input[str]] = None,
                 kid: Optional[pulumi.Input[str]] = None,
                 max_clock_skew: Optional[pulumi.Input[int]] = None,
                 name: Optional[pulumi.Input[str]] = None,
                 name_format: Optional[pulumi.Input[str]] = None,
                 profile_master: Optional[pulumi.Input[bool]] = None,
                 provisioning_action: Optional[pulumi.Input[str]] = None,
                 request_signature_algorithm: Optional[pulumi.Input[str]] = None,
                 request_signature_scope: Optional[pulumi.Input[str]] = None,
                 response_signature_algorithm: Optional[pulumi.Input[str]] = None,
                 response_signature_scope: Optional[pulumi.Input[str]] = None,
                 sso_binding: Optional[pulumi.Input[str]] = None,
                 sso_destination: Optional[pulumi.Input[str]] = None,
                 sso_url: Optional[pulumi.Input[str]] = None,
                 status: Optional[pulumi.Input[str]] = None,
                 subject_filter: Optional[pulumi.Input[str]] = None,
                 subject_formats: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
                 subject_match_attribute: Optional[pulumi.Input[str]] = None,
                 subject_match_type: Optional[pulumi.Input[str]] = None,
                 suspended_action: Optional[pulumi.Input[str]] = None,
                 username_template: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        # Shared implementation behind both __init__ overloads.
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        # opts.id is only set when looking up an existing resource (see get());
        # creating a new resource requires building the args bag here.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = SamlIdpArgs.__new__(SamlIdpArgs)

            __props__.__dict__["account_link_action"] = account_link_action
            __props__.__dict__["account_link_group_includes"] = account_link_group_includes
            # acs_binding is deprecated upstream: it can only be 'HTTP-POST'.
            if acs_binding is not None and not opts.urn:
                warnings.warn("""This property will be removed in the future, as it can only be set to 'HTTP-POST'""", DeprecationWarning)
                pulumi.log.warn("""acs_binding is deprecated: This property will be removed in the future, as it can only be set to 'HTTP-POST'""")
            __props__.__dict__["acs_binding"] = acs_binding
            __props__.__dict__["acs_type"] = acs_type
            __props__.__dict__["deprovisioned_action"] = deprovisioned_action
            __props__.__dict__["groups_action"] = groups_action
            __props__.__dict__["groups_assignments"] = groups_assignments
            __props__.__dict__["groups_attribute"] = groups_attribute
            __props__.__dict__["groups_filters"] = groups_filters
            # issuer, kid and sso_url are required when creating the resource.
            if issuer is None and not opts.urn:
                raise TypeError("Missing required property 'issuer'")
            __props__.__dict__["issuer"] = issuer
            __props__.__dict__["issuer_mode"] = issuer_mode
            if kid is None and not opts.urn:
                raise TypeError("Missing required property 'kid'")
            __props__.__dict__["kid"] = kid
            __props__.__dict__["max_clock_skew"] = max_clock_skew
            __props__.__dict__["name"] = name
            __props__.__dict__["name_format"] = name_format
            __props__.__dict__["profile_master"] = profile_master
            __props__.__dict__["provisioning_action"] = provisioning_action
            __props__.__dict__["request_signature_algorithm"] = request_signature_algorithm
            __props__.__dict__["request_signature_scope"] = request_signature_scope
            __props__.__dict__["response_signature_algorithm"] = response_signature_algorithm
            __props__.__dict__["response_signature_scope"] = response_signature_scope
            __props__.__dict__["sso_binding"] = sso_binding
            __props__.__dict__["sso_destination"] = sso_destination
            if sso_url is None and not opts.urn:
                raise TypeError("Missing required property 'sso_url'")
            __props__.__dict__["sso_url"] = sso_url
            __props__.__dict__["status"] = status
            __props__.__dict__["subject_filter"] = subject_filter
            __props__.__dict__["subject_formats"] = subject_formats
            __props__.__dict__["subject_match_attribute"] = subject_match_attribute
            __props__.__dict__["subject_match_type"] = subject_match_type
            __props__.__dict__["suspended_action"] = suspended_action
            __props__.__dict__["username_template"] = username_template
            # audience and type are outputs computed by the provider, never
            # supplied by the caller.
            __props__.__dict__["audience"] = None
            __props__.__dict__["type"] = None
        super(SamlIdp, __self__).__init__(
            'okta:deprecated/samlIdp:SamlIdp',
            resource_name,
            __props__,
            opts)
    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None,
            account_link_action: Optional[pulumi.Input[str]] = None,
            account_link_group_includes: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            acs_binding: Optional[pulumi.Input[str]] = None,
            acs_type: Optional[pulumi.Input[str]] = None,
            audience: Optional[pulumi.Input[str]] = None,
            deprovisioned_action: Optional[pulumi.Input[str]] = None,
            groups_action: Optional[pulumi.Input[str]] = None,
            groups_assignments: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            groups_attribute: Optional[pulumi.Input[str]] = None,
            groups_filters: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            issuer: Optional[pulumi.Input[str]] = None,
            issuer_mode: Optional[pulumi.Input[str]] = None,
            kid: Optional[pulumi.Input[str]] = None,
            max_clock_skew: Optional[pulumi.Input[int]] = None,
            name: Optional[pulumi.Input[str]] = None,
            name_format: Optional[pulumi.Input[str]] = None,
            profile_master: Optional[pulumi.Input[bool]] = None,
            provisioning_action: Optional[pulumi.Input[str]] = None,
            request_signature_algorithm: Optional[pulumi.Input[str]] = None,
            request_signature_scope: Optional[pulumi.Input[str]] = None,
            response_signature_algorithm: Optional[pulumi.Input[str]] = None,
            response_signature_scope: Optional[pulumi.Input[str]] = None,
            sso_binding: Optional[pulumi.Input[str]] = None,
            sso_destination: Optional[pulumi.Input[str]] = None,
            sso_url: Optional[pulumi.Input[str]] = None,
            status: Optional[pulumi.Input[str]] = None,
            subject_filter: Optional[pulumi.Input[str]] = None,
            subject_formats: Optional[pulumi.Input[Sequence[pulumi.Input[str]]]] = None,
            subject_match_attribute: Optional[pulumi.Input[str]] = None,
            subject_match_type: Optional[pulumi.Input[str]] = None,
            suspended_action: Optional[pulumi.Input[str]] = None,
            type: Optional[pulumi.Input[str]] = None,
            username_template: Optional[pulumi.Input[str]] = None) -> 'SamlIdp':
        """
        Get an existing SamlIdp resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] issuer_mode: Indicates whether Okta uses the original Okta org domain URL, or a custom domain URL
        :param pulumi.Input[str] name: name of idp
        :param pulumi.Input[str] request_signature_algorithm: algorithm to use to sign requests
        :param pulumi.Input[str] request_signature_scope: scope of request signing — NOTE(review): upstream text said "algorithm to use to sign response"; confirm against the provider schema
        :param pulumi.Input[str] response_signature_algorithm: algorithm applied to responses — NOTE(review): upstream text said "requests"; confirm against the provider schema
        :param pulumi.Input[str] response_signature_scope: scope of response signing
        """
        # Merging id into opts triggers the "lookup existing resource" path
        # in _internal_init (opts.id is not None there).
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        __props__ = _SamlIdpState.__new__(_SamlIdpState)

        __props__.__dict__["account_link_action"] = account_link_action
        __props__.__dict__["account_link_group_includes"] = account_link_group_includes
        __props__.__dict__["acs_binding"] = acs_binding
        __props__.__dict__["acs_type"] = acs_type
        __props__.__dict__["audience"] = audience
        __props__.__dict__["deprovisioned_action"] = deprovisioned_action
        __props__.__dict__["groups_action"] = groups_action
        __props__.__dict__["groups_assignments"] = groups_assignments
        __props__.__dict__["groups_attribute"] = groups_attribute
        __props__.__dict__["groups_filters"] = groups_filters
        __props__.__dict__["issuer"] = issuer
        __props__.__dict__["issuer_mode"] = issuer_mode
        __props__.__dict__["kid"] = kid
        __props__.__dict__["max_clock_skew"] = max_clock_skew
        __props__.__dict__["name"] = name
        __props__.__dict__["name_format"] = name_format
        __props__.__dict__["profile_master"] = profile_master
        __props__.__dict__["provisioning_action"] = provisioning_action
        __props__.__dict__["request_signature_algorithm"] = request_signature_algorithm
        __props__.__dict__["request_signature_scope"] = request_signature_scope
        __props__.__dict__["response_signature_algorithm"] = response_signature_algorithm
        __props__.__dict__["response_signature_scope"] = response_signature_scope
        __props__.__dict__["sso_binding"] = sso_binding
        __props__.__dict__["sso_destination"] = sso_destination
        __props__.__dict__["sso_url"] = sso_url
        __props__.__dict__["status"] = status
        __props__.__dict__["subject_filter"] = subject_filter
        __props__.__dict__["subject_formats"] = subject_formats
        __props__.__dict__["subject_match_attribute"] = subject_match_attribute
        __props__.__dict__["subject_match_type"] = subject_match_type
        __props__.__dict__["suspended_action"] = suspended_action
        __props__.__dict__["type"] = type
        __props__.__dict__["username_template"] = username_template
        return SamlIdp(resource_name, opts=opts, __props__=__props__)
    @property
    @pulumi.getter(name="accountLinkAction")
    def account_link_action(self) -> pulumi.Output[Optional[str]]:
        """
        Account-link action.
        """
        return pulumi.get(self, "account_link_action")

    @property
    @pulumi.getter(name="accountLinkGroupIncludes")
    def account_link_group_includes(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        Groups included when linking accounts.
        """
        return pulumi.get(self, "account_link_group_includes")

    @property
    @pulumi.getter(name="acsBinding")
    def acs_binding(self) -> pulumi.Output[Optional[str]]:
        """
        ACS binding. Deprecated upstream: it can only be set to 'HTTP-POST'
        (a DeprecationWarning is raised on create).
        """
        return pulumi.get(self, "acs_binding")

    @property
    @pulumi.getter(name="acsType")
    def acs_type(self) -> pulumi.Output[Optional[str]]:
        """
        ACS type.
        """
        return pulumi.get(self, "acs_type")

    @property
    @pulumi.getter
    def audience(self) -> pulumi.Output[str]:
        """
        Audience (provider-computed; always an output, never an input).
        """
        return pulumi.get(self, "audience")

    @property
    @pulumi.getter(name="deprovisionedAction")
    def deprovisioned_action(self) -> pulumi.Output[Optional[str]]:
        """
        Action taken for deprovisioned users.
        """
        return pulumi.get(self, "deprovisioned_action")

    @property
    @pulumi.getter(name="groupsAction")
    def groups_action(self) -> pulumi.Output[Optional[str]]:
        """
        Groups action.
        """
        return pulumi.get(self, "groups_action")

    @property
    @pulumi.getter(name="groupsAssignments")
    def groups_assignments(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        Group assignments (list).
        """
        return pulumi.get(self, "groups_assignments")

    @property
    @pulumi.getter(name="groupsAttribute")
    def groups_attribute(self) -> pulumi.Output[Optional[str]]:
        """
        Groups attribute.
        """
        return pulumi.get(self, "groups_attribute")

    @property
    @pulumi.getter(name="groupsFilters")
    def groups_filters(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        Group filters (list).
        """
        return pulumi.get(self, "groups_filters")

    @property
    @pulumi.getter
    def issuer(self) -> pulumi.Output[str]:
        """
        IdP issuer (required at create time).
        """
        return pulumi.get(self, "issuer")

    @property
    @pulumi.getter(name="issuerMode")
    def issuer_mode(self) -> pulumi.Output[Optional[str]]:
        """
        Indicates whether Okta uses the original Okta org domain URL, or a custom domain URL
        """
        return pulumi.get(self, "issuer_mode")

    @property
    @pulumi.getter
    def kid(self) -> pulumi.Output[str]:
        """
        Key ID (required at create time).
        """
        return pulumi.get(self, "kid")

    @property
    @pulumi.getter(name="maxClockSkew")
    def max_clock_skew(self) -> pulumi.Output[Optional[int]]:
        """
        Maximum allowed clock skew.
        """
        return pulumi.get(self, "max_clock_skew")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        name of idp
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="nameFormat")
    def name_format(self) -> pulumi.Output[Optional[str]]:
        """
        Name format.
        """
        return pulumi.get(self, "name_format")

    @property
    @pulumi.getter(name="profileMaster")
    def profile_master(self) -> pulumi.Output[Optional[bool]]:
        """
        Whether this IdP is the profile master.
        """
        return pulumi.get(self, "profile_master")

    @property
    @pulumi.getter(name="provisioningAction")
    def provisioning_action(self) -> pulumi.Output[Optional[str]]:
        """
        Provisioning action.
        """
        return pulumi.get(self, "provisioning_action")

    @property
    @pulumi.getter(name="requestSignatureAlgorithm")
    def request_signature_algorithm(self) -> pulumi.Output[Optional[str]]:
        """
        algorithm to use to sign requests
        """
        return pulumi.get(self, "request_signature_algorithm")

    @property
    @pulumi.getter(name="requestSignatureScope")
    def request_signature_scope(self) -> pulumi.Output[Optional[str]]:
        """
        algorithm to use to sign response
        """
        # NOTE(review): name suggests this is the signing *scope*, not an
        # algorithm — the upstream docstring looks copy-pasted; confirm.
        return pulumi.get(self, "request_signature_scope")

    @property
    @pulumi.getter(name="responseSignatureAlgorithm")
    def response_signature_algorithm(self) -> pulumi.Output[Optional[str]]:
        """
        algorithm to use to sign requests
        """
        # NOTE(review): name says *response*; upstream docstring says
        # "requests" — looks copy-pasted; confirm.
        return pulumi.get(self, "response_signature_algorithm")

    @property
    @pulumi.getter(name="responseSignatureScope")
    def response_signature_scope(self) -> pulumi.Output[Optional[str]]:
        """
        algorithm to use to sign response
        """
        return pulumi.get(self, "response_signature_scope")

    @property
    @pulumi.getter(name="ssoBinding")
    def sso_binding(self) -> pulumi.Output[Optional[str]]:
        """
        Single sign-on binding.
        """
        return pulumi.get(self, "sso_binding")

    @property
    @pulumi.getter(name="ssoDestination")
    def sso_destination(self) -> pulumi.Output[Optional[str]]:
        """
        Single sign-on destination.
        """
        return pulumi.get(self, "sso_destination")

    @property
    @pulumi.getter(name="ssoUrl")
    def sso_url(self) -> pulumi.Output[str]:
        """
        Single sign-on URL (required at create time).
        """
        return pulumi.get(self, "sso_url")

    @property
    @pulumi.getter
    def status(self) -> pulumi.Output[Optional[str]]:
        """
        Status of the identity provider.
        """
        return pulumi.get(self, "status")

    @property
    @pulumi.getter(name="subjectFilter")
    def subject_filter(self) -> pulumi.Output[Optional[str]]:
        """
        Subject filter expression.
        """
        return pulumi.get(self, "subject_filter")

    @property
    @pulumi.getter(name="subjectFormats")
    def subject_formats(self) -> pulumi.Output[Optional[Sequence[str]]]:
        """
        Subject name formats (list).
        """
        return pulumi.get(self, "subject_formats")

    @property
    @pulumi.getter(name="subjectMatchAttribute")
    def subject_match_attribute(self) -> pulumi.Output[Optional[str]]:
        """
        Attribute used when matching the subject.
        """
        return pulumi.get(self, "subject_match_attribute")

    @property
    @pulumi.getter(name="subjectMatchType")
    def subject_match_type(self) -> pulumi.Output[Optional[str]]:
        """
        Strategy used when matching the subject.
        """
        return pulumi.get(self, "subject_match_type")

    @property
    @pulumi.getter(name="suspendedAction")
    def suspended_action(self) -> pulumi.Output[Optional[str]]:
        """
        Action taken for suspended users.
        """
        return pulumi.get(self, "suspended_action")

    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        Type of the identity provider (provider-computed output).
        """
        return pulumi.get(self, "type")

    @property
    @pulumi.getter(name="usernameTemplate")
    def username_template(self) -> pulumi.Output[Optional[str]]:
        """
        Okta username template for mapped users.
        """
        return pulumi.get(self, "username_template")
| 44.491037
| 147
| 0.664658
| 6,578
| 57,082
| 5.468075
| 0.032685
| 0.107342
| 0.114821
| 0.137007
| 0.951403
| 0.945064
| 0.936751
| 0.92463
| 0.914371
| 0.881398
| 0
| 0.000023
| 0.222312
| 57,082
| 1,282
| 148
| 44.525741
| 0.810236
| 0.067447
| 0
| 0.886228
| 1
| 0.005988
| 0.122971
| 0.034633
| 0
| 0
| 0
| 0
| 0
| 1
| 0.167665
| false
| 0.000998
| 0.00499
| 0.078842
| 0.273453
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
7671de0bd10a0cf7f59c8417b24ea687f1c57f0b
| 19,087
|
py
|
Python
|
UI/LoginWin.py
|
hitergszf/ArtisticCloudBlog
|
764af907c8a8e48efc420034c47c7d8bba4331be
|
[
"MIT"
] | 7
|
2020-07-18T10:21:10.000Z
|
2020-07-19T11:21:12.000Z
|
UI/LoginWin.py
|
MaoGreenDou/ArtisticCloudBlog
|
bf0608864b0b91368f0dfd3bc593bbedaa7576ef
|
[
"MIT"
] | 1
|
2021-07-29T07:16:54.000Z
|
2021-07-29T07:16:54.000Z
|
UI/LoginWin.py
|
MaoGreenDou/ArtisticCloudBlog
|
bf0608864b0b91368f0dfd3bc593bbedaa7576ef
|
[
"MIT"
] | 5
|
2020-07-18T10:21:12.000Z
|
2020-12-09T12:22:47.000Z
|
from PyQt5.QtWidgets import *
from PyQt5 import QtCore, QtWidgets, QtGui
import BLL.ClientSocket
import BLL.FileSystem
# Login window
class LoginWin(QWidget):
    def __init__(self):
        """Build the login window: left illustration pane + right login form."""
        super(LoginWin, self).__init__()
        # White window background
        pe = QtGui.QPalette()
        pe.setColor(pe.Background, QtGui.QColor(255, 255, 255))
        self.setPalette(pe)
        # Center the window on the screen.
        # FIX: QWidget.move() takes ints; '/' produced floats, which raises
        # TypeError on current PyQt5 — use floor division instead.
        screen = QDesktopWidget().screenGeometry()
        size = self.geometry()
        self.move((screen.width() - size.width()) // 2,
                  (screen.height() - size.height()) // 2)
        self.MainWin = None
        # Frameless window. NOTE(review): the second setWindowFlags() call
        # replaces the first, so WindowCloseButtonHint has no effect —
        # confirm whether the flags were meant to be OR-combined.
        self.setWindowFlags(QtCore.Qt.WindowCloseButtonHint)
        self.setWindowFlags(QtCore.Qt.FramelessWindowHint)
        # Top-level layout, no outer margins
        layout = QHBoxLayout(self.window())
        layout.setContentsMargins(0, 0, 0, 0)
        # Left pane: illustration image
        imgWidget = QtWidgets.QWidget()
        imgLayout = QVBoxLayout(imgWidget)
        imgLabel = QtWidgets.QLabel()
        # Load the illustration from the application's icon directory
        fileSystem = BLL.FileSystem.FileSystem()
        iconPath = fileSystem.iconPath
        path = iconPath + '/' + "loginImg.png"
        img = QtGui.QImage(path)
        # Scale proportionally, capped at 500x500
        maxSize = QtCore.QSize(500, 500)
        loginImg = QtGui.QPixmap.fromImage(img.scaled(maxSize, QtCore.Qt.KeepAspectRatio,
                                                      QtCore.Qt.SmoothTransformation))
        imgLabel.setPixmap(loginImg)
        imgLabel.setAlignment(QtCore.Qt.AlignCenter)
        imgLayout.setContentsMargins(0, 0, 0, 0)
        imgLayout.addWidget(imgLabel)
        # Right pane: login form
        loginWidget = QtWidgets.QWidget()
        loginWidget.setMinimumSize(300, 400)
        loginWidget.setContentsMargins(0, 5, 5, 0)
        loginLayout = QVBoxLayout(loginWidget)
        # Close button (needed because the window is frameless)
        closeButton = QPushButton()
        path = iconPath + '/' + "close.png"
        closeButton.setStyleSheet("QPushButton{border-image: url(%s)}" % path)
        closeButton.setFixedSize(30, 30)
        closeButton.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
        # Title text
        welcomeLabel = QTextBrowser()
        welcomeLabel.setText("艺术云博客")
        welcomeLabel.setAlignment(QtCore.Qt.AlignCenter)
        welcomeLabel.setFont(QtGui.QFont("华文彩云", 24, QtGui.QFont.Bold))
        welcomeLabel.setStyleSheet("background:transparent;border-width:0;border-style:outset")
        # Account and password fields
        self.accountEdit = QLineEdit()
        self.accountEdit.setStyleSheet(
            """background:white;
            padding-left:10px ;
            padding-top:1px ;
            border: 2px solid rgb(209 , 209 , 209);
            border-top:transparent;
            border-left:transparent;
            border-right:transparent;
            """)
        self.accountEdit.setPlaceholderText("请输入用户名")
        self.accountEdit.setMinimumSize(240, 40)
        self.passwordEdit = QLineEdit()
        self.passwordEdit.setStyleSheet(
            """background:white;
            padding-left:10px ;
            padding-top:1px ;
            border: 2px solid rgb(209 , 209 , 209);
            border-top:transparent;
            border-left:transparent;
            border-right:transparent;
            """)
        self.passwordEdit.setPlaceholderText("请输入密码")
        self.passwordEdit.setMinimumSize(240, 40)
        # Hide typed password characters
        self.passwordEdit.setEchoMode(QLineEdit.Password)
        # Buttons
        loginButton = QPushButton()
        loginButton.setFixedSize(240, 40)
        loginButton.setText("登 录")
        loginButton.setStyleSheet("""
            color:white;
            background-color:rgb(14 , 150 , 254);
            border-radius:10px;
            """)
        loginButton.setFont(QtGui.QFont("微软雅黑", 10, QtGui.QFont.Normal))
        loginButton.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
        registerButton = QPushButton("还没有账号?点此注册")
        registerButton.setFlat(True)
        registerButton.setStyleSheet("QPushButton{background: transparent;}")
        registerButton.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
        forgotPswButton = QPushButton("忘记密码")
        forgotPswButton.setFlat(True)
        forgotPswButton.setStyleSheet("color:blue;")
        forgotPswButton.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
        # Assemble the login form
        loginLayout.setContentsMargins(0, 0, 0, 0)
        loginLayout.addWidget(closeButton, 1, QtCore.Qt.AlignRight | QtCore.Qt.AlignTop)
        loginLayout.addWidget(welcomeLabel, 1, QtCore.Qt.AlignCenter)
        loginLayout.addWidget(self.accountEdit, 1, QtCore.Qt.AlignCenter)
        loginLayout.addWidget(self.passwordEdit, 1, QtCore.Qt.AlignCenter)
        loginLayout.addWidget(forgotPswButton, 1, QtCore.Qt.AlignRight)
        loginLayout.addWidget(loginButton, 1, QtCore.Qt.AlignCenter)
        loginLayout.addWidget(registerButton, 1, QtCore.Qt.AlignCenter)
        # Assemble the top-level layout
        layout.addWidget(imgWidget)
        layout.addWidget(loginWidget)
        # Signal wiring
        loginButton.clicked.connect(self.login)
        registerButton.clicked.connect(self.register)
        forgotPswButton.clicked.connect(self.forgotPsw)
        closeButton.clicked.connect(self.close)
        # Enter in the password field also triggers login
        self.passwordEdit.returnPressed.connect(self.login)
        # Initial focus
        self.accountEdit.setFocus()
    # Set the main window that will be opened (and receive the session info)
    # after a successful login.
    def setMainWin(self, MainWin):
        self.MainWin = MainWin
# 登录
def login(self):
account = self.accountEdit.text()
password = self.passwordEdit.text()
if not account or not password:
simpleMessageBox('提示', '请完整填写输入框')
return
try:
client = BLL.ClientSocket.ClientSocket()
response = client.login(account, password)
except Exception as e:
print(e)
simpleMessageBox('错误', '无法连接到服务器')
return
# 登录失败
if not response:
self.accountEdit.clear()
self.passwordEdit.clear()
simpleMessageBox('提示', '用户名或密码错误')
# 登录成功
else:
self.MainWin.setUserInfo(response, account)
self.MainWin.show()
self.close()
# 弹出注册窗口
def register(self):
self.setVisible(False)
registerDialog = RegisterDialog()
registerDialog.setWindowModality(QtCore.Qt.ApplicationModal)
registerDialog.exec_()
self.setVisible(True)
    # Open the "forgot password" flow: ask for the account name, fetch its
    # security question from the server, then show the recovery dialog.
    def forgotPsw(self):
        # Prompt for the account name
        account, okPressed = QtWidgets.QInputDialog.getText(self, "忘记密码", "请输入用户名:",
                                                            QtWidgets.QLineEdit.Normal)
        # Bail out if the user cancelled or entered nothing
        if not okPressed or not account:
            return
        # Fetch the security question for this account
        client = BLL.ClientSocket.ClientSocket()
        response = client.getSecurityQes(account)
        # Lookup failed: unknown account
        if not response:
            self.accountEdit.clear()
            self.passwordEdit.clear()
            simpleMessageBox('提示', '用户名不存在')
            return
        # Lookup succeeded: show the recovery dialog modally, hiding the
        # login window while it is open.
        self.setVisible(False)
        forgotPswDialog = ForgotPswDialog(response, account)
        forgotPswDialog.setWindowModality(QtCore.Qt.ApplicationModal)
        forgotPswDialog.exec_()
        self.setVisible(True)
    # Developer shortcut: skip the login form and start the main window
    # directly with a hard-coded session.
    # SECURITY NOTE(review): this embeds a fixed token and account in source;
    # it must not ship in a production build.
    def quickStart(self):
        token = "796ae392-55dd-30cd-be61-6f15e2477771"
        account = "123"
        self.MainWin.setUserInfo(token, account)
        self.MainWin.show()
        self.close()
# Registration dialog
class RegisterDialog(QDialog):
    def __init__(self):
        """Build the registration dialog: left illustration pane + right form."""
        super().__init__()
        # White window background
        pe = QtGui.QPalette()
        pe.setColor(pe.Background, QtGui.QColor(255, 255, 255))
        self.setPalette(pe)
        # Center the window on the screen.
        # FIX: QWidget.move() takes ints; '/' produced floats, which raises
        # TypeError on current PyQt5 — use floor division instead.
        screen = QDesktopWidget().screenGeometry()
        size = self.geometry()
        self.move((screen.width() - size.width()) // 2,
                  (screen.height() - size.height()) // 2)
        # Frameless window (the second setWindowFlags() call replaces the first)
        self.setWindowFlags(QtCore.Qt.WindowCloseButtonHint)
        self.setWindowFlags(QtCore.Qt.FramelessWindowHint)
        # Top-level layout, no outer margins
        layout = QHBoxLayout(self.window())
        layout.setContentsMargins(0, 0, 0, 0)
        # Left pane: illustration image
        imgWidget = QtWidgets.QWidget()
        imgLayout = QVBoxLayout(imgWidget)
        imgLabel = QtWidgets.QLabel()
        # Load the illustration from the application's icon directory
        fileSystem = BLL.FileSystem.FileSystem()
        iconPath = fileSystem.iconPath
        path = iconPath + '/' + "loginImg.png"
        img = QtGui.QImage(path)
        # Scale proportionally, capped at 500x500
        maxSize = QtCore.QSize(500, 500)
        loginImg = QtGui.QPixmap.fromImage(img.scaled(maxSize, QtCore.Qt.KeepAspectRatio,
                                                      QtCore.Qt.SmoothTransformation))
        imgLabel.setPixmap(loginImg)
        imgLabel.setAlignment(QtCore.Qt.AlignCenter)
        imgLayout.setContentsMargins(0, 0, 0, 0)
        imgLayout.addWidget(imgLabel)
        # Right pane: registration form
        loginWidget = QtWidgets.QWidget()
        loginWidget.setMinimumSize(300, 400)
        loginWidget.setContentsMargins(0, 5, 5, 0)
        loginLayout = QVBoxLayout(loginWidget)
        # Shared QLineEdit styling for every field in the form
        loginWidget.setStyleSheet(
            """QLineEdit{background:white;
            padding-left:10px ;
            padding-top:1px ;
            border: 2px solid rgb(209 , 209 , 209);
            border-top:transparent;
            border-left:transparent;
            border-right:transparent;
            }
            """)
        # Close button (needed because the window is frameless)
        closeButton = QPushButton()
        path = iconPath + '/' + "close.png"
        closeButton.setStyleSheet("QPushButton{border-image: url(%s)}" % path)
        closeButton.setFixedSize(30, 30)
        closeButton.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
        # Title text
        welcomeLabel = QTextBrowser()
        welcomeLabel.setText("艺术云博客")
        welcomeLabel.setAlignment(QtCore.Qt.AlignCenter)
        welcomeLabel.setFont(QtGui.QFont("华文彩云", 24, QtGui.QFont.Bold))
        welcomeLabel.setStyleSheet("background:transparent;border-width:0;border-style:outset")
        # Account, password, security question and security answer fields
        self.accountEdit = QLineEdit()
        self.accountEdit.setPlaceholderText("请输入用户名")
        self.accountEdit.setMinimumSize(240, 40)
        self.passwordEdit = QLineEdit()
        self.passwordEdit.setPlaceholderText("请输入密码")
        self.passwordEdit.setMinimumSize(240, 40)
        self.questionEdit = QLineEdit()
        self.questionEdit.setPlaceholderText("请输入密码保护问题")
        self.questionEdit.setMinimumSize(240, 40)
        self.answerEdit = QLineEdit()
        self.answerEdit.setPlaceholderText("请输入密码保护答案")
        self.answerEdit.setMinimumSize(240, 40)
        # Hide typed password characters
        self.passwordEdit.setEchoMode(QLineEdit.Password)
        # Register button
        registerButton = QPushButton()
        registerButton.setFixedSize(240, 40)
        registerButton.setText("注 册")
        registerButton.setStyleSheet("""
            color:white;
            background-color:rgb(14 , 150 , 254);
            border-radius:10px;
            """)
        registerButton.setFont(QtGui.QFont("微软雅黑", 10, QtGui.QFont.Normal))
        registerButton.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
        # FIX: removed a "忘记密码" QPushButton that was created here but never
        # added to any layout nor connected to a slot — dead code with no
        # visible effect.
        # Assemble the registration form
        loginLayout.setContentsMargins(0, 0, 0, 0)
        loginLayout.addWidget(closeButton, 1, QtCore.Qt.AlignRight | QtCore.Qt.AlignTop)
        loginLayout.addWidget(welcomeLabel, 1, QtCore.Qt.AlignCenter)
        loginLayout.addWidget(self.accountEdit, 1, QtCore.Qt.AlignCenter)
        loginLayout.addWidget(self.passwordEdit, 1, QtCore.Qt.AlignCenter)
        loginLayout.addWidget(self.questionEdit, 1, QtCore.Qt.AlignCenter)
        loginLayout.addWidget(self.answerEdit, 1, QtCore.Qt.AlignCenter)
        loginLayout.addWidget(registerButton, 1, QtCore.Qt.AlignCenter)
        # Assemble the top-level layout
        layout.addWidget(imgWidget)
        layout.addWidget(loginWidget)
        # Signal wiring
        registerButton.clicked.connect(self.register)
        closeButton.clicked.connect(self.close)
        # Initial focus
        self.accountEdit.setFocus()
    # Submit the registration form to the server.
    def register(self):
        account = self.accountEdit.text()
        password = self.passwordEdit.text()
        question = self.questionEdit.text()
        answer = self.answerEdit.text()
        # All four fields are required
        if not account or not password or not question or not answer:
            simpleMessageBox('提示', '请完整填写输入框')
            return
        # Send the registration request; report connection problems to the user
        try:
            client = BLL.ClientSocket.ClientSocket()
            response = client.register(account, password, question, answer)
        except Exception as e:
            print(e)
            simpleMessageBox('错误', '无法连接到服务器')
            return
        # Falsy response means the username is already taken
        if not response:
            simpleMessageBox('提示', '用户名已存在')
        else:
            simpleMessageBox('提示', '账号注册成功')
            # Close the dialog only on success, so the user can retry on failure
            self.close()
# Forgot-password dialog: shows the account's security question, checks the
# user's answer against the server and resets the password on success.
class ForgotPswDialog(QDialog):
    def __init__(self, question, account):
        """Build the dialog.

        question -- security-question text displayed to the user
        account  -- account whose password is being reset
        """
        super().__init__()
        self.account = account
        # White window background
        pe = QtGui.QPalette()
        pe.setColor(pe.Background, QtGui.QColor(255, 255, 255))
        self.setPalette(pe)
        # Center the window on the screen.  Integer division is required:
        # QWidget.move() takes int coordinates and a float raises TypeError
        # under PyQt5 / Python 3.
        screen = QDesktopWidget().screenGeometry()
        size = self.geometry()
        self.move((screen.width() - size.width()) // 2,
                  (screen.height() - size.height()) // 2)
        # Frameless window with only a close-button hint.  The original code
        # called setWindowFlags() twice; the second call replaced (not added
        # to) the first flag set, so combine the flags in a single call.
        self.setWindowFlags(QtCore.Qt.WindowCloseButtonHint |
                            QtCore.Qt.FramelessWindowHint)
        # Top-level layout (image on the left, form on the right)
        layout = QHBoxLayout(self.window())
        layout.setContentsMargins(0, 0, 0, 0)
        # Left side: illustration image
        imgWidget = QtWidgets.QWidget()
        imgLayout = QVBoxLayout(imgWidget)
        imgLabel = QtWidgets.QLabel()
        fileSystem = BLL.FileSystem.FileSystem()
        iconPath = fileSystem.iconPath
        path = iconPath + '/' + "loginImg.png"
        img = QtGui.QImage(path)
        # Scale proportionally, capped at 500x500
        maxSize = QtCore.QSize(500, 500)
        loginImg = QtGui.QPixmap.fromImage(img.scaled(maxSize, QtCore.Qt.KeepAspectRatio,
                                                      QtCore.Qt.SmoothTransformation))
        imgLabel.setPixmap(loginImg)
        imgLabel.setAlignment(QtCore.Qt.AlignCenter)
        imgLayout.setContentsMargins(0, 0, 0, 0)
        imgLayout.addWidget(imgLabel)
        # Right side: reset-password form
        loginWidget = QtWidgets.QWidget()
        loginWidget.setMinimumSize(300, 400)
        loginWidget.setContentsMargins(0, 5, 5, 0)
        loginLayout = QVBoxLayout(loginWidget)
        # Shared style for the line edits (underline-only border)
        loginWidget.setStyleSheet(
            """QLineEdit{background:white;
            padding-left:10px ;
            padding-top:1px ;
            border: 2px solid rgb(209 , 209 , 209);
            border-top:transparent;
            border-left:transparent;
            border-right:transparent;
            }
            """)
        # Close button
        closeButton = QPushButton()
        path = iconPath + '/' + "close.png"
        closeButton.setStyleSheet("QPushButton{border-image: url(%s)}" % path)
        closeButton.setFixedSize(30, 30)
        closeButton.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
        # Title banner
        welcomeLabel = QTextBrowser()
        welcomeLabel.setText("艺术云博客")
        welcomeLabel.setAlignment(QtCore.Qt.AlignCenter)
        welcomeLabel.setFont(QtGui.QFont("华文彩云", 24, QtGui.QFont.Bold))
        welcomeLabel.setStyleSheet("background:transparent;border-width:0;border-style:outset")
        # New password, security-question label, security answer
        self.passwordEdit = QLineEdit()
        self.passwordEdit.setPlaceholderText("请输入新的密码")
        self.passwordEdit.setMinimumSize(240, 40)
        self.questionLabel = QLabel()
        self.questionLabel.setText('问题:' + question)
        self.questionLabel.setAlignment(QtCore.Qt.AlignCenter)
        self.questionLabel.setMinimumSize(240, 40)
        self.answerEdit = QLineEdit()
        self.answerEdit.setPlaceholderText("请输入密码保护答案")
        self.answerEdit.setMinimumSize(240, 40)
        # Mask the password input
        self.passwordEdit.setEchoMode(QLineEdit.Password)
        # Reset button
        forgotPswButton = QPushButton()
        forgotPswButton.setFixedSize(240, 40)
        forgotPswButton.setText("重置密码")
        forgotPswButton.setStyleSheet("""
            color:white;
            background-color:rgb(14 , 150 , 254);
            border-radius:10px;
            """)
        forgotPswButton.setFont(QtGui.QFont("微软雅黑", 10, QtGui.QFont.Normal))
        forgotPswButton.setCursor(QtGui.QCursor(QtCore.Qt.PointingHandCursor))
        # Assemble the form
        loginLayout.setContentsMargins(0, 0, 0, 0)
        loginLayout.addWidget(closeButton, 1, QtCore.Qt.AlignRight | QtCore.Qt.AlignTop)
        loginLayout.addWidget(welcomeLabel, 1, QtCore.Qt.AlignCenter)
        loginLayout.addWidget(self.questionLabel, 1, QtCore.Qt.AlignCenter)
        loginLayout.addWidget(self.answerEdit, 1, QtCore.Qt.AlignCenter)
        loginLayout.addWidget(self.passwordEdit, 1, QtCore.Qt.AlignCenter)
        loginLayout.addWidget(forgotPswButton, 1, QtCore.Qt.AlignCenter)
        # Assemble the window
        layout.addWidget(imgWidget)
        layout.addWidget(loginWidget)
        # Signals
        forgotPswButton.clicked.connect(self.forgotPsw)
        closeButton.clicked.connect(self.close)
        # Initial focus on the answer field
        self.answerEdit.setFocus()

    # Reset password
    def forgotPsw(self):
        """Validate the form and ask the server to reset the password."""
        answer = self.answerEdit.text()
        password = self.passwordEdit.text()
        account = self.account
        if not account or not password or not answer:
            simpleMessageBox('提示', '请完整填写输入框')
            return
        try:
            client = BLL.ClientSocket.ClientSocket()
            response = client.forgotPsw(account, password, answer)
            if response:
                simpleMessageBox('提示', '密码重置成功,新的密码为:' + response)
                self.close()
            else:
                simpleMessageBox('提示', '密保答案错误,密码重置失败')
        except Exception as e:
            print(e)
            simpleMessageBox('错误', '无法连接到服务器')
            return
def simpleMessageBox(title, text):
    """Show a modal message box with a single confirm ("确定") button."""
    box = QMessageBox()
    box.setWindowTitle(title)
    box.setText(text)
    box.setStandardButtons(QMessageBox.Yes)
    box.button(QMessageBox.Yes).setText('确定')
    box.exec_()
| 35.743446
| 95
| 0.612197
| 1,680
| 19,087
| 6.939286
| 0.16131
| 0.03637
| 0.037485
| 0.027449
| 0.781009
| 0.765998
| 0.743867
| 0.736404
| 0.72165
| 0.70235
| 0
| 0.024378
| 0.28433
| 19,087
| 533
| 96
| 35.810507
| 0.829063
| 0.038822
| 0
| 0.736544
| 0
| 0
| 0.067766
| 0.021792
| 0
| 0
| 0
| 0
| 0
| 1
| 0.031161
| false
| 0.07932
| 0.011331
| 0
| 0.073654
| 0.008499
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 7
|
7677200ed13ee1bb5666bc454b5b44450c0b7ab5
| 4,694
|
py
|
Python
|
conversion/temperatureConversion.py
|
slowy07/pythonApps
|
22f9766291dbccd8185035745950c5ee4ebd6a3e
|
[
"MIT"
] | 10
|
2020-10-09T11:05:18.000Z
|
2022-02-13T03:22:10.000Z
|
conversion/temperatureConversion.py
|
khairanabila/pythonApps
|
f90b8823f939b98f7bf1dea7ed35fe6e22e2f730
|
[
"MIT"
] | null | null | null |
conversion/temperatureConversion.py
|
khairanabila/pythonApps
|
f90b8823f939b98f7bf1dea7ed35fe6e22e2f730
|
[
"MIT"
] | 6
|
2020-11-26T12:49:43.000Z
|
2022-03-06T06:46:43.000Z
|
def celciusToFahrenheit(celcius: float, ndigits: int = 2) -> float:
    """Convert a Celsius temperature to Fahrenheit.

    Result is rounded to *ndigits* decimal places (default 2).
    Wikipedia reference: https://en.wikipedia.org/wiki/Celsius
    Wikipedia reference: https://en.wikipedia.org/wiki/Fahrenheit
    """
    scaled = float(celcius) * 9 / 5
    return round(scaled + 32, ndigits)
def celciusToKelvin(celcius: float, ndigits: int = 2) -> float:
    """Convert a Celsius temperature to Kelvin, rounded to *ndigits* places.

    Wikipedia reference: https://en.wikipedia.org/wiki/Kelvin
    """
    kelvin = float(celcius) + 273.15
    return round(kelvin, ndigits)
def celciusToRankie(celcius: float, ndigits: int = 2) -> float:
    """Convert a Celsius temperature to Rankine, rounded to *ndigits* places.

    Wikipedia reference: https://en.wikipedia.org/wiki/Rankine_scale
    """
    rankine = (float(celcius) * 9 / 5) + 491.67
    return round(rankine, ndigits)
def fahrenheitToCelcius(fahrenheit: float, ndigits=2) -> float:
    """Convert a Fahrenheit temperature to Celsius, rounded to *ndigits* places.

    Wikipedia reference: https://en.wikipedia.org/wiki/Celsius
    """
    delta = float(fahrenheit) - 32
    return round(delta * 5 / 9, ndigits)
def fahrenheitToKelvin(fahrenheit: float, ndigits=2) -> float:
    """Convert a Fahrenheit temperature to Kelvin.

    Result is rounded to *ndigits* decimal places (default 2).
    Fix: the Kelvin offset is 273.15 — the original added 273.5,
    making every result 0.35 K too high.
    Wikipedia reference: https://en.wikipedia.org/wiki/Kelvin
    """
    return round(((float(fahrenheit) - 32) * 5 / 9) + 273.15, ndigits)
def fahrenheitToRankie(fahrenheit: float, ndigits=2) -> float:
    """Convert a Fahrenheit temperature to Rankine (°R = °F + 459.67).

    Result is rounded to *ndigits* decimal places (default 2).
    Fix: the exact offset is 459.67 — the original used 459.7.
    Wikipedia reference: https://en.wikipedia.org/wiki/Rankine_scale
    """
    return round(float(fahrenheit) + 459.67, ndigits)
def kelvinToCelcius(kelvin: float, ndigits=2) -> float:
    """Convert a Kelvin temperature to Celsius, rounded to *ndigits* places.

    Wikipedia reference: https://en.wikipedia.org/wiki/Celsius
    """
    celsius = float(kelvin) - 273.15
    return round(celsius, ndigits)
def kelvinToFahrenheit(kelvin: float, ndigits=2) -> float:
    """Convert a Kelvin temperature to Fahrenheit, rounded to *ndigits* places.

    Wikipedia reference: https://en.wikipedia.org/wiki/Fahrenheit
    """
    celsius = float(kelvin) - 273.15
    return round((celsius * 9 / 5) + 32, ndigits)
def kelvinToRankie(kelvin: float, ndigits=2) -> float:
    """Convert a Kelvin temperature to Rankine (°R = K × 9/5).

    Result is rounded to *ndigits* decimal places (default 2).
    Fix: the original body referenced the undefined name ``fahrenheit``
    (a NameError on every call) and used an additive offset; Rankine is
    an absolute scale, so the conversion is a pure scale factor.
    Wikipedia reference: https://en.wikipedia.org/wiki/Rankine_scale
    """
    return round(float(kelvin) * 9 / 5, ndigits)
def rankieToCelcius(rankie: float, ndigits=2) -> float:
    """Convert a Rankine temperature to Celsius, rounded to *ndigits* places.

    Wikipedia reference: https://en.wikipedia.org/wiki/Rankine_scale
    """
    delta = float(rankie) - 491.67
    return round(delta * 5 / 9, ndigits)
def rankieToFahrenheit(rankie: float, ndigits=2) -> float:
    """Convert a Rankine temperature to Fahrenheit, rounded to *ndigits* places.

    Wikipedia reference: https://en.wikipedia.org/wiki/Fahrenheit
    """
    fahrenheit = float(rankie) - 459.67
    return round(fahrenheit, ndigits)
def rankieToKelvin(rankie: float, ndigits=2) -> float:
    """Convert a Rankine temperature to Kelvin, rounded to *ndigits* places.

    Wikipedia reference: https://en.wikipedia.org/wiki/Kelvin
    """
    kelvin = float(rankie) * 5 / 9
    return round(kelvin, ndigits)
def reamurToKelvin(reamur: float, ndigits=2) -> float:
    """Convert a Réaumur temperature to Kelvin (K = Ré × 1.25 + 273.15)."""
    kelvin = float(reamur) * 1.25 + 273.15
    return round(kelvin, ndigits)
def reamurToFahrenheit(reamur: float, ndigits=2) -> float:
    """Convert a Réaumur temperature to Fahrenheit (°F = Ré × 2.25 + 32)."""
    fahrenheit = float(reamur) * 2.25 + 32
    return round(fahrenheit, ndigits)
def reamurToCelcius(reamur: float, ndigits=2) -> float:
    """Convert a Réaumur temperature to Celsius (°C = Ré × 1.25)."""
    celsius = float(reamur) * 1.25
    return round(celsius, ndigits)
def reamurToRankie(reamur: float, ndigits=2) -> float:
    """Convert a Réaumur temperature to Rankine (°R = Ré × 2.25 + 32 + 459.67)."""
    rankine = float(reamur) * 2.25 + 32 + 459.67
    return round(rankine, ndigits)
# Run the module's doctests when executed directly.
if __name__ == '__main__':
    import doctest
    doctest.testmod()
| 41.910714
| 86
| 0.705582
| 629
| 4,694
| 5.243243
| 0.101749
| 0.130988
| 0.167374
| 0.181928
| 0.848393
| 0.840509
| 0.828381
| 0.819891
| 0.819891
| 0.819891
| 0
| 0.030139
| 0.172987
| 4,694
| 111
| 87
| 42.288288
| 0.819423
| 0.518747
| 0
| 0
| 0
| 0
| 0.004036
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.457143
| false
| 0
| 0.028571
| 0.114286
| 0.942857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 0
| 0
|
0
| 9
|
4f434489ef89d07e86032d634c141b427d21a2da
| 15,902
|
py
|
Python
|
PyREMOT/solvers/solResultAnalysis.py
|
sinagilassi/rmt-app
|
bbd5bb496f36116ecec15d75b4133a43a9233aaa
|
[
"MIT"
] | null | null | null |
PyREMOT/solvers/solResultAnalysis.py
|
sinagilassi/rmt-app
|
bbd5bb496f36116ecec15d75b4133a43a9233aaa
|
[
"MIT"
] | null | null | null |
PyREMOT/solvers/solResultAnalysis.py
|
sinagilassi/rmt-app
|
bbd5bb496f36116ecec15d75b4133a43a9233aaa
|
[
"MIT"
] | null | null | null |
# RESULT ANALYSIS
# ----------------
# import packages/modules
import numpy as np
# internal
from PyREMOT.library import plotClass as pltc
from PyREMOT.core.utilities import roundNum, selectFromListByIndex, selectRandomForList
from PyREMOT.docs.rmtUtility import rmtUtilityClass as rmtUtil
from PyREMOT.docs.modelSetting import MODEL_SETTING, PROCESS_SETTING
def setOptimizeRootMethod(y, params1, params2, param3=0):
    """
    set results of optimize.root function
    args:
        y: raw solver output; last layer holds temperature unless isothermal
        params1:
            compNo: component number
            noLayer: number of var layers
            varNoRows: 1
            varNoColumns: number of finite nodes in the z direction
        params2:
            Cif: species concentration of feed gas
            Tf: feed temperature
            processType: process setting key (isothermal vs not)
        param3: temperature filler used when the process is isothermal
    returns:
        dict with data1 (real concentrations) and data2 (real temperature)
    """
    # distribute y[i] value through the reactor length
    # try/except
    try:
        compNo, noLayer, varNoRows, varNoColumns = params1
        Cif, Tf, processType = params2
        # concentration
        # non-isothermal: all but the last layer are concentrations
        dataYs_Concentration_DiLeVa = y[:-1] if processType != PROCESS_SETTING['ISO-THER'] else y[:]
        # temperature
        # isothermal: fabricate a constant-temperature layer from param3
        dataYs_Temperature_DiLeVa = y[-1] if processType != PROCESS_SETTING['ISO-THER'] else np.repeat(
            param3, varNoColumns).reshape((varNoRows, varNoColumns))
        # convert to real value
        # concentration
        SpCo_mz_ReVa = np.zeros((compNo, varNoColumns))
        T_mz_ReVa = np.zeros((1, varNoColumns))
        # concentration
        for i in range(compNo):
            # dimensionless analysis: real value
            # scaling reference: per-species feed conc, or max feed conc
            Cif_Set = Cif[i] if MODEL_SETTING['GaMaCoTe0'] != "MAX" else np.max(
                Cif)
            SpCo_mz_ReVa[i, :] = rmtUtil.calRealDiLessValue(
                dataYs_Concentration_DiLeVa[i, :], Cif_Set)
        # temperature
        T_mz_ReVa[0, :] = rmtUtil.calRealDiLessValue(
            dataYs_Temperature_DiLeVa[0, :], Tf, mode="TEMP")
        # result
        res = {
            "data1": SpCo_mz_ReVa,
            "data2": T_mz_ReVa
        }
        # return
        return res
    except Exception as e:
        # re-raise unchanged; `e` kept only for debugger inspection
        raise
def sortResult2(y, params1, params2):
    """
    sort result of modeling of particle diffusion-reaction
    args:
        y: raw solver output; last row holds temperature unless isothermal
        params1:
            compNo: component number
            noLayer: number of var layers
            varNoRows: 1
            varNoColumns: number of finite nodes in the r direction
        params2:
            Cif: species concentration of feed gas
            Tf: feed temperature
            processType: process setting key
    returns:
        dict with data1 (real concentrations) and data2 (real temperature)
    """
    # distribute y[i] value through the reactor length
    # try/except
    try:
        compNo, noLayer, varNoRows, varNoColumns = params1
        Cif, Tf, processType = params2
        # concentration
        # non-isothermal: all but the last row are concentrations
        dataYs_Concentration_DiLeVa = y[:-1] if processType != PROCESS_SETTING['ISO-THER'] else y[:]
        # temperature
        # isothermal: zero-filled dimensionless temperature row
        dataYs_Temperature_DiLeVa = y[-1, :].reshape((1, varNoColumns)) if processType != PROCESS_SETTING['ISO-THER'] else np.repeat(
            0, varNoColumns).reshape((1, varNoColumns))
        # convert to real value
        # concentration
        SpCo_mz_ReVa = np.zeros((compNo, varNoColumns))
        T_mz_ReVa = np.zeros((1, varNoColumns))
        # concentration
        for i in range(compNo):
            # dimensionless analysis: real value
            # scaling reference: per-species feed conc, or max feed conc
            Cif_Set = Cif[i] if MODEL_SETTING['GaMaCoTe0'] != "MAX" else np.max(
                Cif)
            SpCo_mz_ReVa[i, :] = rmtUtil.calRealDiLessValue(
                dataYs_Concentration_DiLeVa[i, :], Cif_Set)
        # temperature
        T_mz_ReVa[0, :] = rmtUtil.calRealDiLessValue(
            dataYs_Temperature_DiLeVa[0, :], Tf, mode="TEMP")
        # result
        res = {
            "data1": SpCo_mz_ReVa,
            "data2": T_mz_ReVa
        }
        # return
        return res
    except Exception as e:
        # re-raise unchanged; `e` kept only for debugger inspection
        raise
# NOTE
def sortedResult3(yC_DiLeVa, yT_DiLeVa, yCs_DiLeVa, yTs_DiLeVa, params1, params2):
    """
    sort result of heterogenous modeling
    args:
        yC_DiLeVa: gas-phase dimensionless concentrations
        yT_DiLeVa: gas-phase dimensionless temperature
        yCs_DiLeVa: solid-phase dimensionless concentrations
        yTs_DiLeVa: solid-phase dimensionless temperature
        params1:
            compNo: component number
            noLayer: number of var layers
            varNoRows: 1
            varNoColumns: number of finite nodes in the z direction
            rNo: number of finite nodes in the r directions
            zNo: the same as varNoColumns
        params2:
            Cif: species concentration of feed gas
            Tf: feed temperature
    returns:
        dict with data1/data2 (gas conc/temp) and data3/data4 (solid conc/temp)
    """
    # distribute y[i] value through the reactor length
    # try/except
    try:
        compNo, noLayer, varNoRows, varNoColumns, rNo, zNo = params1
        Cif, Tf, processType = params2
        # convert to real value
        # gas phase concentration/temperature
        SpCo_mz_ReVa = np.zeros((compNo, varNoColumns))
        T_mz_ReVa = np.zeros((1, varNoColumns))
        # solid phase concentration/temperature
        # all species concentration in solid phase (catalyst)
        SpCosi_mzr_ReVa = np.zeros((compNo, rNo, zNo))
        Ts_mzr_ReVa = np.zeros((rNo, zNo))
        # NOTE
        ### gas phases ###
        # concentration
        for i in range(compNo):
            # dimensionless analysis: real value
            Cif_Set = Cif[i] if MODEL_SETTING['GaMaCoTe0'] != "MAX" else np.max(
                Cif)
            SpCo_mz_ReVa[i, :] = rmtUtil.calRealDiLessValue(
                yC_DiLeVa[i, :], Cif_Set)
        # temperature
        T_mz_ReVa[0, :] = rmtUtil.calRealDiLessValue(
            yT_DiLeVa[0, :], Tf, mode="TEMP")
        # NOTE
        ### solid phases ###
        # concentration
        for i in range(compNo):
            # dimensionless analysis: real value
            Cif_Set = Cif[i] if MODEL_SETTING['GaMaCoTe0'] != "MAX" else np.max(
                Cif)
            SpCosi_mzr_ReVa[i] = rmtUtil.calRealDiLessValue(
                yCs_DiLeVa[i, :], Cif_Set)
        # temperature
        # NOTE(review): converts yTs_DiLeVa[0:, :] but assigns into row 0
        # only, leaving the other rNo-1 rows of Ts_mzr_ReVa zero — confirm
        # this slicing is intended.
        Ts_mzr_ReVa[0, :] = rmtUtil.calRealDiLessValue(
            yTs_DiLeVa[0:, :], Tf, mode="TEMP")
        # result
        res = {
            "data1": SpCo_mz_ReVa,
            "data2": T_mz_ReVa,
            "data3": SpCosi_mzr_ReVa,
            "data4": Ts_mzr_ReVa
        }
        # return
        return res
    except Exception as e:
        # re-raise unchanged; `e` kept only for debugger inspection
        raise
# NOTE
# homogenous modeling results
def sortResult4(y, params1, params2):
    """
    sort result of homogenous modeling
    args:
        y: raw solver output; last two layers are pressure and temperature
           (temperature layer absent when isothermal)
        params1:
            compNo: component number
            varNoRows: 1
            varNoColumns: number of finite nodes in the z direction
        params2:
            Cif: species concentration of feed gas
            Tf: feed temperature
            Pf: feed pressure
            processType: process setting key
    returns:
        dict with data1 (conc), data2 (pressure) and data3 (temperature)
    """
    # distribute y[i] value through the reactor length
    # try/except
    try:
        compNo, varNoRows, varNoColumns = params1
        Cif, Tf, Pf, processType = params2
        # concentration
        # isothermal runs carry no temperature layer, so only the pressure
        # layer is stripped in that branch
        dataYs_Concentration_DiLeVa = y[:-2] if processType != PROCESS_SETTING['ISO-THER'] else y[:-1]
        # pressure
        dataYs_Pressure_DiLeVa = y[-2].reshape(
            (1, varNoColumns)) if processType != PROCESS_SETTING['ISO-THER'] else y[-1].reshape((1, varNoColumns))
        # temperature
        # isothermal: zero-filled dimensionless temperature row
        dataYs_Temperature_DiLeVa = y[-1].reshape((1, varNoColumns)) if processType != PROCESS_SETTING['ISO-THER'] else np.repeat(
            0, varNoColumns).reshape((1, varNoColumns))
        # convert to real value
        # concentration
        SpCo_mz_ReVa = np.zeros((compNo, varNoColumns))
        T_mz_ReVa = np.zeros((1, varNoColumns))
        P_mz_ReVa = np.zeros((1, varNoColumns))
        # concentration
        for i in range(compNo):
            # dimensionless analysis: real value
            Cif_Set = Cif[i] if MODEL_SETTING['GaMaCoTe0'] != "MAX" else np.max(
                Cif)
            SpCo_mz_ReVa[i, :] = rmtUtil.calRealDiLessValue(
                dataYs_Concentration_DiLeVa[i, :], Cif_Set)
        # pressure
        P_mz_ReVa[0, :] = rmtUtil.calRealDiLessValue(
            dataYs_Pressure_DiLeVa[0, :], Pf)
        # temperature
        T_mz_ReVa[0, :] = rmtUtil.calRealDiLessValue(
            dataYs_Temperature_DiLeVa[0, :], Tf, mode="TEMP")
        # result
        res = {
            "data1": SpCo_mz_ReVa,
            "data2": P_mz_ReVa,
            "data3": T_mz_ReVa
        }
        # return
        return res
    except Exception as e:
        # re-raise unchanged; `e` kept only for debugger inspection
        raise
def sortResult5(y, params1, params2):
    """
    sort result of homogenous modeling
    args:
        y: raw solver output; last row holds temperature unless isothermal
        params1:
            compNo: component number
            varNoRows: 1
            varNoColumns: number of finite nodes in the r direction
        params2:
            Cif: species concentration of feed gas
            Tf: feed temperature
            processType: process setting key
    returns:
        dict with data1 (real concentrations) and data2 (real temperature)
    """
    # distribute y[i] value through the reactor length
    # try/except
    try:
        compNo, varNoRows, varNoColumns = params1
        Cif, Tf, processType = params2
        # concentration
        # non-isothermal: all but the last row are concentrations
        dataYs_Concentration_DiLeVa = y[:-1] if processType != PROCESS_SETTING['ISO-THER'] else y[:]
        # temperature
        # isothermal: zero-filled dimensionless temperature row
        dataYs_Temperature_DiLeVa = y[-1, :].reshape((1, varNoColumns)) if processType != PROCESS_SETTING['ISO-THER'] else np.repeat(
            0, varNoColumns).reshape((1, varNoColumns))
        # convert to real value
        # concentration
        SpCo_mz_ReVa = np.zeros((compNo, varNoColumns))
        T_mz_ReVa = np.zeros((1, varNoColumns))
        # concentration
        for i in range(compNo):
            # dimensionless analysis: real value
            Cif_Set = Cif[i] if MODEL_SETTING['GaMaCoTe0'] != "MAX" else np.max(
                Cif)
            SpCo_mz_ReVa[i, :] = rmtUtil.calRealDiLessValue(
                dataYs_Concentration_DiLeVa[i, :], Cif_Set)
        # temperature
        T_mz_ReVa[0, :] = rmtUtil.calRealDiLessValue(
            dataYs_Temperature_DiLeVa[0, :], Tf, mode="TEMP")
        # result
        res = {
            "data1": SpCo_mz_ReVa,
            "data2": T_mz_ReVa
        }
        # return
        return res
    except Exception as e:
        # re-raise unchanged; `e` kept only for debugger inspection
        raise
# NOTE
# plot results
def plotResultsSteadyState(dataPack):
    '''
    plot results
    args:
        dataPack: list of result dicts; only dataPack[0] is read here:
            modelId: model id,
            processType: process type,
            successStatus: ode success status,
            computation-time: elapsed time [s],
            dataShape: dataShape,
            labelList: labelList,
            indexList: indexList,
            dataTime: [],
            dataXs: dataXs,
            dataYCons1: dataYs_Concentration_DiLeVa,
            dataYCons2: dataYs_Concentration_ReVa,
            dataYTemp1: dataYs_Temperature_DiLeVa,
            dataYTemp2: dataYs_Temperature_ReVa,
            dataYs: dataYs_All
    '''
    # try/except
    try:
        # model info
        modelId = dataPack[0]['modelId']
        processType = dataPack[0]['processType']
        # calculation status
        successStatus = dataPack[0]['successStatus']
        # data
        dataXs = dataPack[0]['dataXs']
        dataYs_All = dataPack[0]['dataYs']
        labelList = dataPack[0]['labelList']
        indexList = dataPack[0]['indexList']
        elapsed = dataPack[0]['computation-time']
        # set
        plotTitle = f"Steady-State Modeling {modelId}, computation-time {elapsed}"
        xLabelSet = "Reactor Length (m)"
        yLabelSet = ("Concentration (mol/$m^3$)",
                     "Pressure (bar)", "Temperature (K)")
        compNo = indexList[0]
        indexPressure = indexList[1]
        indexTemp = indexList[2]
        # check
        if successStatus is True:
            # plot setting: build (x,y) series
            XYList = pltc.plots2DSetXYList(dataXs, dataYs_All)
            # -> add label
            dataList = pltc.plots2DSetDataList(XYList, labelList)
            # datalists
            # non-isothermal: conc + pressure + temperature;
            # isothermal: conc + pressure only
            dataLists = [dataList[0:compNo], dataList[indexPressure], dataList[indexTemp]
                         ] if processType != PROCESS_SETTING['ISO-THER'] else [dataList[0:compNo], dataList[indexPressure]]
            # select datalist
            _dataListsSelected = selectFromListByIndex([], dataLists)
            # subplot result
            # pltc.plots2DSub(_dataListsSelected, xLabelSet,
            #                 yLabelSet, plotTitle)
            # individual figure
            for f in range(len(_dataListsSelected)):
                pltc.plots2D(_dataListsSelected[f], xLabelSet,
                             yLabelSet[f], plotTitle)
        else:
            # NOTE(review): rebinding this local has no effect for the caller
            dataPack = []
    except Exception as e:
        # re-raise unchanged; `e` kept only for debugger inspection
        raise
def plotResultsDynamic(resPack, tNo):
    '''
    plot results
    args:
        resPack:
            computation-time: elapsed time [s]
            dataPack: list (one entry per stored time step) of:
                modelId: model id,
                processType: process type,
                successStatus: ode success status,
                dataShape: dataShape,
                labelList: labelList,
                indexList: indexList,
                dataTime: time interval,
                dataXs: dataXs,
                dataYCons1: dataYs_Concentration_DiLeVa,
                dataYCons2: dataYs_Concentration_ReVa,
                dataYTemp1: dataYs_Temperature_DiLeVa,
                dataYTemp2: dataYs_Temperature_ReVa,
                dataYs: dataYs_All
        tNo: number of stored time steps
    '''
    # try/except
    try:
        # get
        elapsed = resPack['computation-time']
        dataPack = resPack['dataPack']
        # model info
        modelId = dataPack[0]['modelId']
        processType = dataPack[0]['processType']
        # calculation status
        successStatus = dataPack[0]['successStatus']
        # data
        dataXs = dataPack[0]['dataXs']
        dataYs_All = dataPack[0]['dataYs']
        labelList = dataPack[0]['labelList']
        indexList = dataPack[0]['indexList']
        # set
        # NOTE(review): title says "Steady-State" although this plots dynamic
        # results — looks copied from plotResultsSteadyState; confirm intent.
        plotTitle = f"Steady-State Modeling {modelId}, computation-time {elapsed}"
        xLabelSet = "Reactor Length (m)"
        yLabelSet = ("Concentration (mol/$m^3$)", "Temperature (K)")
        compNo = indexList[0]
        indexPressure = indexList[1]
        indexTemp = indexList[2]
        # random tNo
        # plot only 2 randomly-selected time steps
        tNoList = list(range(tNo))
        tNoRandomList = selectRandomForList(tNoList, 2)
        # REVIEW
        # display result at specific time
        for i in tNoRandomList:
            # calculation status
            successStatus = dataPack[i]['successStatus']
            # time
            dataTime = dataPack[i]['dataTime']
            # update title
            plotTitle_Update = plotTitle + f" at t={dataTime}"
            # check
            if successStatus is True:
                # data
                dataXs = dataPack[i]['dataXs']
                dataYs_All = dataPack[i]['dataYs']
                # plot setting: build (x,y) series
                XYList = pltc.plots2DSetXYList(dataXs, dataYs_All)
                # -> add label
                dataList = pltc.plots2DSetDataList(XYList, labelList)
                # datalists
                # non-isothermal: conc + temperature; isothermal: conc only
                dataLists = [dataList[0:compNo], dataList[indexTemp]
                             ] if processType != PROCESS_SETTING['ISO-THER'] else [dataList[0:compNo]]
                # select datalist
                _dataListsSelected = selectFromListByIndex([], dataLists)
                # subplot result
                # pltc.plots2DSub(_dataListsSelected, xLabelSet,
                #                 yLabelSet, plotTitle)
                # individual figure
                for f in range(len(_dataListsSelected)):
                    pltc.plots2D(_dataListsSelected[f], xLabelSet,
                                 yLabelSet[f], plotTitle_Update)
            else:
                # NOTE(review): rebinding this local has no effect outside
                dataPack = []
    except Exception as e:
        # re-raise unchanged; `e` kept only for debugger inspection
        raise
| 34.569565
| 133
| 0.565778
| 1,553
| 15,902
| 5.673535
| 0.137154
| 0.022472
| 0.017024
| 0.033708
| 0.829304
| 0.800136
| 0.792419
| 0.761094
| 0.761094
| 0.755419
| 0
| 0.014072
| 0.343102
| 15,902
| 459
| 134
| 34.64488
| 0.829408
| 0.293611
| 0
| 0.703884
| 0
| 0
| 0.066243
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033981
| false
| 0
| 0.024272
| 0
| 0.082524
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8c003aed5f07c398f31faa94ad9d8e501692f7c7
| 173
|
py
|
Python
|
tests/parser/rewriting.projection.11.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/rewriting.projection.11.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
tests/parser/rewriting.projection.11.test.py
|
veltri/DLV2
|
944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
|
[
"Apache-2.0"
] | null | null | null |
input = """
dummy.
c(0,0).
d :- c(_,a_long__but_still_nice_constant), dummy.
"""
output = """
dummy.
c(0,0).
d :- c(_,a_long__but_still_nice_constant), dummy.
"""
| 13.307692
| 50
| 0.606936
| 28
| 173
| 3.25
| 0.428571
| 0.131868
| 0.153846
| 0.175824
| 0.879121
| 0.879121
| 0.879121
| 0.879121
| 0.879121
| 0.879121
| 0
| 0.027972
| 0.17341
| 173
| 12
| 51
| 14.416667
| 0.608392
| 0
| 0
| 0.8
| 0
| 0
| 0.812121
| 0.448485
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
8c2f6122cbe5bc20bcebea5ebbafb9212b54532c
| 9,874
|
py
|
Python
|
engine/validation-test/storagetest/core/test_longhorn_basic.py
|
kaxing/longhorn-tests
|
e8e95f1446155f20cbbb9b47d8e139de09d567f0
|
[
"Apache-2.0"
] | 10
|
2021-01-25T00:52:46.000Z
|
2022-02-20T01:49:56.000Z
|
engine/validation-test/storagetest/core/test_longhorn_basic.py
|
kaxing/longhorn-tests
|
e8e95f1446155f20cbbb9b47d8e139de09d567f0
|
[
"Apache-2.0"
] | 273
|
2019-06-12T17:43:49.000Z
|
2022-03-29T09:06:02.000Z
|
engine/validation-test/storagetest/core/test_longhorn_basic.py
|
kaxing/longhorn-tests
|
e8e95f1446155f20cbbb9b47d8e139de09d567f0
|
[
"Apache-2.0"
] | 24
|
2019-06-12T04:03:00.000Z
|
2022-03-21T08:08:47.000Z
|
from common_fixtures import * # NOQA
import websocket as ws
import base64
import pytest
VOLUME_DRIVER = "rancher-longhorn"
STACK_NAME_PREFIX = "volume-"
CONTROLLER = "controller"
REPLICA = "replica"
def test_container_with_volume_execute(client, test_name):
    # Verify a container with a longhorn-backed volume can exec a command
    # that writes to the mounted volume and see its output echoed back.
    volume_name = 'vol' + test_name
    cleanup_items = []
    cleanup_vols = []
    try:
        c = client.create_container(
            name=test_name,
            imageUuid=TEST_IMAGE_UUID,
            networkMode=MANAGED_NETWORK,
            dataVolumes=[volume_name + ":/vol"],
            volumeDriver=VOLUME_DRIVER,
            attachStdin=True,
            attachStdout=True,
            tty=True,
            command='/bin/bash')
        cleanup_items.append(c)
        container = client.wait_success(c, timeout=120)
        # exactly one volume must have been created for the container
        vols = client.list_volume(name=volume_name)
        assert len(vols) == 1
        cleanup_vols.append(vols[0])
        test_msg = 'EXEC_WORKS'
        assert_execute(container, test_msg)
    finally:
        # remove the container first, then delete and purge the volume
        delete_all(client, cleanup_items)
        for volume in cleanup_vols:
            volume = client.wait_success(client.delete(volume))
            assert volume.state == "removed"
            volume = client.wait_success(volume.purge())
            assert volume.state == "purged"
def test_container_migrate_volume(client, test_name):
    # Verify a longhorn volume written by a container on one host can be
    # re-attached by a new container on a different host with data intact.
    volume_name = 'vol' + test_name
    hosts = client.list_host(kind='docker', removed_null=True)
    # the test schedules on hosts[0] and hosts[1]; requires > 2 hosts
    assert len(hosts) > 2
    test_msg = 'EXEC_WORKS'
    cleanup_items = []
    cleanup_vols = []
    try:
        c1 = client.create_container(
            name=test_name,
            imageUuid=TEST_IMAGE_UUID,
            networkMode=MANAGED_NETWORK,
            dataVolumes=[volume_name + ":/vol"],
            volumeDriver=VOLUME_DRIVER,
            requestedHostId=hosts[0].id,
            attachStdin=True,
            attachStdout=True,
            tty=True,
            command='/bin/bash')
        cleanup_items.append(c1)
        container = client.wait_success(c1, timeout=120)
        vols = client.list_volume(name=volume_name)
        assert len(vols) == 1
        cleanup_vols.append(vols[0])
        # write the marker file from the first container
        assert_execute(container, test_msg)
        client.wait_success(client.delete(c1))
        cleanup_items.remove(c1)
        # reuse the same named volume from a container on the second host
        c2 = client.create_container(
            name=test_name + "-2",
            imageUuid=TEST_IMAGE_UUID,
            networkMode=MANAGED_NETWORK,
            dataVolumes=[volume_name + ":/vol"],
            volumeDriver=VOLUME_DRIVER,
            requestedHostId=hosts[1].id,
            attachStdin=True,
            attachStdout=True,
            tty=True,
            command='/bin/bash')
        cleanup_items.append(c2)
        container = client.wait_success(c2, timeout=180)
        # the data written by c1 must still be readable from c2
        assert_read(container, test_msg)
    finally:
        delete_all(client, cleanup_items)
        for volume in cleanup_vols:
            volume = client.wait_success(client.delete(volume))
            assert volume.state == "removed"
            volume = client.wait_success(volume.purge())
            assert volume.state == "purged"
def test_container_replica_down(admin_client, client, test_name):
    # Verify the volume stays readable and writable after one of its two
    # replicas is deleted.
    volume_name = 'vol' + test_name
    cleanup_items = []
    cleanup_vols = []
    try:
        c = client.create_container(
            name=test_name,
            imageUuid=TEST_IMAGE_UUID,
            networkMode=MANAGED_NETWORK,
            dataVolumes=[volume_name + ":/vol"],
            volumeDriver=VOLUME_DRIVER,
            attachStdin=True,
            attachStdout=True,
            tty=True,
            command='/bin/bash')
        cleanup_items.append(c)
        container = client.wait_success(c, timeout=120)
        vols = client.list_volume(name=volume_name)
        assert len(vols) == 1
        cleanup_vols.append(vols[0])
        test_msg = 'EXEC_WORKS'
        assert_execute(container, test_msg)
        replicas = get_replica_containers(admin_client, client, volume_name)
        assert len(replicas) == 2
        # take one replica down
        rep1 = client.wait_success(client.delete(replicas[0]))
        assert rep1.state == 'removed'
        # make sure data is intact
        assert_read(container, test_msg)
        test_msg = 'EXEC_WORKS_AFTER_REMOVE'
        assert_execute(container, test_msg)
        # TODO implement check of volume status, wait it to be UP
    finally:
        delete_all(client, cleanup_items)
        for volume in cleanup_vols:
            volume = client.wait_success(client.delete(volume))
            assert volume.state == "removed"
            volume = client.wait_success(volume.purge())
            assert volume.state == "purged"
@pytest.mark.skip(reason="need a way to stop replica without HA it")
def test_container_both_replica_down_and_rebuild(
        admin_client, client, test_name):
    """Stop both replicas and remove the controller, then verify a new
    container re-attaches the volume and can still read the data.

    Fix: the two bare ``print`` statements used Python 2 syntax, which is a
    SyntaxError under Python 3; the function form prints identically for a
    single argument on both interpreters.
    """
    volume_name = 'vol' + test_name
    cleanup_items = []
    cleanup_vols = []
    try:
        c1 = client.create_container(
            name=test_name,
            imageUuid=TEST_IMAGE_UUID,
            networkMode=MANAGED_NETWORK,
            dataVolumes=[volume_name + ":/vol"],
            volumeDriver=VOLUME_DRIVER,
            attachStdin=True,
            attachStdout=True,
            tty=True,
            command='/bin/bash')
        cleanup_items.append(c1)
        container = client.wait_success(c1, timeout=120)
        vols = client.list_volume(name=volume_name)
        assert len(vols) == 1
        cleanup_vols.append(vols[0])
        test_msg = 'EXEC_WORKS'
        assert_execute(container, test_msg)
        replicas = get_replica_containers(admin_client, client, volume_name)
        assert len(replicas) == 2
        # stop the first replica
        rep1 = client.wait_success(replicas[0].stop())
        assert rep1.state == 'stopped'
        # make sure data is intact
        assert_read(container, test_msg)
        test_msg = 'EXEC_WORKS_AFTER_STOP'
        assert_execute(container, test_msg)
        # stop the second replica
        rep2 = client.wait_success(replicas[1].stop())
        assert rep2.state == 'stopped'
        # now controller should be stopped, volume won't be available
        controller = get_controller_container(
            admin_client, client, volume_name)
        con = client.wait_success(client.delete(controller))
        assert con.state == 'removed'
        print("wait_for container remove creation")
        client.wait_success(client.delete(c1))
        cleanup_items.remove(c1)
        # now a new controller should be started, use recent replicas
        # wait for volume to be in attached state again.
        c2 = client.create_container(
            name=test_name + "-2",
            imageUuid=TEST_IMAGE_UUID,
            networkMode=MANAGED_NETWORK,
            dataVolumes=[volume_name + ":/vol"],
            volumeDriver=VOLUME_DRIVER,
            attachStdin=True,
            attachStdout=True,
            tty=True,
            command='/bin/bash')
        cleanup_items.append(c2)
        print("wait_for new container creation")
        container = client.wait_success(c2, timeout=180)
        assert_read(container, test_msg)
        # TODO implement check of volume status, wait it to be UP
    finally:
        delete_all(client, cleanup_items)
        for volume in cleanup_vols:
            volume = client.wait_success(client.delete(volume))
            assert volume.state == "removed"
            volume = client.wait_success(volume.purge())
            assert volume.state == "purged"
def assert_execute(container, test_msg):
    # Exec into the container, echo test_msg into /vol/test via tee, and
    # wait until the websocket stream echoes the full message back.
    execute = container.execute(attachStdin=True,
                                attachStdout=True,
                                command=['/bin/bash', '-c',
                                         'echo ' + test_msg +
                                         ' | tee /vol/test'],
                                tty=True)
    conn = ws.create_connection(execute.url + '?token=' + execute.token,
                                timeout=10)
    # Python is weird about closures
    closure_wrapper = {
        'result': ''
    }

    def exec_check():
        msg = conn.recv()
        # frames arrive base64-encoded; accumulate until complete.
        # NOTE(review): b64decode returns bytes on Python 3, so this str
        # concatenation assumes Python 2 — confirm the interpreter in use.
        closure_wrapper['result'] += base64.b64decode(msg)
        return test_msg == closure_wrapper['result'].rstrip()

    wait_for(exec_check,
             'Timeout waiting for exec msg %s' % test_msg)
def assert_read(container, test_msg):
    # Exec into the container, cat /vol/test, and wait until the websocket
    # stream returns exactly test_msg (i.e. the volume data survived).
    execute = container.execute(attachStdin=True,
                                attachStdout=True,
                                command=['/bin/bash', '-c',
                                         'cat /vol/test'],
                                tty=True)
    conn = ws.create_connection(execute.url + '?token=' + execute.token,
                                timeout=10)
    # Python is weird about closures
    closure_wrapper = {
        'result': ''
    }

    def exec_check():
        msg = conn.recv()
        # frames arrive base64-encoded; accumulate until complete.
        # NOTE(review): b64decode returns bytes on Python 3, so this str
        # concatenation assumes Python 2 — confirm the interpreter in use.
        closure_wrapper['result'] += base64.b64decode(msg)
        return test_msg == closure_wrapper['result'].rstrip()

    wait_for(exec_check,
             'Timeout waiting for exec msg %s' % test_msg)
def get_system_stack_name(volume_name):
    """Return the name of the per-volume longhorn system stack."""
    prefix = STACK_NAME_PREFIX
    return prefix + volume_name
def get_replica_containers(admin_client, client, volume_name):
    """List the replica-service containers of *volume_name*'s system stack.

    Resolves the system stack for the volume, looks up its REPLICA service,
    and returns that service's containers.
    """
    system_stack = get_system_stack_name(volume_name)
    _stack, replica_service = get_env_service_by_name(client,
                                                      system_stack,
                                                      REPLICA)
    return get_service_containers(admin_client, replica_service)
def get_controller_container(admin_client, client, volume_name):
    """Return the (single) controller container of *volume_name*'s stack.

    Resolves the system stack for the volume, looks up its CONTROLLER
    service, and returns the first container of that service.
    """
    system_stack = get_system_stack_name(volume_name)
    _stack, controller_service = get_env_service_by_name(client,
                                                         system_stack,
                                                         CONTROLLER)
    containers = get_service_containers(admin_client, controller_service)
    return containers[0]
| 32.913333
| 76
| 0.591452
| 1,061
| 9,874
| 5.26673
| 0.147974
| 0.048318
| 0.060845
| 0.044381
| 0.851467
| 0.827309
| 0.822298
| 0.80655
| 0.784001
| 0.784001
| 0
| 0.010816
| 0.316488
| 9,874
| 299
| 77
| 33.023411
| 0.817158
| 0.040004
| 0
| 0.792952
| 0
| 0
| 0.061576
| 0.004647
| 0
| 0
| 0
| 0.003344
| 0.136564
| 0
| null | null | 0
| 0.017621
| null | null | 0.008811
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8c3560bdad10c2961fbc0d5a0f891fd9e59e8138
| 26,371
|
py
|
Python
|
.c9/metadata/environment/dj_proposals_candidates/models.py
|
lfalvarez/proposals-core
|
4cbf4ca27ddb69919cfa218fc45f17ce1eef7ee5
|
[
"Apache-2.0"
] | null | null | null |
.c9/metadata/environment/dj_proposals_candidates/models.py
|
lfalvarez/proposals-core
|
4cbf4ca27ddb69919cfa218fc45f17ce1eef7ee5
|
[
"Apache-2.0"
] | null | null | null |
.c9/metadata/environment/dj_proposals_candidates/models.py
|
lfalvarez/proposals-core
|
4cbf4ca27ddb69919cfa218fc45f17ce1eef7ee5
|
[
"Apache-2.0"
] | null | null | null |
{"filter":false,"title":"models.py","tooltip":"/dj_proposals_candidates/models.py","undoManager":{"mark":100,"position":100,"stack":[[{"start":{"row":30,"column":36},"end":{"row":31,"column":0},"action":"insert","lines":["",""],"id":246},{"start":{"row":31,"column":0},"end":{"row":31,"column":4},"action":"insert","lines":[" "]},{"start":{"row":31,"column":4},"end":{"row":31,"column":5},"action":"insert","lines":["r"]},{"start":{"row":31,"column":5},"end":{"row":31,"column":6},"action":"insert","lines":["e"]},{"start":{"row":31,"column":6},"end":{"row":31,"column":7},"action":"insert","lines":["p"]},{"start":{"row":31,"column":7},"end":{"row":31,"column":8},"action":"insert","lines":["r"]},{"start":{"row":31,"column":8},"end":{"row":31,"column":9},"action":"insert","lines":["e"]},{"start":{"row":31,"column":9},"end":{"row":31,"column":10},"action":"insert","lines":["s"]},{"start":{"row":31,"column":10},"end":{"row":31,"column":11},"action":"insert","lines":["e"]},{"start":{"row":31,"column":11},"end":{"row":31,"column":12},"action":"insert","lines":["n"]},{"start":{"row":31,"column":12},"end":{"row":31,"column":13},"action":"insert","lines":["t"]}],[{"start":{"row":31,"column":13},"end":{"row":31,"column":14},"action":"insert","lines":["a"],"id":247},{"start":{"row":31,"column":14},"end":{"row":31,"column":15},"action":"insert","lines":["t"]},{"start":{"row":31,"column":15},"end":{"row":31,"column":16},"action":"insert","lines":["i"]},{"start":{"row":31,"column":16},"end":{"row":31,"column":17},"action":"insert","lines":["o"]},{"start":{"row":31,"column":17},"end":{"row":31,"column":18},"action":"insert","lines":["n"]}],[{"start":{"row":31,"column":18},"end":{"row":31,"column":19},"action":"insert","lines":[" "],"id":248},{"start":{"row":31,"column":19},"end":{"row":31,"column":20},"action":"insert","lines":["="]}],[{"start":{"row":31,"column":20},"end":{"row":31,"column":21},"action":"insert","lines":[" 
"],"id":249}],[{"start":{"row":31,"column":21},"end":{"row":31,"column":22},"action":"insert","lines":["N"],"id":250}],[{"start":{"row":31,"column":21},"end":{"row":31,"column":22},"action":"remove","lines":["N"],"id":251},{"start":{"row":31,"column":21},"end":{"row":31,"column":33},"action":"insert","lines":["NDArrayField"]}],[{"start":{"row":31,"column":33},"end":{"row":31,"column":35},"action":"insert","lines":["()"],"id":252}],[{"start":{"row":41,"column":12},"end":{"row":41,"column":30},"action":"insert","lines":["NamedAndDescripted"],"id":253}],[{"start":{"row":41,"column":30},"end":{"row":41,"column":31},"action":"insert","lines":[","],"id":254}],[{"start":{"row":41,"column":31},"end":{"row":41,"column":32},"action":"insert","lines":[" "],"id":255}],[{"start":{"row":45,"column":16},"end":{"row":45,"column":34},"action":"insert","lines":["NamedAndDescripted"],"id":256}],[{"start":{"row":45,"column":34},"end":{"row":45,"column":35},"action":"insert","lines":[","],"id":257}],[{"start":{"row":45,"column":35},"end":{"row":45,"column":36},"action":"insert","lines":[" 
"],"id":258}],[{"start":{"row":38,"column":4},"end":{"row":38,"column":8},"action":"remove","lines":["pass"],"id":259},{"start":{"row":38,"column":4},"end":{"row":38,"column":5},"action":"insert","lines":["c"]},{"start":{"row":38,"column":5},"end":{"row":38,"column":6},"action":"insert","lines":["a"]},{"start":{"row":38,"column":6},"end":{"row":38,"column":7},"action":"insert","lines":["n"]},{"start":{"row":38,"column":7},"end":{"row":38,"column":8},"action":"insert","lines":["d"]},{"start":{"row":38,"column":8},"end":{"row":38,"column":9},"action":"insert","lines":["i"]},{"start":{"row":38,"column":9},"end":{"row":38,"column":10},"action":"insert","lines":["d"]},{"start":{"row":38,"column":10},"end":{"row":38,"column":11},"action":"insert","lines":["a"]},{"start":{"row":38,"column":11},"end":{"row":38,"column":12},"action":"insert","lines":["t"]},{"start":{"row":38,"column":12},"end":{"row":38,"column":13},"action":"insert","lines":["e"]}],[{"start":{"row":38,"column":13},"end":{"row":38,"column":14},"action":"insert","lines":[" "],"id":260},{"start":{"row":38,"column":14},"end":{"row":38,"column":15},"action":"insert","lines":["="]}],[{"start":{"row":38,"column":15},"end":{"row":38,"column":16},"action":"insert","lines":[" 
"],"id":261},{"start":{"row":38,"column":16},"end":{"row":38,"column":17},"action":"insert","lines":["m"]},{"start":{"row":38,"column":17},"end":{"row":38,"column":18},"action":"insert","lines":["o"]},{"start":{"row":38,"column":18},"end":{"row":38,"column":19},"action":"insert","lines":["d"]},{"start":{"row":38,"column":19},"end":{"row":38,"column":20},"action":"insert","lines":["e"]}],[{"start":{"row":38,"column":20},"end":{"row":38,"column":21},"action":"insert","lines":["l"],"id":262},{"start":{"row":38,"column":21},"end":{"row":38,"column":22},"action":"insert","lines":["s"]},{"start":{"row":38,"column":22},"end":{"row":38,"column":23},"action":"insert","lines":["."]},{"start":{"row":38,"column":23},"end":{"row":38,"column":24},"action":"insert","lines":["F"]},{"start":{"row":38,"column":24},"end":{"row":38,"column":25},"action":"insert","lines":["o"]},{"start":{"row":38,"column":25},"end":{"row":38,"column":26},"action":"insert","lines":["r"]},{"start":{"row":38,"column":26},"end":{"row":38,"column":27},"action":"insert","lines":["e"]}],[{"start":{"row":38,"column":23},"end":{"row":38,"column":27},"action":"remove","lines":["Fore"],"id":263},{"start":{"row":38,"column":23},"end":{"row":38,"column":33},"action":"insert","lines":["ForeignKey"]}],[{"start":{"row":38,"column":33},"end":{"row":38,"column":35},"action":"insert","lines":["()"],"id":264}],[{"start":{"row":38,"column":34},"end":{"row":38,"column":35},"action":"insert","lines":["C"],"id":265}],[{"start":{"row":38,"column":34},"end":{"row":38,"column":35},"action":"remove","lines":["C"],"id":266},{"start":{"row":38,"column":34},"end":{"row":38,"column":43},"action":"insert","lines":["Candidate"]}],[{"start":{"row":38,"column":44},"end":{"row":39,"column":0},"action":"insert","lines":["",""],"id":267},{"start":{"row":39,"column":0},"end":{"row":39,"column":4},"action":"insert","lines":[" 
"]},{"start":{"row":39,"column":4},"end":{"row":39,"column":5},"action":"insert","lines":["p"]},{"start":{"row":39,"column":5},"end":{"row":39,"column":6},"action":"insert","lines":["r"]},{"start":{"row":39,"column":6},"end":{"row":39,"column":7},"action":"insert","lines":["o"]},{"start":{"row":39,"column":7},"end":{"row":39,"column":8},"action":"insert","lines":["p"]},{"start":{"row":39,"column":8},"end":{"row":39,"column":9},"action":"insert","lines":["o"]}],[{"start":{"row":39,"column":9},"end":{"row":39,"column":10},"action":"insert","lines":["s"],"id":268},{"start":{"row":39,"column":10},"end":{"row":39,"column":11},"action":"insert","lines":["a"]},{"start":{"row":39,"column":11},"end":{"row":39,"column":12},"action":"insert","lines":["l"]}],[{"start":{"row":39,"column":12},"end":{"row":39,"column":13},"action":"insert","lines":[" "],"id":269},{"start":{"row":39,"column":13},"end":{"row":39,"column":14},"action":"insert","lines":["="]}],[{"start":{"row":39,"column":14},"end":{"row":39,"column":15},"action":"insert","lines":[" 
"],"id":270}],[{"start":{"row":39,"column":15},"end":{"row":39,"column":16},"action":"insert","lines":["m"],"id":271},{"start":{"row":39,"column":16},"end":{"row":39,"column":17},"action":"insert","lines":["o"]},{"start":{"row":39,"column":17},"end":{"row":39,"column":18},"action":"insert","lines":["d"]},{"start":{"row":39,"column":18},"end":{"row":39,"column":19},"action":"insert","lines":["e"]},{"start":{"row":39,"column":19},"end":{"row":39,"column":20},"action":"insert","lines":["l"]},{"start":{"row":39,"column":20},"end":{"row":39,"column":21},"action":"insert","lines":["s"]},{"start":{"row":39,"column":21},"end":{"row":39,"column":22},"action":"insert","lines":["."]},{"start":{"row":39,"column":22},"end":{"row":39,"column":23},"action":"insert","lines":["F"]}],[{"start":{"row":39,"column":23},"end":{"row":39,"column":24},"action":"insert","lines":["o"],"id":272}],[{"start":{"row":39,"column":22},"end":{"row":39,"column":24},"action":"remove","lines":["Fo"],"id":273},{"start":{"row":39,"column":22},"end":{"row":39,"column":32},"action":"insert","lines":["ForeignKey"]}],[{"start":{"row":39,"column":32},"end":{"row":39,"column":34},"action":"insert","lines":["()"],"id":274}],[{"start":{"row":39,"column":33},"end":{"row":39,"column":34},"action":"insert","lines":["P"],"id":275},{"start":{"row":39,"column":34},"end":{"row":39,"column":35},"action":"insert","lines":["r"]},{"start":{"row":39,"column":35},"end":{"row":39,"column":36},"action":"insert","lines":["o"]}],[{"start":{"row":39,"column":33},"end":{"row":39,"column":36},"action":"remove","lines":["Pro"],"id":276},{"start":{"row":39,"column":33},"end":{"row":39,"column":41},"action":"insert","lines":["Proposal"]}],[{"start":{"row":38,"column":43},"end":{"row":38,"column":44},"action":"insert","lines":[","],"id":277}],[{"start":{"row":38,"column":44},"end":{"row":38,"column":45},"action":"insert","lines":[" 
"],"id":278},{"start":{"row":38,"column":45},"end":{"row":38,"column":46},"action":"insert","lines":["r"]},{"start":{"row":38,"column":46},"end":{"row":38,"column":47},"action":"insert","lines":["e"]},{"start":{"row":38,"column":47},"end":{"row":38,"column":48},"action":"insert","lines":["l"]}],[{"start":{"row":38,"column":45},"end":{"row":38,"column":48},"action":"remove","lines":["rel"],"id":279},{"start":{"row":38,"column":45},"end":{"row":38,"column":57},"action":"insert","lines":["related_name"]}],[{"start":{"row":38,"column":57},"end":{"row":38,"column":58},"action":"insert","lines":["="],"id":280}],[{"start":{"row":38,"column":58},"end":{"row":38,"column":59},"action":"insert","lines":[" "],"id":281}],[{"start":{"row":38,"column":58},"end":{"row":38,"column":59},"action":"remove","lines":[" "],"id":282}],[{"start":{"row":38,"column":58},"end":{"row":38,"column":60},"action":"insert","lines":["''"],"id":283}],[{"start":{"row":38,"column":59},"end":{"row":38,"column":60},"action":"insert","lines":[" "],"id":284}],[{"start":{"row":38,"column":59},"end":{"row":38,"column":60},"action":"remove","lines":[" 
"],"id":285}],[{"start":{"row":38,"column":59},"end":{"row":38,"column":60},"action":"insert","lines":["c"],"id":286},{"start":{"row":38,"column":60},"end":{"row":38,"column":61},"action":"insert","lines":["o"]},{"start":{"row":38,"column":61},"end":{"row":38,"column":62},"action":"insert","lines":["m"]},{"start":{"row":38,"column":62},"end":{"row":38,"column":63},"action":"insert","lines":["m"]},{"start":{"row":38,"column":63},"end":{"row":38,"column":64},"action":"insert","lines":["i"]},{"start":{"row":38,"column":64},"end":{"row":38,"column":65},"action":"insert","lines":["t"]},{"start":{"row":38,"column":65},"end":{"row":38,"column":66},"action":"insert","lines":["m"]},{"start":{"row":38,"column":66},"end":{"row":38,"column":67},"action":"insert","lines":["e"]},{"start":{"row":38,"column":67},"end":{"row":38,"column":68},"action":"insert","lines":["n"]},{"start":{"row":38,"column":68},"end":{"row":38,"column":69},"action":"insert","lines":["t"]},{"start":{"row":38,"column":69},"end":{"row":38,"column":70},"action":"insert","lines":["s"]}],[{"start":{"row":39,"column":41},"end":{"row":39,"column":42},"action":"insert","lines":[","],"id":287}],[{"start":{"row":39,"column":42},"end":{"row":39,"column":43},"action":"insert","lines":[" 
"],"id":288},{"start":{"row":39,"column":43},"end":{"row":39,"column":44},"action":"insert","lines":["r"]},{"start":{"row":39,"column":44},"end":{"row":39,"column":45},"action":"insert","lines":["e"]},{"start":{"row":39,"column":45},"end":{"row":39,"column":46},"action":"insert","lines":["l"]}],[{"start":{"row":39,"column":43},"end":{"row":39,"column":46},"action":"remove","lines":["rel"],"id":289},{"start":{"row":39,"column":43},"end":{"row":39,"column":55},"action":"insert","lines":["related_name"]}],[{"start":{"row":39,"column":55},"end":{"row":39,"column":56},"action":"insert","lines":["="],"id":290}],[{"start":{"row":39,"column":56},"end":{"row":39,"column":58},"action":"insert","lines":["''"],"id":291}],[{"start":{"row":39,"column":57},"end":{"row":39,"column":58},"action":"insert","lines":[" "],"id":292}],[{"start":{"row":39,"column":57},"end":{"row":39,"column":58},"action":"remove","lines":[" "],"id":293}],[{"start":{"row":39,"column":57},"end":{"row":39,"column":58},"action":"insert","lines":["c"],"id":294},{"start":{"row":39,"column":58},"end":{"row":39,"column":59},"action":"insert","lines":["o"]},{"start":{"row":39,"column":59},"end":{"row":39,"column":60},"action":"insert","lines":["m"]},{"start":{"row":39,"column":60},"end":{"row":39,"column":61},"action":"insert","lines":["m"]},{"start":{"row":39,"column":61},"end":{"row":39,"column":62},"action":"insert","lines":["i"]},{"start":{"row":39,"column":62},"end":{"row":39,"column":63},"action":"insert","lines":["t"]}],[{"start":{"row":39,"column":62},"end":{"row":39,"column":63},"action":"remove","lines":["t"],"id":295}],[{"start":{"row":39,"column":62},"end":{"row":39,"column":63},"action":"insert","lines":["t"],"id":296},{"start":{"row":39,"column":63},"end":{"row":39,"column":64},"action":"insert","lines":["m"]},{"start":{"row":39,"column":64},"end":{"row":39,"column":65},"action":"insert","lines":["e"]},{"start":{"row":39,"column":65},"end":{"row":39,"column":66},"action":"insert","lines":["n"]},{"sta
rt":{"row":39,"column":66},"end":{"row":39,"column":67},"action":"insert","lines":["t"]},{"start":{"row":39,"column":67},"end":{"row":39,"column":68},"action":"insert","lines":["s"]}],[{"start":{"row":38,"column":71},"end":{"row":38,"column":72},"action":"insert","lines":[","],"id":297}],[{"start":{"row":38,"column":72},"end":{"row":38,"column":73},"action":"insert","lines":[" "],"id":298},{"start":{"row":38,"column":73},"end":{"row":38,"column":74},"action":"insert","lines":["o"]},{"start":{"row":38,"column":74},"end":{"row":38,"column":75},"action":"insert","lines":["n"]}],[{"start":{"row":38,"column":73},"end":{"row":38,"column":75},"action":"remove","lines":["on"],"id":299},{"start":{"row":38,"column":73},"end":{"row":38,"column":82},"action":"insert","lines":["on_delete"]}],[{"start":{"row":38,"column":81},"end":{"row":38,"column":82},"action":"remove","lines":["e"],"id":300}],[{"start":{"row":38,"column":81},"end":{"row":38,"column":82},"action":"insert","lines":["e"],"id":301},{"start":{"row":38,"column":82},"end":{"row":38,"column":83},"action":"insert","lines":["="]}],[{"start":{"row":38,"column":83},"end":{"row":38,"column":85},"action":"insert","lines":["''"],"id":302}],[{"start":{"row":38,"column":84},"end":{"row":38,"column":85},"action":"insert","lines":[" "],"id":303}],[{"start":{"row":38,"column":84},"end":{"row":38,"column":85},"action":"remove","lines":[" 
"],"id":304}],[{"start":{"row":38,"column":84},"end":{"row":38,"column":85},"action":"insert","lines":["C"],"id":305},{"start":{"row":38,"column":85},"end":{"row":38,"column":86},"action":"insert","lines":["a"]},{"start":{"row":38,"column":86},"end":{"row":38,"column":87},"action":"insert","lines":["s"]}],[{"start":{"row":38,"column":86},"end":{"row":38,"column":87},"action":"remove","lines":["s"],"id":306},{"start":{"row":38,"column":85},"end":{"row":38,"column":86},"action":"remove","lines":["a"]}],[{"start":{"row":38,"column":85},"end":{"row":38,"column":86},"action":"insert","lines":["A"],"id":307},{"start":{"row":38,"column":86},"end":{"row":38,"column":87},"action":"insert","lines":["S"]}],[{"start":{"row":38,"column":86},"end":{"row":38,"column":87},"action":"remove","lines":["S"],"id":308},{"start":{"row":38,"column":85},"end":{"row":38,"column":86},"action":"remove","lines":["A"]},{"start":{"row":38,"column":84},"end":{"row":38,"column":85},"action":"remove","lines":["C"]},{"start":{"row":38,"column":83},"end":{"row":38,"column":85},"action":"remove","lines":["''"]}],[{"start":{"row":38,"column":83},"end":{"row":38,"column":84},"action":"insert","lines":["m"],"id":309},{"start":{"row":38,"column":84},"end":{"row":38,"column":85},"action":"insert","lines":["o"]},{"start":{"row":38,"column":85},"end":{"row":38,"column":86},"action":"insert","lines":["d"]},{"start":{"row":38,"column":86},"end":{"row":38,"column":87},"action":"insert","lines":["e"]},{"start":{"row":38,"column":87},"end":{"row":38,"column":88},"action":"insert","lines":["l"]},{"start":{"row":38,"column":88},"end":{"row":38,"column":89},"action":"insert","lines":["s"]},{"start":{"row":38,"column":89},"end":{"row":38,"column":90},"action":"insert","lines":["."]},{"start":{"row":38,"column":90},"end":{"row":38,"column":91},"action":"insert","lines":["C"]},{"start":{"row":38,"column":91},"end":{"row":38,"column":92},"action":"insert","lines":["A"]},{"start":{"row":38,"column":92},"end":{"row":38,"co
lumn":93},"action":"insert","lines":["S"]}],[{"start":{"row":38,"column":90},"end":{"row":38,"column":93},"action":"remove","lines":["CAS"],"id":310},{"start":{"row":38,"column":90},"end":{"row":38,"column":99},"action":"insert","lines":["CASCADE()"]}],[{"start":{"row":38,"column":97},"end":{"row":38,"column":99},"action":"remove","lines":["()"],"id":311}],[{"start":{"row":39,"column":69},"end":{"row":39,"column":95},"action":"insert","lines":[", on_delete=models.CASCADE"],"id":312}],[{"start":{"row":24,"column":87},"end":{"row":24,"column":113},"action":"insert","lines":[", on_delete=models.CASCADE"],"id":313}],[{"start":{"row":25,"column":95},"end":{"row":25,"column":121},"action":"insert","lines":[", on_delete=models.CASCADE"],"id":314}],[{"start":{"row":29,"column":50},"end":{"row":29,"column":51},"action":"insert","lines":["\\"],"id":315}],[{"start":{"row":29,"column":50},"end":{"row":29,"column":51},"action":"remove","lines":["\\"],"id":316}],[{"start":{"row":29,"column":50},"end":{"row":30,"column":0},"action":"insert","lines":["",""],"id":317},{"start":{"row":30,"column":0},"end":{"row":30,"column":4},"action":"insert","lines":[" 
"]},{"start":{"row":30,"column":4},"end":{"row":30,"column":5},"action":"insert","lines":["t"]},{"start":{"row":30,"column":5},"end":{"row":30,"column":6},"action":"insert","lines":["e"]},{"start":{"row":30,"column":6},"end":{"row":30,"column":7},"action":"insert","lines":["r"]},{"start":{"row":30,"column":7},"end":{"row":30,"column":8},"action":"insert","lines":["r"]},{"start":{"row":30,"column":8},"end":{"row":30,"column":9},"action":"insert","lines":["i"]},{"start":{"row":30,"column":9},"end":{"row":30,"column":10},"action":"insert","lines":["t"]},{"start":{"row":30,"column":10},"end":{"row":30,"column":11},"action":"insert","lines":["o"]},{"start":{"row":30,"column":11},"end":{"row":30,"column":12},"action":"insert","lines":["r"]},{"start":{"row":30,"column":12},"end":{"row":30,"column":13},"action":"insert","lines":["y"]}],[{"start":{"row":30,"column":13},"end":{"row":30,"column":14},"action":"insert","lines":[" "],"id":318},{"start":{"row":30,"column":14},"end":{"row":30,"column":15},"action":"insert","lines":["="]}],[{"start":{"row":30,"column":15},"end":{"row":30,"column":16},"action":"insert","lines":[" 
"],"id":319},{"start":{"row":30,"column":16},"end":{"row":30,"column":17},"action":"insert","lines":["m"]},{"start":{"row":30,"column":17},"end":{"row":30,"column":18},"action":"insert","lines":["o"]},{"start":{"row":30,"column":18},"end":{"row":30,"column":19},"action":"insert","lines":["e"]},{"start":{"row":30,"column":19},"end":{"row":30,"column":20},"action":"insert","lines":["l"]}],[{"start":{"row":30,"column":19},"end":{"row":30,"column":20},"action":"remove","lines":["l"],"id":320},{"start":{"row":30,"column":18},"end":{"row":30,"column":19},"action":"remove","lines":["e"]}],[{"start":{"row":30,"column":18},"end":{"row":30,"column":19},"action":"insert","lines":["d"],"id":321},{"start":{"row":30,"column":19},"end":{"row":30,"column":20},"action":"insert","lines":["e"]},{"start":{"row":30,"column":20},"end":{"row":30,"column":21},"action":"insert","lines":["l"]},{"start":{"row":30,"column":21},"end":{"row":30,"column":22},"action":"insert","lines":["s"]},{"start":{"row":30,"column":22},"end":{"row":30,"column":23},"action":"insert","lines":["."]}],[{"start":{"row":30,"column":23},"end":{"row":30,"column":24},"action":"insert","lines":["F"],"id":322},{"start":{"row":30,"column":24},"end":{"row":30,"column":25},"action":"insert","lines":["o"]},{"start":{"row":30,"column":25},"end":{"row":30,"column":26},"action":"insert","lines":["r"]},{"start":{"row":30,"column":26},"end":{"row":30,"column":27},"action":"insert","lines":["e"]}],[{"start":{"row":30,"column":23},"end":{"row":30,"column":27},"action":"remove","lines":["Fore"],"id":323},{"start":{"row":30,"column":23},"end":{"row":30,"column":33},"action":"insert","lines":["ForeignKey"]}],[{"start":{"row":30,"column":33},"end":{"row":30,"column":35},"action":"insert","lines":["()"],"id":324}],[{"start":{"row":30,"column":34},"end":{"row":30,"column":35},"action":"insert","lines":["T"],"id":325},{"start":{"row":30,"column":35},"end":{"row":30,"column":36},"action":"insert","lines":["e"]}],[{"start":{"row":30,"column
":34},"end":{"row":30,"column":36},"action":"remove","lines":["Te"],"id":326},{"start":{"row":30,"column":34},"end":{"row":30,"column":43},"action":"insert","lines":["Territory"]}],[{"start":{"row":30,"column":43},"end":{"row":30,"column":44},"action":"insert","lines":[","],"id":327}],[{"start":{"row":30,"column":44},"end":{"row":30,"column":45},"action":"insert","lines":[" "],"id":328},{"start":{"row":30,"column":45},"end":{"row":30,"column":46},"action":"insert","lines":["r"]},{"start":{"row":30,"column":46},"end":{"row":30,"column":47},"action":"insert","lines":["e"]},{"start":{"row":30,"column":47},"end":{"row":30,"column":48},"action":"insert","lines":["l"]},{"start":{"row":30,"column":48},"end":{"row":30,"column":49},"action":"insert","lines":["a"]}],[{"start":{"row":30,"column":45},"end":{"row":30,"column":49},"action":"remove","lines":["rela"],"id":329},{"start":{"row":30,"column":45},"end":{"row":30,"column":57},"action":"insert","lines":["related_name"]}],[{"start":{"row":30,"column":57},"end":{"row":30,"column":58},"action":"insert","lines":["'"],"id":330}],[{"start":{"row":30,"column":58},"end":{"row":30,"column":59},"action":"insert","lines":[" "],"id":331}],[{"start":{"row":30,"column":58},"end":{"row":30,"column":59},"action":"remove","lines":[" "],"id":332},{"start":{"row":30,"column":57},"end":{"row":30,"column":58},"action":"remove","lines":["'"]}],[{"start":{"row":30,"column":57},"end":{"row":30,"column":58},"action":"insert","lines":["="],"id":333}],[{"start":{"row":30,"column":58},"end":{"row":30,"column":60},"action":"insert","lines":["''"],"id":334}],[{"start":{"row":30,"column":59},"end":{"row":30,"column":60},"action":"insert","lines":[" "],"id":335}],[{"start":{"row":30,"column":59},"end":{"row":30,"column":60},"action":"remove","lines":[" 
"],"id":336}],[{"start":{"row":30,"column":59},"end":{"row":30,"column":60},"action":"insert","lines":["p"],"id":337},{"start":{"row":30,"column":60},"end":{"row":30,"column":61},"action":"insert","lines":["r"]},{"start":{"row":30,"column":61},"end":{"row":30,"column":62},"action":"insert","lines":["o"]},{"start":{"row":30,"column":62},"end":{"row":30,"column":63},"action":"insert","lines":["p"]},{"start":{"row":30,"column":63},"end":{"row":30,"column":64},"action":"insert","lines":["o"]},{"start":{"row":30,"column":64},"end":{"row":30,"column":65},"action":"insert","lines":["s"]},{"start":{"row":30,"column":65},"end":{"row":30,"column":66},"action":"insert","lines":["a"]},{"start":{"row":30,"column":66},"end":{"row":30,"column":67},"action":"insert","lines":["l"]},{"start":{"row":30,"column":67},"end":{"row":30,"column":68},"action":"insert","lines":["s"]}],[{"start":{"row":10,"column":18},"end":{"row":10,"column":27},"action":"remove","lines":["AutoField"],"id":338},{"start":{"row":10,"column":18},"end":{"row":10,"column":31},"action":"insert","lines":["AutoSlugField"]}],[{"start":{"row":10,"column":11},"end":{"row":10,"column":17},"action":"remove","lines":["models"],"id":339},{"start":{"row":10,"column":11},"end":{"row":10,"column":12},"action":"remove","lines":["."]}],[{"start":{"row":5,"column":0},"end":{"row":5,"column":34},"action":"remove","lines":["from autoslug import AutoSlugField"],"id":340},{"start":{"row":5,"column":0},"end":{"row":6,"column":0},"action":"insert","lines":["from autoslug import AutoSlugField",""]}],[{"start":{"row":5,"column":34},"end":{"row":6,"column":0},"action":"remove","lines":["",""],"id":341}],[{"start":{"row":10,"column":4},"end":{"row":10,"column":46},"action":"remove","lines":["slug = AutoSlugField(populate_from='name')"],"id":342},{"start":{"row":10,"column":4},"end":{"row":10,"column":47},"action":"insert","lines":["slug = 
AutoSlugField(populate_from='title')"]}],[{"start":{"row":10,"column":40},"end":{"row":10,"column":45},"action":"remove","lines":["title"],"id":343},{"start":{"row":10,"column":40},"end":{"row":10,"column":41},"action":"insert","lines":["n"]},{"start":{"row":10,"column":41},"end":{"row":10,"column":42},"action":"insert","lines":["a"]},{"start":{"row":10,"column":42},"end":{"row":10,"column":43},"action":"insert","lines":["m"]},{"start":{"row":10,"column":43},"end":{"row":10,"column":44},"action":"insert","lines":["e"]}],[{"start":{"row":30,"column":69},"end":{"row":30,"column":95},"action":"insert","lines":[", on_delete=models.CASCADE"],"id":344}],[{"start":{"row":29,"column":11},"end":{"row":29,"column":18},"action":"remove","lines":["models."],"id":345}],[{"start":{"row":29,"column":15},"end":{"row":29,"column":16},"action":"insert","lines":["S"],"id":346},{"start":{"row":29,"column":16},"end":{"row":29,"column":17},"action":"insert","lines":["l"]},{"start":{"row":29,"column":17},"end":{"row":29,"column":18},"action":"insert","lines":["u"]},{"start":{"row":29,"column":18},"end":{"row":29,"column":19},"action":"insert","lines":["g"]}]]},"ace":{"folds":[],"scrolltop":232.5,"scrollleft":0,"selection":{"start":{"row":29,"column":20},"end":{"row":29,"column":20},"isBackwards":false},"options":{"guessTabSize":true,"useWrapMode":false,"wrapToView":true},"firstLineState":{"row":15,"state":"start","mode":"ace/mode/python"}},"timestamp":1609900438434,"hash":"913a18f91d928b08d22e16ce70f1f5ef8f82f575"}
| 26,371
| 26,371
| 0.55379
| 3,849
| 26,371
| 3.791115
| 0.070928
| 0.142544
| 0.25514
| 0.099781
| 0.757675
| 0.660567
| 0.572711
| 0.533923
| 0.277207
| 0.169613
| 0
| 0.090049
| 0.001972
| 26,371
| 1
| 26,371
| 26,371
| 0.464379
| 0
| 0
| 0
| 0
| 0
| 0.468944
| 0.009138
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 9
|
8c53ba041b7674df63a04b95a1eee299488d133d
| 3,917
|
py
|
Python
|
examples/python/test_binops.py
|
SmartEconomyWorkshop/workshop
|
5961dcc8832f60b3a0407cb9a8361ba5485ac280
|
[
"MIT"
] | 79
|
2017-10-22T03:35:06.000Z
|
2021-12-02T10:28:06.000Z
|
examples/python/test_binops.py
|
SmartEconomyWorkshop/workshop
|
5961dcc8832f60b3a0407cb9a8361ba5485ac280
|
[
"MIT"
] | 122
|
2017-10-19T12:34:08.000Z
|
2020-08-20T12:38:17.000Z
|
examples/python/test_binops.py
|
SmartEconomyWorkshop/workshop
|
5961dcc8832f60b3a0407cb9a8361ba5485ac280
|
[
"MIT"
] | 76
|
2017-10-19T05:09:55.000Z
|
2020-12-08T12:03:59.000Z
|
from boa_test.tests.boa_test import BoaTest
from boa.compiler import Compiler
from neo.Prompt.Commands.BuildNRun import TestBuild
class TestContract(BoaTest):
    """Exercises the compiled BinopTest example contract across binary operators."""

    def test_binops(self):
        # Compile the example contract once; every case below executes it.
        compiled = Compiler.instance().load(
            '%s/boa_test/example/BinopTest.py' % TestContract.dirname).default
        out = compiled.write()

        # (operator, left operand, right operand, expected big-integer result)
        cases = [
            ('&', 4, 4, 4),
            ('|', 4, 3, 7),
            ('|', 4, 8, 12),
            ('^', 4, 4, 0),
            ('^', 4, 2, 6),
            ('%', 16, 2, 0),
            ('%', 16, 11, 5),
            ('//', 16, 2, 8),
            ('//', 16, 7, 2),
            ('/', 16, 7, 2),
            ('~', 16, 0, -17),
            ('~', -3, 0, 2),
        ]
        # Shift operators remain disabled, as in the original test. The prior
        # expectations were:
        #   ('>>', 16, 2, 4), ('>>', 16, 0, 16), ('>>', 11, 1, 5),
        #   ('<<', 16, 2, 64), ('<<', 4, 5, 128),
        # and ('<<', 16, -2) yielding no results at all.
        for op, left, right, expected in cases:
            tx, results, total_ops, engine = TestBuild(
                out, [op, left, right], self.GetWallet1(), '', '07')
            self.assertEqual(len(results), 1)
            self.assertEqual(results[0].GetBigInteger(), expected)
| 47.192771
| 108
| 0.595864
| 451
| 3,917
| 5.126386
| 0.117517
| 0.227076
| 0.108997
| 0.132353
| 0.884948
| 0.884948
| 0.884948
| 0.854671
| 0.837803
| 0.821367
| 0
| 0.051746
| 0.21062
| 3,917
| 82
| 109
| 47.768293
| 0.69599
| 0.255042
| 0
| 0.395349
| 0
| 0
| 0.024171
| 0.01105
| 0
| 0
| 0
| 0
| 0.55814
| 1
| 0.023256
| false
| 0
| 0.069767
| 0
| 0.116279
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
8b1597897a900fe0cc8fef0b8224a486833ec117
| 65,362
|
py
|
Python
|
gcloud/google-cloud-sdk/lib/googlecloudsdk/third_party/apis/dlp/v2/dlp_v2_client.py
|
bopopescu/JobSniperRails
|
39e7f871887176770de0f4fc6789e9ddc7f32b1f
|
[
"MIT"
] | null | null | null |
gcloud/google-cloud-sdk/lib/googlecloudsdk/third_party/apis/dlp/v2/dlp_v2_client.py
|
bopopescu/JobSniperRails
|
39e7f871887176770de0f4fc6789e9ddc7f32b1f
|
[
"MIT"
] | 11
|
2020-02-29T02:51:12.000Z
|
2022-03-30T23:20:08.000Z
|
gcloud/google-cloud-sdk/lib/googlecloudsdk/third_party/apis/dlp/v2/dlp_v2_client.py
|
bopopescu/JobSniperRails
|
39e7f871887176770de0f4fc6789e9ddc7f32b1f
|
[
"MIT"
] | 1
|
2020-07-24T18:47:35.000Z
|
2020-07-24T18:47:35.000Z
|
"""Generated client library for dlp version v2."""
# NOTE: This file is autogenerated and should not be edited by hand.
from apitools.base.py import base_api
from googlecloudsdk.third_party.apis.dlp.v2 import dlp_v2_messages as messages
class DlpV2(base_api.BaseApiClient):
"""Generated client library for service dlp version v2."""
MESSAGES_MODULE = messages
BASE_URL = u'https://dlp.googleapis.com/'
_PACKAGE = u'dlp'
_SCOPES = [u'https://www.googleapis.com/auth/cloud-platform']
_VERSION = u'v2'
_CLIENT_ID = '1042881264118.apps.googleusercontent.com'
_CLIENT_SECRET = 'x_Tw5K8nnjoRAqULM9PFAC2b'
_USER_AGENT = 'x_Tw5K8nnjoRAqULM9PFAC2b'
_CLIENT_CLASS_NAME = u'DlpV2'
_URL_VERSION = u'v2'
_API_KEY = None
  def __init__(self, url='', credentials=None,
               get_credentials=True, http=None, model=None,
               log_request=False, log_response=False,
               credentials_args=None, default_global_params=None,
               additional_http_headers=None, response_encoding=None):
    """Create a new dlp handle."""
    # Fall back to the service's default endpoint when no URL is supplied.
    url = url or self.BASE_URL
    super(DlpV2, self).__init__(
        url, credentials=credentials,
        get_credentials=get_credentials, http=http, model=model,
        log_request=log_request, log_response=log_response,
        credentials_args=credentials_args,
        default_global_params=default_global_params,
        additional_http_headers=additional_http_headers,
        response_encoding=response_encoding)
    # Instantiate one service stub per API collection; each wraps this client.
    self.infoTypes = self.InfoTypesService(self)
    self.organizations_deidentifyTemplates = self.OrganizationsDeidentifyTemplatesService(self)
    self.organizations_inspectTemplates = self.OrganizationsInspectTemplatesService(self)
    self.organizations_storedInfoTypes = self.OrganizationsStoredInfoTypesService(self)
    self.organizations = self.OrganizationsService(self)
    self.projects_content = self.ProjectsContentService(self)
    self.projects_deidentifyTemplates = self.ProjectsDeidentifyTemplatesService(self)
    self.projects_dlpJobs = self.ProjectsDlpJobsService(self)
    self.projects_image = self.ProjectsImageService(self)
    self.projects_inspectTemplates = self.ProjectsInspectTemplatesService(self)
    self.projects_jobTriggers = self.ProjectsJobTriggersService(self)
    self.projects_locations_content = self.ProjectsLocationsContentService(self)
    self.projects_locations = self.ProjectsLocationsService(self)
    self.projects_storedInfoTypes = self.ProjectsStoredInfoTypesService(self)
    self.projects = self.ProjectsService(self)
  class InfoTypesService(base_api.BaseApiService):
    """Service class for the infoTypes resource."""

    _NAME = u'infoTypes'

    def __init__(self, client):
      super(DlpV2.InfoTypesService, self).__init__(client)
      self._upload_configs = {
          }

    def List(self, request, global_params=None):
      r"""Returns a list of the sensitive information types that the DLP API
      supports. See https://cloud.google.com/dlp/docs/infotypes-reference to
      learn more.

      Args:
        request: (DlpInfoTypesListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2ListInfoTypesResponse) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)

    # method_config is a zero-argument callable returning the wire-level
    # description of the call (HTTP verb, path template, param lists).
    List.method_config = lambda: base_api.ApiMethodInfo(
        http_method=u'GET',
        method_id=u'dlp.infoTypes.list',
        ordered_params=[],
        path_params=[],
        query_params=[u'filter', u'languageCode'],
        relative_path=u'v2/infoTypes',
        request_field='',
        request_type_name=u'DlpInfoTypesListRequest',
        response_type_name=u'GooglePrivacyDlpV2ListInfoTypesResponse',
        supports_download=False,
    )
  class OrganizationsDeidentifyTemplatesService(base_api.BaseApiService):
    """Service class for the organizations_deidentifyTemplates resource."""

    # Each <Method>.method_config below is a zero-argument callable returning
    # the wire-level ApiMethodInfo consumed by _RunMethod.
    _NAME = u'organizations_deidentifyTemplates'

    def __init__(self, client):
      super(DlpV2.OrganizationsDeidentifyTemplatesService, self).__init__(client)
      self._upload_configs = {
          }

    def Create(self, request, global_params=None):
      r"""Creates a DeidentifyTemplate for re-using frequently used configuration
      for de-identifying content, images, and storage.
      See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
      more.

      Args:
        request: (DlpOrganizationsDeidentifyTemplatesCreateRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2DeidentifyTemplate) The response message.
      """
      config = self.GetMethodConfig('Create')
      return self._RunMethod(
          config, request, global_params=global_params)

    Create.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/organizations/{organizationsId}/deidentifyTemplates',
        http_method=u'POST',
        method_id=u'dlp.organizations.deidentifyTemplates.create',
        ordered_params=[u'parent'],
        path_params=[u'parent'],
        query_params=[],
        relative_path=u'v2/{+parent}/deidentifyTemplates',
        request_field=u'googlePrivacyDlpV2CreateDeidentifyTemplateRequest',
        request_type_name=u'DlpOrganizationsDeidentifyTemplatesCreateRequest',
        response_type_name=u'GooglePrivacyDlpV2DeidentifyTemplate',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
      r"""Deletes a DeidentifyTemplate.
      See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
      more.

      Args:
        request: (DlpOrganizationsDeidentifyTemplatesDeleteRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GoogleProtobufEmpty) The response message.
      """
      config = self.GetMethodConfig('Delete')
      return self._RunMethod(
          config, request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/organizations/{organizationsId}/deidentifyTemplates/{deidentifyTemplatesId}',
        http_method=u'DELETE',
        method_id=u'dlp.organizations.deidentifyTemplates.delete',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field='',
        request_type_name=u'DlpOrganizationsDeidentifyTemplatesDeleteRequest',
        response_type_name=u'GoogleProtobufEmpty',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
      r"""Gets a DeidentifyTemplate.
      See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
      more.

      Args:
        request: (DlpOrganizationsDeidentifyTemplatesGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2DeidentifyTemplate) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/organizations/{organizationsId}/deidentifyTemplates/{deidentifyTemplatesId}',
        http_method=u'GET',
        method_id=u'dlp.organizations.deidentifyTemplates.get',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field='',
        request_type_name=u'DlpOrganizationsDeidentifyTemplatesGetRequest',
        response_type_name=u'GooglePrivacyDlpV2DeidentifyTemplate',
        supports_download=False,
    )

    def List(self, request, global_params=None):
      r"""Lists DeidentifyTemplates.
      See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
      more.

      Args:
        request: (DlpOrganizationsDeidentifyTemplatesListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2ListDeidentifyTemplatesResponse) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/organizations/{organizationsId}/deidentifyTemplates',
        http_method=u'GET',
        method_id=u'dlp.organizations.deidentifyTemplates.list',
        ordered_params=[u'parent'],
        path_params=[u'parent'],
        query_params=[u'orderBy', u'pageSize', u'pageToken'],
        relative_path=u'v2/{+parent}/deidentifyTemplates',
        request_field='',
        request_type_name=u'DlpOrganizationsDeidentifyTemplatesListRequest',
        response_type_name=u'GooglePrivacyDlpV2ListDeidentifyTemplatesResponse',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
      r"""Updates the DeidentifyTemplate.
      See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
      more.

      Args:
        request: (DlpOrganizationsDeidentifyTemplatesPatchRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2DeidentifyTemplate) The response message.
      """
      config = self.GetMethodConfig('Patch')
      return self._RunMethod(
          config, request, global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/organizations/{organizationsId}/deidentifyTemplates/{deidentifyTemplatesId}',
        http_method=u'PATCH',
        method_id=u'dlp.organizations.deidentifyTemplates.patch',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field=u'googlePrivacyDlpV2UpdateDeidentifyTemplateRequest',
        request_type_name=u'DlpOrganizationsDeidentifyTemplatesPatchRequest',
        response_type_name=u'GooglePrivacyDlpV2DeidentifyTemplate',
        supports_download=False,
    )
  class OrganizationsInspectTemplatesService(base_api.BaseApiService):
    """Service class for the organizations_inspectTemplates resource."""

    # Each <Method>.method_config below is a zero-argument callable returning
    # the wire-level ApiMethodInfo consumed by _RunMethod.
    _NAME = u'organizations_inspectTemplates'

    def __init__(self, client):
      super(DlpV2.OrganizationsInspectTemplatesService, self).__init__(client)
      self._upload_configs = {
          }

    def Create(self, request, global_params=None):
      r"""Creates an InspectTemplate for re-using frequently used configuration
      for inspecting content, images, and storage.
      See https://cloud.google.com/dlp/docs/creating-templates to learn more.

      Args:
        request: (DlpOrganizationsInspectTemplatesCreateRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2InspectTemplate) The response message.
      """
      config = self.GetMethodConfig('Create')
      return self._RunMethod(
          config, request, global_params=global_params)

    Create.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/organizations/{organizationsId}/inspectTemplates',
        http_method=u'POST',
        method_id=u'dlp.organizations.inspectTemplates.create',
        ordered_params=[u'parent'],
        path_params=[u'parent'],
        query_params=[],
        relative_path=u'v2/{+parent}/inspectTemplates',
        request_field=u'googlePrivacyDlpV2CreateInspectTemplateRequest',
        request_type_name=u'DlpOrganizationsInspectTemplatesCreateRequest',
        response_type_name=u'GooglePrivacyDlpV2InspectTemplate',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
      r"""Deletes an InspectTemplate.
      See https://cloud.google.com/dlp/docs/creating-templates to learn more.

      Args:
        request: (DlpOrganizationsInspectTemplatesDeleteRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GoogleProtobufEmpty) The response message.
      """
      config = self.GetMethodConfig('Delete')
      return self._RunMethod(
          config, request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/organizations/{organizationsId}/inspectTemplates/{inspectTemplatesId}',
        http_method=u'DELETE',
        method_id=u'dlp.organizations.inspectTemplates.delete',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field='',
        request_type_name=u'DlpOrganizationsInspectTemplatesDeleteRequest',
        response_type_name=u'GoogleProtobufEmpty',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
      r"""Gets an InspectTemplate.
      See https://cloud.google.com/dlp/docs/creating-templates to learn more.

      Args:
        request: (DlpOrganizationsInspectTemplatesGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2InspectTemplate) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/organizations/{organizationsId}/inspectTemplates/{inspectTemplatesId}',
        http_method=u'GET',
        method_id=u'dlp.organizations.inspectTemplates.get',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field='',
        request_type_name=u'DlpOrganizationsInspectTemplatesGetRequest',
        response_type_name=u'GooglePrivacyDlpV2InspectTemplate',
        supports_download=False,
    )

    def List(self, request, global_params=None):
      r"""Lists InspectTemplates.
      See https://cloud.google.com/dlp/docs/creating-templates to learn more.

      Args:
        request: (DlpOrganizationsInspectTemplatesListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2ListInspectTemplatesResponse) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/organizations/{organizationsId}/inspectTemplates',
        http_method=u'GET',
        method_id=u'dlp.organizations.inspectTemplates.list',
        ordered_params=[u'parent'],
        path_params=[u'parent'],
        query_params=[u'orderBy', u'pageSize', u'pageToken'],
        relative_path=u'v2/{+parent}/inspectTemplates',
        request_field='',
        request_type_name=u'DlpOrganizationsInspectTemplatesListRequest',
        response_type_name=u'GooglePrivacyDlpV2ListInspectTemplatesResponse',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
      r"""Updates the InspectTemplate.
      See https://cloud.google.com/dlp/docs/creating-templates to learn more.

      Args:
        request: (DlpOrganizationsInspectTemplatesPatchRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2InspectTemplate) The response message.
      """
      config = self.GetMethodConfig('Patch')
      return self._RunMethod(
          config, request, global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/organizations/{organizationsId}/inspectTemplates/{inspectTemplatesId}',
        http_method=u'PATCH',
        method_id=u'dlp.organizations.inspectTemplates.patch',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field=u'googlePrivacyDlpV2UpdateInspectTemplateRequest',
        request_type_name=u'DlpOrganizationsInspectTemplatesPatchRequest',
        response_type_name=u'GooglePrivacyDlpV2InspectTemplate',
        supports_download=False,
    )
  class OrganizationsStoredInfoTypesService(base_api.BaseApiService):
    """Service class for the organizations_storedInfoTypes resource."""

    # Each <Method>.method_config below is a zero-argument callable returning
    # the wire-level ApiMethodInfo consumed by _RunMethod.
    _NAME = u'organizations_storedInfoTypes'

    def __init__(self, client):
      super(DlpV2.OrganizationsStoredInfoTypesService, self).__init__(client)
      self._upload_configs = {
          }

    def Create(self, request, global_params=None):
      r"""Creates a pre-built stored infoType to be used for inspection.
      See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
      learn more.

      Args:
        request: (DlpOrganizationsStoredInfoTypesCreateRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2StoredInfoType) The response message.
      """
      config = self.GetMethodConfig('Create')
      return self._RunMethod(
          config, request, global_params=global_params)

    Create.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/organizations/{organizationsId}/storedInfoTypes',
        http_method=u'POST',
        method_id=u'dlp.organizations.storedInfoTypes.create',
        ordered_params=[u'parent'],
        path_params=[u'parent'],
        query_params=[],
        relative_path=u'v2/{+parent}/storedInfoTypes',
        request_field=u'googlePrivacyDlpV2CreateStoredInfoTypeRequest',
        request_type_name=u'DlpOrganizationsStoredInfoTypesCreateRequest',
        response_type_name=u'GooglePrivacyDlpV2StoredInfoType',
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
      r"""Deletes a stored infoType.
      See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
      learn more.

      Args:
        request: (DlpOrganizationsStoredInfoTypesDeleteRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GoogleProtobufEmpty) The response message.
      """
      config = self.GetMethodConfig('Delete')
      return self._RunMethod(
          config, request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/organizations/{organizationsId}/storedInfoTypes/{storedInfoTypesId}',
        http_method=u'DELETE',
        method_id=u'dlp.organizations.storedInfoTypes.delete',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field='',
        request_type_name=u'DlpOrganizationsStoredInfoTypesDeleteRequest',
        response_type_name=u'GoogleProtobufEmpty',
        supports_download=False,
    )

    def Get(self, request, global_params=None):
      r"""Gets a stored infoType.
      See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
      learn more.

      Args:
        request: (DlpOrganizationsStoredInfoTypesGetRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2StoredInfoType) The response message.
      """
      config = self.GetMethodConfig('Get')
      return self._RunMethod(
          config, request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/organizations/{organizationsId}/storedInfoTypes/{storedInfoTypesId}',
        http_method=u'GET',
        method_id=u'dlp.organizations.storedInfoTypes.get',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field='',
        request_type_name=u'DlpOrganizationsStoredInfoTypesGetRequest',
        response_type_name=u'GooglePrivacyDlpV2StoredInfoType',
        supports_download=False,
    )

    def List(self, request, global_params=None):
      r"""Lists stored infoTypes.
      See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
      learn more.

      Args:
        request: (DlpOrganizationsStoredInfoTypesListRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2ListStoredInfoTypesResponse) The response message.
      """
      config = self.GetMethodConfig('List')
      return self._RunMethod(
          config, request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/organizations/{organizationsId}/storedInfoTypes',
        http_method=u'GET',
        method_id=u'dlp.organizations.storedInfoTypes.list',
        ordered_params=[u'parent'],
        path_params=[u'parent'],
        query_params=[u'orderBy', u'pageSize', u'pageToken'],
        relative_path=u'v2/{+parent}/storedInfoTypes',
        request_field='',
        request_type_name=u'DlpOrganizationsStoredInfoTypesListRequest',
        response_type_name=u'GooglePrivacyDlpV2ListStoredInfoTypesResponse',
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
      r"""Updates the stored infoType by creating a new version. The existing
      version will continue to be used until the new version is ready.
      See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
      learn more.

      Args:
        request: (DlpOrganizationsStoredInfoTypesPatchRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2StoredInfoType) The response message.
      """
      config = self.GetMethodConfig('Patch')
      return self._RunMethod(
          config, request, global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/organizations/{organizationsId}/storedInfoTypes/{storedInfoTypesId}',
        http_method=u'PATCH',
        method_id=u'dlp.organizations.storedInfoTypes.patch',
        ordered_params=[u'name'],
        path_params=[u'name'],
        query_params=[],
        relative_path=u'v2/{+name}',
        request_field=u'googlePrivacyDlpV2UpdateStoredInfoTypeRequest',
        request_type_name=u'DlpOrganizationsStoredInfoTypesPatchRequest',
        response_type_name=u'GooglePrivacyDlpV2StoredInfoType',
        supports_download=False,
    )
  class OrganizationsService(base_api.BaseApiService):
    """Service class for the organizations resource."""

    _NAME = u'organizations'

    # No methods are exposed on the bare organizations collection here; the
    # per-resource operations live in the organizations_* sibling services.
    def __init__(self, client):
      super(DlpV2.OrganizationsService, self).__init__(client)
      self._upload_configs = {
          }
  class ProjectsContentService(base_api.BaseApiService):
    """Service class for the projects_content resource."""

    # Each <Method>.method_config below is a zero-argument callable returning
    # the wire-level ApiMethodInfo consumed by _RunMethod.
    _NAME = u'projects_content'

    def __init__(self, client):
      super(DlpV2.ProjectsContentService, self).__init__(client)
      self._upload_configs = {
          }

    def Deidentify(self, request, global_params=None):
      r"""De-identifies potentially sensitive info from a ContentItem.
      This method has limits on input size and output size.
      See https://cloud.google.com/dlp/docs/deidentify-sensitive-data to
      learn more.

      When no InfoTypes or CustomInfoTypes are specified in this request, the
      system will automatically choose what detectors to run. By default this may
      be all types, but may change over time as detectors are updated.

      Args:
        request: (DlpProjectsContentDeidentifyRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2DeidentifyContentResponse) The response message.
      """
      config = self.GetMethodConfig('Deidentify')
      return self._RunMethod(
          config, request, global_params=global_params)

    Deidentify.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/content:deidentify',
        http_method=u'POST',
        method_id=u'dlp.projects.content.deidentify',
        ordered_params=[u'parent'],
        path_params=[u'parent'],
        query_params=[],
        relative_path=u'v2/{+parent}/content:deidentify',
        request_field=u'googlePrivacyDlpV2DeidentifyContentRequest',
        request_type_name=u'DlpProjectsContentDeidentifyRequest',
        response_type_name=u'GooglePrivacyDlpV2DeidentifyContentResponse',
        supports_download=False,
    )

    def Inspect(self, request, global_params=None):
      r"""Finds potentially sensitive info in content.
      This method has limits on input size, processing time, and output size.

      When no InfoTypes or CustomInfoTypes are specified in this request, the
      system will automatically choose what detectors to run. By default this may
      be all types, but may change over time as detectors are updated.

      For how to guides, see https://cloud.google.com/dlp/docs/inspecting-images
      and https://cloud.google.com/dlp/docs/inspecting-text.

      Args:
        request: (DlpProjectsContentInspectRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2InspectContentResponse) The response message.
      """
      config = self.GetMethodConfig('Inspect')
      return self._RunMethod(
          config, request, global_params=global_params)

    Inspect.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/content:inspect',
        http_method=u'POST',
        method_id=u'dlp.projects.content.inspect',
        ordered_params=[u'parent'],
        path_params=[u'parent'],
        query_params=[],
        relative_path=u'v2/{+parent}/content:inspect',
        request_field=u'googlePrivacyDlpV2InspectContentRequest',
        request_type_name=u'DlpProjectsContentInspectRequest',
        response_type_name=u'GooglePrivacyDlpV2InspectContentResponse',
        supports_download=False,
    )

    def Reidentify(self, request, global_params=None):
      r"""Re-identifies content that has been de-identified.
      See
      https://cloud.google.com/dlp/docs/pseudonymization#re-identification_in_free_text_code_example
      to learn more.

      Args:
        request: (DlpProjectsContentReidentifyRequest) input message
        global_params: (StandardQueryParameters, default: None) global arguments
      Returns:
        (GooglePrivacyDlpV2ReidentifyContentResponse) The response message.
      """
      config = self.GetMethodConfig('Reidentify')
      return self._RunMethod(
          config, request, global_params=global_params)

    Reidentify.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u'v2/projects/{projectsId}/content:reidentify',
        http_method=u'POST',
        method_id=u'dlp.projects.content.reidentify',
        ordered_params=[u'parent'],
        path_params=[u'parent'],
        query_params=[],
        relative_path=u'v2/{+parent}/content:reidentify',
        request_field=u'googlePrivacyDlpV2ReidentifyContentRequest',
        request_type_name=u'DlpProjectsContentReidentifyRequest',
        response_type_name=u'GooglePrivacyDlpV2ReidentifyContentResponse',
        supports_download=False,
    )
class ProjectsDeidentifyTemplatesService(base_api.BaseApiService):
"""Service class for the projects_deidentifyTemplates resource."""
_NAME = u'projects_deidentifyTemplates'
def __init__(self, client):
super(DlpV2.ProjectsDeidentifyTemplatesService, self).__init__(client)
self._upload_configs = {
}
def Create(self, request, global_params=None):
r"""Creates a DeidentifyTemplate for re-using frequently used configuration.
for de-identifying content, images, and storage.
See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
more.
Args:
request: (DlpProjectsDeidentifyTemplatesCreateRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GooglePrivacyDlpV2DeidentifyTemplate) The response message.
"""
config = self.GetMethodConfig('Create')
return self._RunMethod(
config, request, global_params=global_params)
Create.method_config = lambda: base_api.ApiMethodInfo(
flat_path=u'v2/projects/{projectsId}/deidentifyTemplates',
http_method=u'POST',
method_id=u'dlp.projects.deidentifyTemplates.create',
ordered_params=[u'parent'],
path_params=[u'parent'],
query_params=[],
relative_path=u'v2/{+parent}/deidentifyTemplates',
request_field=u'googlePrivacyDlpV2CreateDeidentifyTemplateRequest',
request_type_name=u'DlpProjectsDeidentifyTemplatesCreateRequest',
response_type_name=u'GooglePrivacyDlpV2DeidentifyTemplate',
supports_download=False,
)
def Delete(self, request, global_params=None):
r"""Deletes a DeidentifyTemplate.
See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
more.
Args:
request: (DlpProjectsDeidentifyTemplatesDeleteRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GoogleProtobufEmpty) The response message.
"""
config = self.GetMethodConfig('Delete')
return self._RunMethod(
config, request, global_params=global_params)
Delete.method_config = lambda: base_api.ApiMethodInfo(
flat_path=u'v2/projects/{projectsId}/deidentifyTemplates/{deidentifyTemplatesId}',
http_method=u'DELETE',
method_id=u'dlp.projects.deidentifyTemplates.delete',
ordered_params=[u'name'],
path_params=[u'name'],
query_params=[],
relative_path=u'v2/{+name}',
request_field='',
request_type_name=u'DlpProjectsDeidentifyTemplatesDeleteRequest',
response_type_name=u'GoogleProtobufEmpty',
supports_download=False,
)
def Get(self, request, global_params=None):
r"""Gets a DeidentifyTemplate.
See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
more.
Args:
request: (DlpProjectsDeidentifyTemplatesGetRequest) input message
global_params: (StandardQueryParameters, default: None) global arguments
Returns:
(GooglePrivacyDlpV2DeidentifyTemplate) The response message.
"""
config = self.GetMethodConfig('Get')
return self._RunMethod(
config, request, global_params=global_params)
Get.method_config = lambda: base_api.ApiMethodInfo(
flat_path=u'v2/projects/{projectsId}/deidentifyTemplates/{deidentifyTemplatesId}',
http_method=u'GET',
method_id=u'dlp.projects.deidentifyTemplates.get',
ordered_params=[u'name'],
path_params=[u'name'],
query_params=[],
relative_path=u'v2/{+name}',
request_field='',
request_type_name=u'DlpProjectsDeidentifyTemplatesGetRequest',
response_type_name=u'GooglePrivacyDlpV2DeidentifyTemplate',
supports_download=False,
)
def List(self, request, global_params=None):
    """Lists DeidentifyTemplates.

    See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
    more.

    Args:
      request: (DlpProjectsDeidentifyTemplatesListRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (GooglePrivacyDlpV2ListDeidentifyTemplatesResponse) The response message.
    """
    # Resolve the static method configuration, then delegate the HTTP call.
    return self._RunMethod(
        self.GetMethodConfig('List'), request, global_params=global_params)

# Exposed lazily so importing the module does not build every ApiMethodInfo.
List.method_config = lambda: base_api.ApiMethodInfo(
    flat_path=u"v2/projects/{projectsId}/deidentifyTemplates",
    http_method=u"GET",
    method_id=u"dlp.projects.deidentifyTemplates.list",
    ordered_params=[u"parent"],
    path_params=[u"parent"],
    query_params=[u"orderBy", u"pageSize", u"pageToken"],
    relative_path=u"v2/{+parent}/deidentifyTemplates",
    request_field="",
    request_type_name=u"DlpProjectsDeidentifyTemplatesListRequest",
    response_type_name=u"GooglePrivacyDlpV2ListDeidentifyTemplatesResponse",
    supports_download=False,
)
def Patch(self, request, global_params=None):
    """Updates the DeidentifyTemplate.

    See https://cloud.google.com/dlp/docs/creating-templates-deid to learn
    more.

    Args:
      request: (DlpProjectsDeidentifyTemplatesPatchRequest) input message
      global_params: (StandardQueryParameters, default: None) global arguments

    Returns:
      (GooglePrivacyDlpV2DeidentifyTemplate) The response message.
    """
    # Resolve the static method configuration, then delegate the HTTP call.
    return self._RunMethod(
        self.GetMethodConfig('Patch'), request, global_params=global_params)

# Exposed lazily so importing the module does not build every ApiMethodInfo.
Patch.method_config = lambda: base_api.ApiMethodInfo(
    flat_path=u"v2/projects/{projectsId}/deidentifyTemplates/{deidentifyTemplatesId}",
    http_method=u"PATCH",
    method_id=u"dlp.projects.deidentifyTemplates.patch",
    ordered_params=[u"name"],
    path_params=[u"name"],
    query_params=[],
    relative_path=u"v2/{+name}",
    request_field=u"googlePrivacyDlpV2UpdateDeidentifyTemplateRequest",
    request_type_name=u"DlpProjectsDeidentifyTemplatesPatchRequest",
    response_type_name=u"GooglePrivacyDlpV2DeidentifyTemplate",
    supports_download=False,
)
class ProjectsDlpJobsService(base_api.BaseApiService):
    """Service class for the projects_dlpJobs resource."""

    _NAME = u'projects_dlpJobs'

    def __init__(self, client):
        super(DlpV2.ProjectsDlpJobsService, self).__init__(client)
        # No media-upload configuration exists for any method of this service.
        self._upload_configs = {}

    def Cancel(self, request, global_params=None):
        """Starts asynchronous cancellation on a long-running DlpJob.

        The server makes a best effort to cancel the DlpJob, but success is
        not guaranteed. See https://cloud.google.com/dlp/docs/inspecting-storage
        and https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more.

        Args:
          request: (DlpProjectsDlpJobsCancelRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (GoogleProtobufEmpty) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Cancel'), request, global_params=global_params)

    Cancel.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u"v2/projects/{projectsId}/dlpJobs/{dlpJobsId}:cancel",
        http_method=u"POST",
        method_id=u"dlp.projects.dlpJobs.cancel",
        ordered_params=[u"name"],
        path_params=[u"name"],
        query_params=[],
        relative_path=u"v2/{+name}:cancel",
        request_field=u"googlePrivacyDlpV2CancelDlpJobRequest",
        request_type_name=u"DlpProjectsDlpJobsCancelRequest",
        response_type_name=u"GoogleProtobufEmpty",
        supports_download=False,
    )

    def Create(self, request, global_params=None):
        """Creates a new job to inspect storage or calculate risk metrics.

        See https://cloud.google.com/dlp/docs/inspecting-storage and
        https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more.
        When no InfoTypes or CustomInfoTypes are specified in inspect jobs, the
        system will automatically choose what detectors to run. By default this
        may be all types, but may change over time as detectors are updated.

        Args:
          request: (DlpProjectsDlpJobsCreateRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (GooglePrivacyDlpV2DlpJob) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Create'), request, global_params=global_params)

    Create.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u"v2/projects/{projectsId}/dlpJobs",
        http_method=u"POST",
        method_id=u"dlp.projects.dlpJobs.create",
        ordered_params=[u"parent"],
        path_params=[u"parent"],
        query_params=[],
        relative_path=u"v2/{+parent}/dlpJobs",
        request_field=u"googlePrivacyDlpV2CreateDlpJobRequest",
        request_type_name=u"DlpProjectsDlpJobsCreateRequest",
        response_type_name=u"GooglePrivacyDlpV2DlpJob",
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
        """Deletes a long-running DlpJob.

        This method indicates that the client is no longer interested in the
        DlpJob result. The job will be cancelled if possible.
        See https://cloud.google.com/dlp/docs/inspecting-storage and
        https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more.

        Args:
          request: (DlpProjectsDlpJobsDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (GoogleProtobufEmpty) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u"v2/projects/{projectsId}/dlpJobs/{dlpJobsId}",
        http_method=u"DELETE",
        method_id=u"dlp.projects.dlpJobs.delete",
        ordered_params=[u"name"],
        path_params=[u"name"],
        query_params=[],
        relative_path=u"v2/{+name}",
        request_field="",
        request_type_name=u"DlpProjectsDlpJobsDeleteRequest",
        response_type_name=u"GoogleProtobufEmpty",
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Gets the latest state of a long-running DlpJob.

        See https://cloud.google.com/dlp/docs/inspecting-storage and
        https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more.

        Args:
          request: (DlpProjectsDlpJobsGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (GooglePrivacyDlpV2DlpJob) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u"v2/projects/{projectsId}/dlpJobs/{dlpJobsId}",
        http_method=u"GET",
        method_id=u"dlp.projects.dlpJobs.get",
        ordered_params=[u"name"],
        path_params=[u"name"],
        query_params=[],
        relative_path=u"v2/{+name}",
        request_field="",
        request_type_name=u"DlpProjectsDlpJobsGetRequest",
        response_type_name=u"GooglePrivacyDlpV2DlpJob",
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Lists DlpJobs that match the specified filter in the request.

        See https://cloud.google.com/dlp/docs/inspecting-storage and
        https://cloud.google.com/dlp/docs/compute-risk-analysis to learn more.

        Args:
          request: (DlpProjectsDlpJobsListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (GooglePrivacyDlpV2ListDlpJobsResponse) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u"v2/projects/{projectsId}/dlpJobs",
        http_method=u"GET",
        method_id=u"dlp.projects.dlpJobs.list",
        ordered_params=[u"parent"],
        path_params=[u"parent"],
        query_params=[u"filter", u"orderBy", u"pageSize", u"pageToken", u"type"],
        relative_path=u"v2/{+parent}/dlpJobs",
        request_field="",
        request_type_name=u"DlpProjectsDlpJobsListRequest",
        response_type_name=u"GooglePrivacyDlpV2ListDlpJobsResponse",
        supports_download=False,
    )
class ProjectsImageService(base_api.BaseApiService):
    """Service class for the projects_image resource."""

    _NAME = u'projects_image'

    def __init__(self, client):
        super(DlpV2.ProjectsImageService, self).__init__(client)
        # No media-upload configuration exists for any method of this service.
        self._upload_configs = {}

    def Redact(self, request, global_params=None):
        """Redacts potentially sensitive info from an image.

        This method has limits on input size, processing time, and output size.
        See https://cloud.google.com/dlp/docs/redacting-sensitive-data-images to
        learn more. When no InfoTypes or CustomInfoTypes are specified in this
        request, the system will automatically choose what detectors to run. By
        default this may be all types, but may change over time as detectors
        are updated.

        Args:
          request: (DlpProjectsImageRedactRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (GooglePrivacyDlpV2RedactImageResponse) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Redact'), request, global_params=global_params)

    Redact.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u"v2/projects/{projectsId}/image:redact",
        http_method=u"POST",
        method_id=u"dlp.projects.image.redact",
        ordered_params=[u"parent"],
        path_params=[u"parent"],
        query_params=[],
        relative_path=u"v2/{+parent}/image:redact",
        request_field=u"googlePrivacyDlpV2RedactImageRequest",
        request_type_name=u"DlpProjectsImageRedactRequest",
        response_type_name=u"GooglePrivacyDlpV2RedactImageResponse",
        supports_download=False,
    )
class ProjectsInspectTemplatesService(base_api.BaseApiService):
    """Service class for the projects_inspectTemplates resource."""

    _NAME = u'projects_inspectTemplates'

    def __init__(self, client):
        super(DlpV2.ProjectsInspectTemplatesService, self).__init__(client)
        # No media-upload configuration exists for any method of this service.
        self._upload_configs = {}

    def Create(self, request, global_params=None):
        """Creates an InspectTemplate for re-using frequently used configuration.

        Templates cover inspecting content, images, and storage.
        See https://cloud.google.com/dlp/docs/creating-templates to learn more.

        Args:
          request: (DlpProjectsInspectTemplatesCreateRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (GooglePrivacyDlpV2InspectTemplate) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Create'), request, global_params=global_params)

    Create.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u"v2/projects/{projectsId}/inspectTemplates",
        http_method=u"POST",
        method_id=u"dlp.projects.inspectTemplates.create",
        ordered_params=[u"parent"],
        path_params=[u"parent"],
        query_params=[],
        relative_path=u"v2/{+parent}/inspectTemplates",
        request_field=u"googlePrivacyDlpV2CreateInspectTemplateRequest",
        request_type_name=u"DlpProjectsInspectTemplatesCreateRequest",
        response_type_name=u"GooglePrivacyDlpV2InspectTemplate",
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
        """Deletes an InspectTemplate.

        See https://cloud.google.com/dlp/docs/creating-templates to learn more.

        Args:
          request: (DlpProjectsInspectTemplatesDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (GoogleProtobufEmpty) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u"v2/projects/{projectsId}/inspectTemplates/{inspectTemplatesId}",
        http_method=u"DELETE",
        method_id=u"dlp.projects.inspectTemplates.delete",
        ordered_params=[u"name"],
        path_params=[u"name"],
        query_params=[],
        relative_path=u"v2/{+name}",
        request_field="",
        request_type_name=u"DlpProjectsInspectTemplatesDeleteRequest",
        response_type_name=u"GoogleProtobufEmpty",
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Gets an InspectTemplate.

        See https://cloud.google.com/dlp/docs/creating-templates to learn more.

        Args:
          request: (DlpProjectsInspectTemplatesGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (GooglePrivacyDlpV2InspectTemplate) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u"v2/projects/{projectsId}/inspectTemplates/{inspectTemplatesId}",
        http_method=u"GET",
        method_id=u"dlp.projects.inspectTemplates.get",
        ordered_params=[u"name"],
        path_params=[u"name"],
        query_params=[],
        relative_path=u"v2/{+name}",
        request_field="",
        request_type_name=u"DlpProjectsInspectTemplatesGetRequest",
        response_type_name=u"GooglePrivacyDlpV2InspectTemplate",
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Lists InspectTemplates.

        See https://cloud.google.com/dlp/docs/creating-templates to learn more.

        Args:
          request: (DlpProjectsInspectTemplatesListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (GooglePrivacyDlpV2ListInspectTemplatesResponse) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u"v2/projects/{projectsId}/inspectTemplates",
        http_method=u"GET",
        method_id=u"dlp.projects.inspectTemplates.list",
        ordered_params=[u"parent"],
        path_params=[u"parent"],
        query_params=[u"orderBy", u"pageSize", u"pageToken"],
        relative_path=u"v2/{+parent}/inspectTemplates",
        request_field="",
        request_type_name=u"DlpProjectsInspectTemplatesListRequest",
        response_type_name=u"GooglePrivacyDlpV2ListInspectTemplatesResponse",
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
        """Updates the InspectTemplate.

        See https://cloud.google.com/dlp/docs/creating-templates to learn more.

        Args:
          request: (DlpProjectsInspectTemplatesPatchRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (GooglePrivacyDlpV2InspectTemplate) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Patch'), request, global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u"v2/projects/{projectsId}/inspectTemplates/{inspectTemplatesId}",
        http_method=u"PATCH",
        method_id=u"dlp.projects.inspectTemplates.patch",
        ordered_params=[u"name"],
        path_params=[u"name"],
        query_params=[],
        relative_path=u"v2/{+name}",
        request_field=u"googlePrivacyDlpV2UpdateInspectTemplateRequest",
        request_type_name=u"DlpProjectsInspectTemplatesPatchRequest",
        response_type_name=u"GooglePrivacyDlpV2InspectTemplate",
        supports_download=False,
    )
class ProjectsJobTriggersService(base_api.BaseApiService):
    """Service class for the projects_jobTriggers resource."""

    _NAME = u'projects_jobTriggers'

    def __init__(self, client):
        super(DlpV2.ProjectsJobTriggersService, self).__init__(client)
        # No media-upload configuration exists for any method of this service.
        self._upload_configs = {}

    def Activate(self, request, global_params=None):
        """Activate a job trigger.

        Causes the immediate execute of a trigger instead of waiting on the
        trigger event to occur.

        Args:
          request: (DlpProjectsJobTriggersActivateRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (GooglePrivacyDlpV2DlpJob) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Activate'), request, global_params=global_params)

    Activate.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u"v2/projects/{projectsId}/jobTriggers/{jobTriggersId}:activate",
        http_method=u"POST",
        method_id=u"dlp.projects.jobTriggers.activate",
        ordered_params=[u"name"],
        path_params=[u"name"],
        query_params=[],
        relative_path=u"v2/{+name}:activate",
        request_field=u"googlePrivacyDlpV2ActivateJobTriggerRequest",
        request_type_name=u"DlpProjectsJobTriggersActivateRequest",
        response_type_name=u"GooglePrivacyDlpV2DlpJob",
        supports_download=False,
    )

    def Create(self, request, global_params=None):
        """Creates a job trigger to run DLP actions on a set schedule.

        Actions include scanning storage for sensitive information.
        See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more.

        Args:
          request: (DlpProjectsJobTriggersCreateRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (GooglePrivacyDlpV2JobTrigger) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Create'), request, global_params=global_params)

    Create.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u"v2/projects/{projectsId}/jobTriggers",
        http_method=u"POST",
        method_id=u"dlp.projects.jobTriggers.create",
        ordered_params=[u"parent"],
        path_params=[u"parent"],
        query_params=[],
        relative_path=u"v2/{+parent}/jobTriggers",
        request_field=u"googlePrivacyDlpV2CreateJobTriggerRequest",
        request_type_name=u"DlpProjectsJobTriggersCreateRequest",
        response_type_name=u"GooglePrivacyDlpV2JobTrigger",
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
        """Deletes a job trigger.

        See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more.

        Args:
          request: (DlpProjectsJobTriggersDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (GoogleProtobufEmpty) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u"v2/projects/{projectsId}/jobTriggers/{jobTriggersId}",
        http_method=u"DELETE",
        method_id=u"dlp.projects.jobTriggers.delete",
        ordered_params=[u"name"],
        path_params=[u"name"],
        query_params=[],
        relative_path=u"v2/{+name}",
        request_field="",
        request_type_name=u"DlpProjectsJobTriggersDeleteRequest",
        response_type_name=u"GoogleProtobufEmpty",
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Gets a job trigger.

        See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more.

        Args:
          request: (DlpProjectsJobTriggersGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (GooglePrivacyDlpV2JobTrigger) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u"v2/projects/{projectsId}/jobTriggers/{jobTriggersId}",
        http_method=u"GET",
        method_id=u"dlp.projects.jobTriggers.get",
        ordered_params=[u"name"],
        path_params=[u"name"],
        query_params=[],
        relative_path=u"v2/{+name}",
        request_field="",
        request_type_name=u"DlpProjectsJobTriggersGetRequest",
        response_type_name=u"GooglePrivacyDlpV2JobTrigger",
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Lists job triggers.

        See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more.

        Args:
          request: (DlpProjectsJobTriggersListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (GooglePrivacyDlpV2ListJobTriggersResponse) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u"v2/projects/{projectsId}/jobTriggers",
        http_method=u"GET",
        method_id=u"dlp.projects.jobTriggers.list",
        ordered_params=[u"parent"],
        path_params=[u"parent"],
        query_params=[u"filter", u"orderBy", u"pageSize", u"pageToken"],
        relative_path=u"v2/{+parent}/jobTriggers",
        request_field="",
        request_type_name=u"DlpProjectsJobTriggersListRequest",
        response_type_name=u"GooglePrivacyDlpV2ListJobTriggersResponse",
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
        """Updates a job trigger.

        See https://cloud.google.com/dlp/docs/creating-job-triggers to learn more.

        Args:
          request: (DlpProjectsJobTriggersPatchRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (GooglePrivacyDlpV2JobTrigger) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Patch'), request, global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u"v2/projects/{projectsId}/jobTriggers/{jobTriggersId}",
        http_method=u"PATCH",
        method_id=u"dlp.projects.jobTriggers.patch",
        ordered_params=[u"name"],
        path_params=[u"name"],
        query_params=[],
        relative_path=u"v2/{+name}",
        request_field=u"googlePrivacyDlpV2UpdateJobTriggerRequest",
        request_type_name=u"DlpProjectsJobTriggersPatchRequest",
        response_type_name=u"GooglePrivacyDlpV2JobTrigger",
        supports_download=False,
    )
class ProjectsLocationsContentService(base_api.BaseApiService):
    """Service class for the projects_locations_content resource."""

    _NAME = u'projects_locations_content'

    def __init__(self, client):
        super(DlpV2.ProjectsLocationsContentService, self).__init__(client)
        # No media-upload configuration exists for any method of this service.
        self._upload_configs = {}

    def Deidentify(self, request, global_params=None):
        """De-identifies potentially sensitive info from a ContentItem.

        This method has limits on input size and output size.
        See https://cloud.google.com/dlp/docs/deidentify-sensitive-data to learn
        more. When no InfoTypes or CustomInfoTypes are specified in this
        request, the system will automatically choose what detectors to run. By
        default this may be all types, but may change over time as detectors
        are updated.

        Args:
          request: (DlpProjectsLocationsContentDeidentifyRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (GooglePrivacyDlpV2DeidentifyContentResponse) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Deidentify'), request,
            global_params=global_params)

    Deidentify.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u"v2/projects/{projectsId}/locations/{location}/content:deidentify",
        http_method=u"POST",
        method_id=u"dlp.projects.locations.content.deidentify",
        ordered_params=[u"parent", u"location"],
        path_params=[u"location", u"parent"],
        query_params=[],
        relative_path=u"v2/{+parent}/locations/{location}/content:deidentify",
        request_field=u"googlePrivacyDlpV2DeidentifyContentRequest",
        request_type_name=u"DlpProjectsLocationsContentDeidentifyRequest",
        response_type_name=u"GooglePrivacyDlpV2DeidentifyContentResponse",
        supports_download=False,
    )

    def Inspect(self, request, global_params=None):
        """Finds potentially sensitive info in content.

        This method has limits on input size, processing time, and output size.
        When no InfoTypes or CustomInfoTypes are specified in this request, the
        system will automatically choose what detectors to run. By default this
        may be all types, but may change over time as detectors are updated.
        For how to guides, see https://cloud.google.com/dlp/docs/inspecting-images
        and https://cloud.google.com/dlp/docs/inspecting-text.

        Args:
          request: (DlpProjectsLocationsContentInspectRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (GooglePrivacyDlpV2InspectContentResponse) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Inspect'), request,
            global_params=global_params)

    Inspect.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u"v2/projects/{projectsId}/locations/{location}/content:inspect",
        http_method=u"POST",
        method_id=u"dlp.projects.locations.content.inspect",
        ordered_params=[u"parent", u"location"],
        path_params=[u"location", u"parent"],
        query_params=[],
        relative_path=u"v2/{+parent}/locations/{location}/content:inspect",
        request_field=u"googlePrivacyDlpV2InspectContentRequest",
        request_type_name=u"DlpProjectsLocationsContentInspectRequest",
        response_type_name=u"GooglePrivacyDlpV2InspectContentResponse",
        supports_download=False,
    )
class ProjectsLocationsService(base_api.BaseApiService):
    """Service class for the projects_locations resource."""

    _NAME = u'projects_locations'

    def __init__(self, client):
        super(DlpV2.ProjectsLocationsService, self).__init__(client)
        # This resource exposes no methods of its own; it only anchors the
        # nested projects.locations.* services. No upload configs either.
        self._upload_configs = {}
class ProjectsStoredInfoTypesService(base_api.BaseApiService):
    """Service class for the projects_storedInfoTypes resource."""

    _NAME = u'projects_storedInfoTypes'

    def __init__(self, client):
        super(DlpV2.ProjectsStoredInfoTypesService, self).__init__(client)
        # No media-upload configuration exists for any method of this service.
        self._upload_configs = {}

    def Create(self, request, global_params=None):
        """Creates a pre-built stored infoType to be used for inspection.

        See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
        learn more.

        Args:
          request: (DlpProjectsStoredInfoTypesCreateRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (GooglePrivacyDlpV2StoredInfoType) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Create'), request, global_params=global_params)

    Create.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u"v2/projects/{projectsId}/storedInfoTypes",
        http_method=u"POST",
        method_id=u"dlp.projects.storedInfoTypes.create",
        ordered_params=[u"parent"],
        path_params=[u"parent"],
        query_params=[],
        relative_path=u"v2/{+parent}/storedInfoTypes",
        request_field=u"googlePrivacyDlpV2CreateStoredInfoTypeRequest",
        request_type_name=u"DlpProjectsStoredInfoTypesCreateRequest",
        response_type_name=u"GooglePrivacyDlpV2StoredInfoType",
        supports_download=False,
    )

    def Delete(self, request, global_params=None):
        """Deletes a stored infoType.

        See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
        learn more.

        Args:
          request: (DlpProjectsStoredInfoTypesDeleteRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (GoogleProtobufEmpty) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Delete'), request, global_params=global_params)

    Delete.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u"v2/projects/{projectsId}/storedInfoTypes/{storedInfoTypesId}",
        http_method=u"DELETE",
        method_id=u"dlp.projects.storedInfoTypes.delete",
        ordered_params=[u"name"],
        path_params=[u"name"],
        query_params=[],
        relative_path=u"v2/{+name}",
        request_field="",
        request_type_name=u"DlpProjectsStoredInfoTypesDeleteRequest",
        response_type_name=u"GoogleProtobufEmpty",
        supports_download=False,
    )

    def Get(self, request, global_params=None):
        """Gets a stored infoType.

        See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
        learn more.

        Args:
          request: (DlpProjectsStoredInfoTypesGetRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (GooglePrivacyDlpV2StoredInfoType) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Get'), request, global_params=global_params)

    Get.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u"v2/projects/{projectsId}/storedInfoTypes/{storedInfoTypesId}",
        http_method=u"GET",
        method_id=u"dlp.projects.storedInfoTypes.get",
        ordered_params=[u"name"],
        path_params=[u"name"],
        query_params=[],
        relative_path=u"v2/{+name}",
        request_field="",
        request_type_name=u"DlpProjectsStoredInfoTypesGetRequest",
        response_type_name=u"GooglePrivacyDlpV2StoredInfoType",
        supports_download=False,
    )

    def List(self, request, global_params=None):
        """Lists stored infoTypes.

        See https://cloud.google.com/dlp/docs/creating-stored-infotypes to
        learn more.

        Args:
          request: (DlpProjectsStoredInfoTypesListRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (GooglePrivacyDlpV2ListStoredInfoTypesResponse) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('List'), request, global_params=global_params)

    List.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u"v2/projects/{projectsId}/storedInfoTypes",
        http_method=u"GET",
        method_id=u"dlp.projects.storedInfoTypes.list",
        ordered_params=[u"parent"],
        path_params=[u"parent"],
        query_params=[u"orderBy", u"pageSize", u"pageToken"],
        relative_path=u"v2/{+parent}/storedInfoTypes",
        request_field="",
        request_type_name=u"DlpProjectsStoredInfoTypesListRequest",
        response_type_name=u"GooglePrivacyDlpV2ListStoredInfoTypesResponse",
        supports_download=False,
    )

    def Patch(self, request, global_params=None):
        """Updates the stored infoType by creating a new version.

        The existing version will continue to be used until the new version is
        ready. See https://cloud.google.com/dlp/docs/creating-stored-infotypes
        to learn more.

        Args:
          request: (DlpProjectsStoredInfoTypesPatchRequest) input message
          global_params: (StandardQueryParameters, default: None) global arguments

        Returns:
          (GooglePrivacyDlpV2StoredInfoType) The response message.
        """
        return self._RunMethod(
            self.GetMethodConfig('Patch'), request, global_params=global_params)

    Patch.method_config = lambda: base_api.ApiMethodInfo(
        flat_path=u"v2/projects/{projectsId}/storedInfoTypes/{storedInfoTypesId}",
        http_method=u"PATCH",
        method_id=u"dlp.projects.storedInfoTypes.patch",
        ordered_params=[u"name"],
        path_params=[u"name"],
        query_params=[],
        relative_path=u"v2/{+name}",
        request_field=u"googlePrivacyDlpV2UpdateStoredInfoTypeRequest",
        request_type_name=u"DlpProjectsStoredInfoTypesPatchRequest",
        response_type_name=u"GooglePrivacyDlpV2StoredInfoType",
        supports_download=False,
    )
class ProjectsService(base_api.BaseApiService):
    """Service class for the projects resource."""

    _NAME = u'projects'

    def __init__(self, client):
        super(DlpV2.ProjectsService, self).__init__(client)
        # This resource exposes no methods of its own; it only anchors the
        # nested projects.* services. No upload configs either.
        self._upload_configs = {}
| 40.322023
| 100
| 0.708929
| 6,666
| 65,362
| 6.764626
| 0.055356
| 0.051893
| 0.04045
| 0.022753
| 0.801033
| 0.79753
| 0.785532
| 0.771517
| 0.749961
| 0.717983
| 0
| 0.00457
| 0.193262
| 65,362
| 1,620
| 101
| 40.346914
| 0.850597
| 0.328417
| 0
| 0.657631
| 1
| 0
| 0.258396
| 0.215219
| 0
| 0
| 0
| 0
| 0
| 1
| 0.064257
| false
| 0
| 0.002008
| 0
| 0.141566
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8b2abec726679d8c016af4db08f3997914b62a92
| 1,504
|
py
|
Python
|
src/api/api/migrations/0006_auto_20210531_0103.py
|
Hakan-er/utma
|
f7cd6253ec894047b460d4df9b43eeb9b109bae2
|
[
"MIT"
] | 1
|
2022-02-02T20:36:27.000Z
|
2022-02-02T20:36:27.000Z
|
src/api/api/migrations/0006_auto_20210531_0103.py
|
Hakan-er/utma
|
f7cd6253ec894047b460d4df9b43eeb9b109bae2
|
[
"MIT"
] | null | null | null |
src/api/api/migrations/0006_auto_20210531_0103.py
|
Hakan-er/utma
|
f7cd6253ec894047b460d4df9b43eeb9b109bae2
|
[
"MIT"
] | 1
|
2021-06-17T20:50:41.000Z
|
2021-06-17T20:50:41.000Z
|
# Generated by Django 3.2 on 2021-05-30 22:03
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated schema migration (see the header: Django 3.2,
    # 2021-05-30). Adds optional free-text columns to the dataset, result and
    # task models. Kept code-identical to the generated output on purpose:
    # Django compares migration state against the model definitions.

    # Must run after migration 0005 of the 'api' app.
    dependencies = [
        ('api', '0005_auto_20210530_2314'),
    ]

    operations = [
        # dataset: comma-separated attribute name lists — TODO confirm the
        # encoding against the code that writes these fields.
        migrations.AddField(
            model_name='dataset',
            name='categoricalAttributes',
            field=models.CharField(blank=True, max_length=1000, null=True),
        ),
        migrations.AddField(
            model_name='dataset',
            name='numericAttributes',
            field=models.CharField(blank=True, max_length=1000, null=True),
        ),
        # result: accuracy is stored as text (CharField), not a numeric column.
        migrations.AddField(
            model_name='result',
            name='accuracy',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AddField(
            model_name='result',
            name='algorithmName',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AddField(
            model_name='result',
            name='datasetName',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        # task: denormalized copies of the same two display names as result.
        migrations.AddField(
            model_name='task',
            name='algorithmName',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
        migrations.AddField(
            model_name='task',
            name='datasetName',
            field=models.CharField(blank=True, max_length=200, null=True),
        ),
    ]
| 30.693878
| 75
| 0.570479
| 147
| 1,504
| 5.721088
| 0.306122
| 0.149822
| 0.191439
| 0.224732
| 0.772889
| 0.772889
| 0.714625
| 0.714625
| 0.702735
| 0.702735
| 0
| 0.051109
| 0.310505
| 1,504
| 48
| 76
| 31.333333
| 0.759884
| 0.02859
| 0
| 0.761905
| 1
| 0
| 0.109664
| 0.030158
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.02381
| 0
| 0.095238
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
8b31e48708cca83592114d4f44ded2a92e78189c
| 9,277
|
py
|
Python
|
dyck/grammars/mcfg2.py
|
omelkonian/dyck
|
175d0027bdccdfc91be88b991535d05a244fdbb3
|
[
"MIT"
] | 2
|
2019-03-07T06:09:41.000Z
|
2020-12-10T14:10:38.000Z
|
dyck/grammars/mcfg2.py
|
omelkonian/dyck
|
175d0027bdccdfc91be88b991535d05a244fdbb3
|
[
"MIT"
] | 10
|
2018-01-22T10:17:49.000Z
|
2018-06-01T20:49:27.000Z
|
dyck/grammars/mcfg2.py
|
omelkonian/dyck
|
175d0027bdccdfc91be88b991535d05a244fdbb3
|
[
"MIT"
] | null | null | null |
from ..dyck import Grammar
from ..grammar_utils import *
"""
ababaacbcbcc
W
- [W] null
- [BC] b -> c
- [AB] a -> b
BC(|ab|,|c|)
BC(ab |ab|, |c| c)
AB(abab |a|, |cb| cc)
W(ababa |acb|, cbcc)
# extra
W(abc, abc)
#
BC(|ab|, |c|)
AB(ab|a|, |cb|c)
BC(|ab|aba, cbc|c|)
AB(ababa|a|, |cb|cbcc)
"""
# Nonterminal states of the grammar (the 'ABC' state experiment is
# currently disabled -- see the commented-out rules below in this file).
all_states = ['W', 'BC', 'AB'] #, 'ABC']
# Minimal grammar: S derives W; W is produced either directly from the
# triple (a, b, c) or by combining an existing W with (a, b, c).
# NOTE(review): r, O and the symbols x, y, a, b, c come from the
# star-import of grammar_utils -- their exact semantics are defined there.
mcfg2 = Grammar([
r('S <- W', {(x, y)}),
O('W', {(a, b, c)}),
O('W <- W', {(x, y), (a, b, c)}),
# O('W <- W, W', {(x, y), (l, m)}),
])
# mcfg2 = Grammar([
# r('S <- W', {(x, y)}),
# O('A', {(a)}),
# O('B', {(b)}),
# O('C', {(c)}),
# O('W', {(a, b, c)}),
# O('W <- W, A, B, C', {(x, y), (l, m, q, w, r, t), (a, b, c)}),
# ])
# Experimental grammar: every rule below is commented out, so mcfg22 is
# effectively Grammar([]) at runtime. The comments are kept as a record
# of the attempted rule set (base cases, word interleaving, and the
# "abc"/"acb"/"bac"/"bca" insertion families).
mcfg22 = Grammar([
### Base cases
# r('S <- W', {(x, y)}),
# # all_c('W', [], orders=[(a, b, c)], left=[a, b, c]),
# all_c('W', [], orders=[(a, b, c)], right=[a, b, c]),
# all_c('BC', [], orders=[(a, b, c)], left=[a, b], right=[c]),
# all_c('AB', [], orders=[(a, b, c)], left=[a], right=[b, c]),
# # ###
# # ### SAFETY
# O('W <- BC', {(x, y)}),
# O('W <- AB', {(x, y)}),
# # O('W <- ABC', {(x, y)}),
# # ###
# # Interleaving words
# O('W <- W, W', {(x, y), (z, w)}),
# # all_c('W', ['AB', 'BC'], orders=[(x, y), (z, w)]
# all_c('AB', ['AB', 'BC'], orders=[(x, y), (z, w)], left=[x], right=[y]),
# all_c('BC', ['AB', 'BC'], orders=[(x, y), (z, w)], left=[z], right=[w]),
# # all_c('ABC', ['AB', 'BC'], orders=[(x, y), (z, w)]
# # , left=[x, z], right=[y, w]),
# all_c('W', ['AB', 'AB'], orders=[(x, y), (z, w), (x, w), (z, y)]
# , left=[y, w]),
# all_c('W', ['AB', 'AB'], orders=[(x, y), (z, w), (x, w), (z, y)]
# , right=[x, z]),
# all_c('AB', ['AB', 'AB'], orders=[(x, y), (z, w), (x, w), (z, y)]
# , left=[x], right=[z, y, w]),
# all_c('AB', ['AB', 'AB'], orders=[(x, y), (z, w), (x, w), (z, y)]
# , left=[z], right=[x, y, w]),
# all_c('AB', ['AB', 'AB'], orders=[(x, y), (z, w), (x, w), (z, y)]
# , left=[x, z], right=[y, w]),
# all_c('AB', ['AB', 'AB'], orders=[(x, y), (z, w), (x, w), (z, y)]
# , left=[x, z, w], right=[y]),
# all_c('AB', ['AB', 'AB'], orders=[(x, y), (z, w), (x, w), (z, y)]
# , left=[x, z, y], right=[w]),
# all_c('W', ['BC', 'BC'], orders=[(x, y), (z, w), (x, w), (z, y)]
# , left=[y, w]),
# all_c('W', ['BC', 'BC'], orders=[(x, y), (z, w), (x, w), (z, y)]
# , right=[x, z]),
# all_c('BC', ['BC', 'BC'], orders=[(x, y), (z, w), (x, w), (z, y)]
# , left=[x], right=[z, y, w]),
# all_c('BC', ['BC', 'BC'], orders=[(x, y), (z, w), (x, w), (z, y)]
# , left=[z], right=[x, y, w]),
# all_c('BC', ['BC', 'BC'], orders=[(x, y), (z, w), (x, w), (z, y)]
# , left=[x, z], right=[y, w]),
# all_c('BC', ['BC', 'BC'], orders=[(x, y), (z, w), (x, w), (z, y)]
# , left=[x, z, w], right=[y]),
# all_c('BC', ['BC', 'BC'], orders=[(x, y), (z, w), (x, w), (z, y)]
# , left=[x, z, y], right=[w]),
# # ABC
# # all_c('ABC', ['BC'], orders=[(x, y), (a, b, c)]
# # , left=[a, x], right=[y, b]),
# # all_c('ABC', ['AB'], orders=[(x, y), (a, b, c)]
# # , left=[x, b], right=[y, c]),
# # all_c('ABC', ['BC'], orders=[(x, y), (a, c, b), (x, c), (b, y)]
# # , left=[a, x], right=[b, c]),
# # all_c('ABC', ['AB'], orders=[(x, y), (b, a, c), (a, y), (x, b)]
# # , left=[a, b], right=[y, c]),
# # all_c('ABC', ['AB'], orders=[(x, y), (b, c, a), (a, y), (x, b)]
# # , left=[a, b], right=[y, c]),
# # ABC -> W
# all_c('W', ['ABC'], orders=[(x, y), (c, b, a), (x, b, y), (x, c), (a, y)]
# , left=[y, a]),
# all_c('W', ['ABC'], orders=[(x, y), (c, b, a), (x, b, y), (x, c), (a, y)]
# , right=[x, c]),
# # ABC -> AB
# all_c('AB', ['ABC'], orders=[(x, y), (c, b, a), (x, b, y), (x, c), (a, y)]
# , left=[c], right=[b]),
# # ABC -> BC
# all_c('BC', ['ABC'], orders=[(x, y), (c, b, a), (x, b, y), (x, c), (a, y)]
# , left=[b], right=[a]),
# # ABC -> ABC
# all_c('ABC', ['ABC'], orders=[(x, y), (c, b, a), (x, b, y), (x, c), (a, y)]
# , left=[x], right=[c]),
# all_c('ABC', ['ABC'], orders=[(x, y), (c, b, a), (x, b, y), (x, c), (a, y)]
# , left=[a], right=[y]),
# ### Insert "abc"
# # W
# O('W <- W', {(x, y), (a, b, c)}),
# # BC -> W
# all_c('W', ['BC'], orders=[(x, y), (a, b, c)]
# , right=[x, a]),
# all_c('W', ['BC'], orders=[(x, y), (a, b, c)]
# , left=[y], right=[a]),
# all_c('W', ['BC'], orders=[(x, y), (a, b, c)]
# , left=[c], right=[x]),
# all_c('W', ['BC'], orders=[(x, y), (a, b, c)]
# , left=[y, c]),
# # BC -> BC
# all_c('BC', ['BC'], orders=[(x, y), (a, b, c)]
# , left=[x], right=[y, a]),
# all_c('BC', ['BC'], orders=[(x, y), (a, b, c)]
# , left=[a, x], right=[y, b]),
# all_c('BC', ['BC'], orders=[(x, y), (a, b, c)]
# , left=[a, b], right=[c, x]),
# all_c('BC', ['BC'], orders=[(x, y), (a, b, c)]
# , left=[x, b], right=[y, c]), # ^2
# all_c('BC', ['BC'], orders=[(x, y), (a, b, c)]
# , left=[y, b], right=[c]),
# all_c('BC', ['BC'], orders=[(x, y), (a, b, c)]
# , left=[x, c], right=[y]),
# # BC -> AB
# all_c('AB', ['BC'], orders=[(x, y), (a, b, c)]
# , left=[a], right=[x, b]),
# all_c('AB', ['BC'], orders=[(x, y), (a, b, c)]
# , left=[a, x], right=[y, b]),
# all_c('AB', ['BC'], orders=[(x, y), (a, b, c)]
# , left=[y, a], right=[b]),
# O('W <- AB', {(x, y), (a, b, c)}),
# # AB -> W
# all_c('W', ['AB'], orders=[(x, y), (a, b, c)]
# , right=[x, a]),
# all_c('W', ['AB'], orders=[(x, y), (a, b, c)]
# , left=[y, c]),
# all_c('W', ['AB'], orders=[(x, y), (a, b, c)]
# , left=[y], right=[a]),
# all_c('W', ['AB'], orders=[(x, y), (a, b, c)]
# , left=[c], right=[x]),
# # AB -> BC
# all_c('BC', ['AB'], orders=[(x, y), (a, b, c)]
# , left=[b], right=[x, c]),
# all_c('BC', ['AB'], orders=[(x, y), (a, b, c)]
# , left=[x, b], right=[y, c]),
# all_c('BC', ['AB'], orders=[(x, y), (a, b, c)]
# , left=[y, b], right=[c]),
# # AB -> AB
# all_c('AB', ['AB'], orders=[(x, y), (a, b, c)]
# , left=[x], right=[y, a]),
# all_c('AB', ['AB'], orders=[(x, y), (a, b, c)]
# , left=[a], right=[x, b]),
# all_c('AB', ['AB'], orders=[(x, y), (a, b, c)]
# , left=[x, a], right=[y, b]), # ^2
# all_c('AB', ['AB'], orders=[(x, y), (a, b, c)]
# , left=[y, a], right=[b]),
# all_c('AB', ['AB'], orders=[(x, y), (a, b, c)]
# , left=[x, b], right=[y, c]),
# all_c('AB', ['AB'], orders=[(x, y), (a, b, c)]
# , left=[x, c], right=[y]),
# ### Insert "acb"
# # BC -> W
# all_c('W', ['BC'], orders=[(x, y), (a, c, b), (x, c), (b, y)]),
# # BC -> AB
# all_c('AB', ['BC'], orders=[(x, y), (a, c, b), (x, c), (b, y)]
# , left=[a], right=[b]),
# # BC -> BC
# all_c('BC', ['BC'], orders=[(x, y), (a, c, b), (x, c), (b, y)]
# , left=[x], right=[c]),
# all_c('BC', ['BC'], orders=[(x, y), (a, c, b), (x, c), (b, y)]
# , left=[b], right=[y]),
# ### Insert "bac"
# # AB -> W
# all_c('W', ['AB'], orders=[(x, y), (b, a, c), (a, y), (x, b)]),
# # AB -> AB
# all_c('AB', ['AB'], orders=[(x, y), (b, a, c), (a, y), (x, b)]
# , left=[x], right=[b]),
# all_c('AB', ['AB'], orders=[(x, y), (b, a, c), (a, y), (x, b)]
# , left=[a], right=[y]),
# # AB -> BC
# all_c('BC', ['AB'], orders=[(x, y), (b, a, c), (a, y), (x, b)]
# , left=[b], right=[c]),
# ### Insert "bca"
# # BC -> W
# all_c('W', ['AB'], orders=[(x, y), (b, c, a), (a, y), (x, b)]),
# # BC -> AB
# all_c('AB', ['AB'], orders=[(x, y), (b, c, a), (a, y), (x, b)]
# , left=[x], right=[b]),
# all_c('AB', ['AB'], orders=[(x, y), (b, c, a), (a, y), (x, b)]
# , left=[a], right=[y]),
# # BC -> BC
# all_c('BC', ['AB'], orders=[(x, y), (b, c, a), (a, y), (x, b)]
# , left=[b], right=[c]),
# # # Debugging
# ('$_W', ['W'], [[x, '$', y]]),
])
| 35.818533
| 81
| 0.302253
| 1,408
| 9,277
| 1.93892
| 0.035511
| 0.060073
| 0.196337
| 0.108791
| 0.841392
| 0.821978
| 0.80696
| 0.78315
| 0.72967
| 0.691575
| 0
| 0.001012
| 0.361
| 9,277
| 258
| 82
| 35.957364
| 0.459514
| 0.824943
| 0
| 0.2
| 0
| 0
| 0.016
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.2
| 0
| 0.2
| 0
| 0
| 0
| 1
| null | 0
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
509c3eb87c0ae2617391b0ac9b5fec5c79798d33
| 178
|
py
|
Python
|
Solutions/Training/Lesson_15/__init__.py
|
dev-11/codility-solutions
|
01b0ce4a43b1390fe15f2daabea95e90b834fbfc
|
[
"MIT"
] | null | null | null |
Solutions/Training/Lesson_15/__init__.py
|
dev-11/codility-solutions
|
01b0ce4a43b1390fe15f2daabea95e90b834fbfc
|
[
"MIT"
] | null | null | null |
Solutions/Training/Lesson_15/__init__.py
|
dev-11/codility-solutions
|
01b0ce4a43b1390fe15f2daabea95e90b834fbfc
|
[
"MIT"
] | null | null | null |
from .count_distinct_slices import solution as count_distinct_slices
from .count_triangles import solution as count_triangles
from .abs_distinct import solution as abs_distinct
| 35.6
| 68
| 0.876404
| 26
| 178
| 5.692308
| 0.346154
| 0.283784
| 0.324324
| 0.283784
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.106742
| 178
| 4
| 69
| 44.5
| 0.930818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
509c43e2240c25019f60e515325095a2c5586425
| 8,200
|
py
|
Python
|
flexget/tests/api_tests/test_format_checker_api.py
|
davidcollom/Flexget
|
cd763e04afdf6da8f1673dd567a42d55d4cb3b6c
|
[
"MIT"
] | 1
|
2021-03-24T11:54:01.000Z
|
2021-03-24T11:54:01.000Z
|
flexget/tests/api_tests/test_format_checker_api.py
|
davidcollom/Flexget
|
cd763e04afdf6da8f1673dd567a42d55d4cb3b6c
|
[
"MIT"
] | null | null | null |
flexget/tests/api_tests/test_format_checker_api.py
|
davidcollom/Flexget
|
cd763e04afdf6da8f1673dd567a42d55d4cb3b6c
|
[
"MIT"
] | null | null | null |
from __future__ import unicode_literals, division, absolute_import
from builtins import * # noqa pylint: disable=unused-import, redefined-builtin
from flexget.api.app import base_message
from flexget.utils import json
class TestFormatChecker(object):
    """Tests for the /format_check/ API endpoint.

    Each test POSTs one payload the format checker should accept
    (HTTP 200) and one it should reject (HTTP 422), and verifies in both
    cases that the response body matches the ``base_message`` schema.
    """

    config = 'tasks: {}'

    def _check(self, api_client, schema_match, payload, expected_code):
        """POST *payload* to /format_check/ and assert status + schema.

        The endpoint answers with a ``base_message`` body for both
        accepted and rejected payloads, so the schema assertion is the
        same in every case. Replaces 22 copy-pasted request/assert
        sequences in the original file.
        """
        rsp = api_client.json_post('/format_check/', data=json.dumps(payload))
        assert rsp.status_code == expected_code, 'Response code is %s' % rsp.status_code
        data = json.loads(rsp.get_data(as_text=True))
        errors = schema_match(base_message, data)
        assert not errors

    def test_quality(self, api_client, schema_match):
        self._check(api_client, schema_match, {'quality': '720p'}, 200)
        # A range is not a valid single quality value.
        self._check(api_client, schema_match, {'quality': '720p-1080p'}, 422)

    def test_quality_req(self, api_client, schema_match):
        self._check(api_client, schema_match, {'quality_requirements': '720p-1080p'}, 200)
        # NOTE(review): the original used the 'quality' key here, not
        # 'quality_requirements'; preserved as-is -- confirm intent.
        self._check(api_client, schema_match, {'quality': 'bla'}, 422)

    def test_time(self, api_client, schema_match):
        self._check(api_client, schema_match, {'time': '10:00'}, 200)
        self._check(api_client, schema_match, {'time': 'bla'}, 422)

    def test_interval(self, api_client, schema_match):
        self._check(api_client, schema_match, {'interval': '1 day'}, 200)
        self._check(api_client, schema_match, {'interval': 'bla'}, 422)

    def test_percent(self, api_client, schema_match):
        self._check(api_client, schema_match, {'percent': '79%'}, 200)
        self._check(api_client, schema_match, {'percent': 'bla'}, 422)

    def test_size(self, api_client, schema_match):
        self._check(api_client, schema_match, {'size': '4GB'}, 200)
        # NOTE(review): the original rejects a 'percent' payload here
        # rather than a bad 'size'; preserved as-is -- confirm intent.
        self._check(api_client, schema_match, {'percent': 'bla'}, 422)

    def test_regex(self, api_client, schema_match):
        self._check(api_client, schema_match, {'regex': 'bla'}, 200)
        # '((' is an unbalanced, uncompilable pattern.
        self._check(api_client, schema_match, {'regex': '(('}, 422)

    def test_file(self, api_client, schema_match):
        self._check(api_client, schema_match, {'file': 'test_format_checker_api.py'}, 200)
        self._check(api_client, schema_match, {'file': 'bla'}, 422)

    def test_path(self, api_client, schema_match):
        self._check(api_client, schema_match, {'path': '../api_tests'}, 200)
        self._check(api_client, schema_match, {'path': 'bla'}, 422)

    def test_url(self, api_client, schema_match):
        self._check(api_client, schema_match, {'url': 'http://google.com'}, 200)
        self._check(api_client, schema_match, {'url': 'bla'}, 422)

    def test_episode_identifier(self, api_client, schema_match):
        self._check(api_client, schema_match, {'episode_identifier': 's01e01'}, 200)
        self._check(api_client, schema_match, {'episode_identifier': 'bla'}, 422)
| 37.272727
| 79
| 0.648902
| 1,106
| 8,200
| 4.587703
| 0.076854
| 0.069373
| 0.112732
| 0.069373
| 0.903823
| 0.899093
| 0.84864
| 0.833268
| 0.833268
| 0.833268
| 0
| 0.02131
| 0.227439
| 8,200
| 219
| 80
| 37.442922
| 0.779637
| 0.006463
| 0
| 0.751678
| 0
| 0
| 0.125967
| 0.003192
| 0
| 0
| 0
| 0
| 0.295302
| 1
| 0.073826
| false
| 0
| 0.026846
| 0
| 0.114094
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
50dee6cd3f304309f4de52e0d42252fe491f7a98
| 72,404
|
py
|
Python
|
reference/tests.py
|
thesteau/Portfolio-Janggi
|
cbc7b3c92037fbb9f27d812646f02c0345318a75
|
[
"MIT"
] | null | null | null |
reference/tests.py
|
thesteau/Portfolio-Janggi
|
cbc7b3c92037fbb9f27d812646f02c0345318a75
|
[
"MIT"
] | null | null | null |
reference/tests.py
|
thesteau/Portfolio-Janggi
|
cbc7b3c92037fbb9f27d812646f02c0345318a75
|
[
"MIT"
] | null | null | null |
import unittest
from game_files.game import JanggiGame
#from JanggiGame import JanggiGame
class TestJanggiGame(unittest.TestCase):
def setUp(self):
    """Define shared constants for game-state and move-result checks."""
    # Presumably the values reported by JanggiGame's game-state query --
    # TODO confirm against JanggiGame; only their spelling is visible here.
    self.unfinished = 'UNFINISHED'
    self.red_won = 'RED_WON'
    self.blue_won = 'BLUE_WON'
    # make_move() returns True for a legal move and False otherwise,
    # as asserted throughout the tests below.
    self.invalid_move = False
    self.valid_move = True
# @visibility('visible')
def test_game_can_be_instantiated(self):
"""test that we can create a JanggiGame object"""
g = JanggiGame()
self.assertIsInstance(g, JanggiGame)
# @visibility('visible')
def test_that_blue_can_start_the_game(self):
"""RULES: Blue can start the game"""
g = JanggiGame()
blue_move = g.make_move('c7', 'c6')
self.assertIs(blue_move, True)
# @visibility('after_due_date')
def test_that_red_cannot_start_the_game(self):
"""RULES: Red cannot start the game"""
g = JanggiGame()
red_move = g.make_move('c4', 'c5')
self.assertIs(red_move, False)
# @visibility('visible')
def test_passing_the_turn(self):
"""RULES: test that a player can pass the turn"""
g = JanggiGame()
g.make_move('a7', 'b7') # first move by Blue
move_result = g.make_move('a4', 'a4') # passing move by Red
self.assertIs(move_result, True)
move_result = g.make_move('b7', 'b7') # passing move by Blue
self.assertIs(move_result, True)
# @visibility('after_due_date')
def test_passing_the_turn_after_capturing(self):
"""RULES: test that a player can pass the turn, a turn after capturing another player's piece"""
g = JanggiGame()
g.make_move('a7', 'a6') # first move by Blue
g.make_move('a4', 'a4') # passing move by Red
g.make_move('a6', 'a5') # valid move by Blue
g.make_move('e4', 'e5') # valid move by Red
g.make_move('a5', 'a4') # capturing move by Blue
g.make_move('c4', 'c5') # valid move by Red
passing_move = g.make_move('a4', 'a4') # passing move by Blue
self.assertIs(passing_move, True)
valid_move_after_passing = g.make_move('c5', 'c6') # valid move by Red
self.assertIs(valid_move_after_passing, True)
# @visibility('after_due_date')
def test_passing_the_turn_after_being_captured(self):
"""RULES: test that a player can pass the turn, after their piece has been captured"""
g = JanggiGame()
g.make_move('a7', 'a6') # first move by Blue
g.make_move('a4', 'a4') # passing move by Red
g.make_move('a6', 'a5') # valid move by Blue
g.make_move('e4', 'e5') # valid move by Red
g.make_move('a5', 'a4') # capturing move by Blue
passing_move = g.make_move('c4', 'c4') # passing move by Red
self.assertIs(passing_move, True)
valid_move_after_passing = g.make_move('a4', 'a3') # valid move by Blue
self.assertIs(valid_move_after_passing, True)
# @visibility('visible')
def test_valid_forward_move_for_red_soldier(self):
"""SOLDIER: test in the beginning of the game that a red soldier can perform a valid forward move"""
#take a soldier for red
g = JanggiGame()
first_move = g.make_move('c7','c6') #because blue moves first
red_soldier_forward_move = g.make_move('c4','c5')
self.assertIs(red_soldier_forward_move, True)
# @visibility('visible')
def test_valid_sideway_move_for_red_soldier(self):
"""SOLDIER: test in the beginning of the game that a red soldier can perform a valid sideway move"""
# take a soldier for red
g = JanggiGame()
first_move = g.make_move('c7', 'c6') # because blue moves first
red_soldier_sideway_move = g.make_move('c4', 'd4')
self.assertIs(red_soldier_sideway_move, True)
# @visibility('visible')
def test_valid_forward_move_for_blue_soldier(self):
"""SOLDIER: test in the beginning of the game that a blue soldier can perform a valid forward move"""
g = JanggiGame()
blue_soldier_forward_move = g.make_move('c7', 'c6') # because blue moves first
self.assertIs(blue_soldier_forward_move, True)
# @visibility('visible')
def test_valid_sideway_move_for_blue_soldier(self):
"""SOLDIER: test in the beginning of the game that a blue soldier can perform a valid sideway move"""
g = JanggiGame()
blue_soldier_sideway_move = g.make_move('c7', 'b7') # because blue moves first
self.assertIs(blue_soldier_sideway_move, True)
# @visibility('after_due_date')
def test_invalid_backward_move_for_blue_soldier(self):
"""SOLDIER: test in the beginning of the game that a blue soldier cannot perform an backward move"""
g = JanggiGame()
blue_soldier_backward_move = g.make_move('c7', 'c8') # try backward move which is illegal
self.assertIs(blue_soldier_backward_move, False)
pass
# @visibility('after_due_date')
def test_invalid_diagonal_move_for_blue_soldier(self):
"""SOLDIER: test in the beginning of the game that a blue soldier cannot perform an diagonal move"""
g = JanggiGame()
blue_soldier_diagonal_move = g.make_move('c7', 'd6') # try diagonal which is illegal
self.assertIs(blue_soldier_diagonal_move, False)
pass
# @visibility('after_due_date')
def test_invalid_backward_move_for_red_soldier(self):
"""SOLDIER: test in the beginning of the game that a red soldier cannot perform an backward move"""
g = JanggiGame()
g.make_move('c7', 'c6') # because blue moves first
red_soldier_backward_move = g.make_move('c4', 'c3') # try backward move which is illegal
self.assertIs(red_soldier_backward_move, False)
pass
# @visibility('after_due_date')
def test_invalid_diagonal_move_for_red_soldier(self):
"""SOLDIER: test in the beginning of the game that a red soldier cannot perform an diagonal move"""
g = JanggiGame()
g.make_move('c7', 'c6') # because blue moves first
red_soldier_backward_move = g.make_move('c4', 'd5') # try diagonal move which is illegal
self.assertIs(red_soldier_backward_move, False)
# @visibility('visible')
def test_a_soldier_can_capture_a_piece(self):
"""SOLDIER: test that a soldier can capture another piece"""
g = JanggiGame()
g.make_move('a7', 'a6') # because blue moves first
g.make_move('i4', 'i5') # first move by Red
g.make_move('a6', 'a5') # valid move by Blue
g.make_move('i5', 'i6') # valid move by Red
capturing_move = g.make_move('a5', 'a4') # capturing move by Blue
self.assertIs(capturing_move, True) #move should be succesful
# @visibility('after_due_date')
def test_that_a_captured_piece_cannot_be_moved(self):
"""RULES: test that once a piece has been captured, it no longer exists for movement"""
g = JanggiGame()
g.make_move('a7', 'a6') # because blue moves first
g.make_move('i4', 'i5') # first move by Red
g.make_move('a6', 'a5') # valid move by Blue
g.make_move('i5', 'i6') # valid move by Red
capturing_move = g.make_move('a5', 'a4') # capturing move by Blue
move_on_a_non_existent_piece = g.make_move('a4', 'a5') # move from a location owned by Red
self.assertIs(move_on_a_non_existent_piece, False)
# @visibility('after_due_date')
def test_that_after_a_piece_is_captured_the_game_can_still_continue(self):
"""RULES: test that after a capture, the other player can still move"""
g = JanggiGame()
g.make_move('a7', 'a6') # because blue moves first
g.make_move('i4', 'i5') # first move by Red
g.make_move('a6', 'a5') # valid move by Blue
g.make_move('i5', 'i6') # valid move by Red
capturing_move = g.make_move('a5', 'a4') # capturing move by Blue
move_on_a_non_existent_piece = g.make_move('a4', 'a5') # move from a location owned by Red
valid_move_by_red = g.make_move('e4', 'e5') # valid move by Red
self.assertIs(valid_move_by_red, True)
# @visibility('visible')
def test_passing_the_turn_then_moving(self):
    """RULES: a player can pass the turn and play then continues normally.

    NOTE(review): this method was originally named
    ``test_passing_the_turn``, colliding with an earlier definition of
    that name in this class (which has Blue *pass* on the third move,
    'b7'->'b7', instead of moving 'b7'->'b6'); Python silently keeps
    only the later definition, so one of the two tests never ran.
    Renaming this one lets both variants execute. The two methods that
    used to follow here (``test_passing_the_turn_after_capturing`` and
    ``test_passing_the_turn_after_being_captured``) were byte-for-byte
    duplicates of earlier definitions and have been removed; the
    earlier, previously-shadowed copies now run in their place.
    """
    g = JanggiGame()
    move_result = g.make_move('a7', 'b7')  # first move by Blue
    self.assertIs(move_result, True)
    move_result = g.make_move('a4', 'a4')  # passing move by Red
    self.assertIs(move_result, True)
    move_result = g.make_move('b7', 'b6')  # valid move by Blue
    self.assertIs(move_result, True)
# @visibility('visible')
def test_valid_forward_move_for_red_chariot(self):
"""CHARIOT: test in the beginning of the game that a red chariot can perform a valid forward move"""
#take a soldier for red
g = JanggiGame()
first_move = g.make_move('c7','c6') #because blue moves first
red_chariot_forward_move = g.make_move('a1','a3')
self.assertIs(red_chariot_forward_move, True)
# @visibility('visible')
def test_valid_sideway_move_for_red_chariot(self):
"""CHARIOT: test in the beginning of the game that a red chariot can perform a valid sideway move"""
# take a soldier for red
g = JanggiGame()
g.make_move('c7', 'c6') # because blue moves first
g.make_move('a1', 'a2') # move the red chariot ahead
g.make_move('g7', 'g6') # blue moves
red_chariot_sideway_move = g.make_move('a2', 'c2') #chariot moves sideway
self.assertIs(red_chariot_sideway_move, True)
# @visibility('visible')
def test_valid_forward_move_for_blue_chariot(self):
"""CHARIOT: test in the beginning of the game that a blue chariot can perform a valid forward move"""
g = JanggiGame()
blue_chariot_forward_move = g.make_move('a10', 'a8') # because blue moves first
self.assertIs(blue_chariot_forward_move, True)
# @visibility('visible')
def test_valid_sideway_move_for_blue_chariot(self):
"""CHARIOT: test in the beginning of the game that a blue chariot can perform a valid sideway move"""
g = JanggiGame()
g.make_move('a10', 'a9') # because blue moves first
g.make_move('a1', 'a2') # move the red chariot ahead
blue_chariot_sideway_move = g.make_move('a9','c9')
self.assertIs(blue_chariot_sideway_move, True)
# @visibility('visible')
def test_valid_backward_move_for_blue_chariot_in_south(self):
"""CHARIOT: test in the beginning of the game that a blue chariot can perform a backward move to south"""
g = JanggiGame()
g.make_move('a10', 'a8') # because blue moves first
g.make_move('a1', 'a2') # move the red chariot ahead
blue_chariot_backward_move = g.make_move('a8', 'a9') #chariot moves backward
self.assertIs(blue_chariot_backward_move, True)
pass
# @visibility('visible')
def test_valid_backward_move_for_blue_chariot_in_west(self):
"""CHARIOT: test in the beginning of the game that a blue chariot can perform a backward move to west"""
g = JanggiGame()
g.make_move('a10', 'a9') # because blue moves first
g.make_move('a1', 'a2') # move the red chariot ahead
g.make_move('a9','c9') #blue chariot moves east
g.make_move('a2','a3') #move the red chariot ahead
blue_chariot_backward_move = g.make_move('c9','b9')
self.assertIs(blue_chariot_backward_move, True)
pass
# @visibility('after_due_date')
def test_invalid_diagonal_move_for_blue_chariot_in_northeast(self):
"""CHARIOT: test in the beginning of the game that a blue chariot cannot perform a diagonal move"""
g = JanggiGame()
g.make_move('a10', 'a8') # because blue moves first
g.make_move('a1', 'a3') # move the red chariot ahead
invalid_chariot_diagonal_move = g.make_move('a8','b7') #blue chariot moves east
self.assertIs(invalid_chariot_diagonal_move, False)
pass
# @visibility('after_due_date')
def test_invalid_diagonal_move_for_red_chariot_to_southeast(self):
"""CHARIOT: test in the beginning of the game that a red chariot cannot perform a diagonal move"""
g = JanggiGame()
g.make_move('a10', 'a8') # because blue moves first
g.make_move('a1', 'a3') # move the red chariot ahead
g.make_move('a7','b7') # blue soldier moves ahead
invalid_chariot_diagonal_move = g.make_move('a3', 'b4') #red chariot moves southeast
self.assertIs(invalid_chariot_diagonal_move, False)
pass
# @visibility('visible')
def test_valid_north_move_for_blue_cannon(self):
"""CANNON: test blue cannon can perform a valid north move"""
g = JanggiGame()
g.make_move('a7','b7') # blue soldier move sideway
g.make_move('a4','a5') # red move
cannon_valid_move = g.make_move('b8','b4') # blue cannon jumps over blue soldier
self.assertIs(cannon_valid_move, True)
# @visibility('visible')
def test_valid_east_move_for_blue_cannon(self):
"""CANNON: test blue cannon can perform a valid east move"""
g = JanggiGame()
g.make_move('c7', 'c6') # blue soldier move ahead
g.make_move('a4', 'a5') # red move
g.make_move('a7', 'b7') # blue soldier move sideway
g.make_move('a5', 'a6') # red move
g.make_move('b8', 'b6') # blue cannon jumps north
g.make_move('a6', 'a7') # red move
cannon_valid_move = g.make_move('b6', 'e6') # blue cannon jumps over blue soldier east two steps
self.assertIs(cannon_valid_move, True)
# @visibility('after_due_date')
def test_valid_west_move_for_blue_cannon(self):
"""CANNON: test blue cannon can perform a valid west move"""
g = JanggiGame()
g.make_move('g7','h7') #blue soldier moves to make a screen
g.make_move('g4','g5') #red soldier moves to make a future screen
g.make_move('h8','h5') #blue cannon moves front
g.make_move('g5','g5') #red passes
cannon_valid_west_move = g.make_move('h5','a5') #blue cannon jumps west
self.assertIs(cannon_valid_west_move, True)
#now try moving that cannon again back to make sure it was actually placed there
g.make_move('g5','g5') #red passes
valid_cannon_move_back = g.make_move('a5','h5') #blue cannon moves east
self.assertIs(valid_cannon_move_back, True)
# @visibility('after_due_date')
def test_valid_south_move_for_blue_cannon(self):
"""CANNON: test blue cannon can perform a valid south move"""
g = JanggiGame()
g.make_move('g7','h7') #blue soldier moves to make a screen
g.make_move('g4','g5') #red soldier moves to make a future screen
g.make_move('h8','h5') #blue cannon moves front
g.make_move('g5','g5') #red passes
g.make_move('h5','a5') #blue cannon jumps west
g.make_move('g5','g5') #red passes
valid_cannon_move_south = g.make_move('a5','a9') #blue cannon moves south
self.assertIs(valid_cannon_move_south, True)
#now try moving that cannon again back to make sure it was actually placed there
g.make_move('g5','g5') #red passes
valid_cannon_move_back = g.make_move('a9','a6') #blue cannon moves north
self.assertIs(valid_cannon_move_back, True)
# @visibility('visible')
def test_invalid_diagonal_move_for_blue_cannon_with_a_screen(self):
"""CANNON: test blue cannon cannot perform an invalid diagonal move with a screen"""
g = JanggiGame()
cannon_invalid_move = g.make_move('b8', 'd6') # blue cannon jumps northwest
self.assertIs(cannon_invalid_move, False)
# @visibility('visible')
def test_invalid_forward_move_for_blue_cannon_without_a_screen(self):
"""CANNON: test blue cannon cannot perform an invalid forward move without a screen"""
g = JanggiGame()
cannon_invalid_move = g.make_move('b8', 'b7') # blue cannon move forward without a screen
self.assertIs(cannon_invalid_move, False)
# @visibility('after_due_date')
def test_invalid_forward_move_for_blue_cannon_capturing_another_cannon(self):
"""CANNON: test blue cannon cannot perform an invalid capture of red cannon"""
g = JanggiGame()
g.make_move('c7', 'c6') # blue soldier move ahead
g.make_move('a4', 'a5') # red move
g.make_move('a7', 'b7') # blue soldier move sideway
g.make_move('a5', 'a6') # red move
g.make_move('b8', 'b6') # blue cannon jumps north
g.make_move('c4', 'b4') # red move
invalid_cannon_move = g.make_move('b6', 'b3') # cannon jumps over red soldier to capture red cannon
self.assertIs(invalid_cannon_move, False)
# @visibility('after_due_date')
def test_invalid_forward_move_for_blue_cannon_jumping_over_red_cannon(self):
"""CANNON: test blue cannon cannot perform an invalid jump over red cannon"""
g = JanggiGame()
g.make_move('c7', 'c6') # blue soldier move ahead
g.make_move('a4', 'a5') # red move
g.make_move('a7', 'b7') # blue soldier move sideway
g.make_move('a5', 'a6') # red move
g.make_move('b8', 'b6') # blue cannon jumps north
g.make_move('c4', 'c5') # red move
invalid_cannon_move = g.make_move('b6', 'b2') # blue cannon jumps over red cannon
self.assertIs(invalid_cannon_move, False)
# @visibility('after_due_date')
def test_valid_forward_move_for_blue_cannon_capturing_red_soldier(self):
"""CANNON: test blue cannon can capture red soldier successfully"""
g = JanggiGame()
g.make_move('c7', 'c6') # blue soldier move ahead
g.make_move('a4', 'a5') # red move
g.make_move('a7', 'b7') # blue soldier move sideway
g.make_move('a5', 'b5') # red move
capturing_move = g.make_move('b8', 'b5') # blue cannon jumps north to capture red soldier
self.assertIs(capturing_move, True)
move_on_a_non_existent_piece = g.make_move('b5', 'b6')
self.assertIs(move_on_a_non_existent_piece, False)
valid_move_by_red = g.make_move('e4', 'e5') # valid move by Red
self.assertIs(valid_move_by_red, True)
"""HORSES"""
# @visibility('visible')
def test_valid_forward_move_for_blue_horse_on_west_side(self):
"""HORSE: test blue horse from west can make a valid move forward"""
g = JanggiGame()
valid_move = g.make_move('c10', 'd8')
self.assertIs(valid_move, True)
# @visibility('visible')
def test_valid_forward_move_for_blue_horse_on_east_side(self):
"""HORSE: test blue horse from east can make a valid move forward"""
g = JanggiGame()
valid_move = g.make_move('h10', 'g8')
self.assertIs(valid_move, True)
# @visibility('visible')
def test_invalid_forward_move_for_blue_horse_on_west_side(self):
"""HORSE: test blue horse from west cannot make an invalid move forward"""
g = JanggiGame()
invalid_move = g.make_move('c10', 'd7')
self.assertIs(invalid_move, False)
# @visibility('visible')
def test_invalid_forward_move_for_blue_horse_on_east_side(self):
"""HORSE: test blue horse from east cannot make an invalid move forward"""
g = JanggiGame()
valid_move = g.make_move('h10', 'f8')
self.assertIs(valid_move, False)
# @visibility('visible')
def test_horse_is_blocked_by_a_piece(self):
"""HORSE: test horse cannot jump over their own piece"""
g = JanggiGame()
g.make_move('c10', 'd8') #blue horse moves
g.make_move('c1','d3') #red horse move
g.make_move('c7','d7') #blue soldier moves to block the blue horse
g.make_move('c4','d4') #red soldier moves to block the red horse
try:
invalid_move_because_of_block = g.make_move('d8','c6') #invalid move because blocked by own soldier
self.assertIs(invalid_move_because_of_block, False)
except:
self.fail("Blue horse from west should not be able to jump over a blue piece")
g.make_move('d8','d8') # blue passes the turn
try:
invalid_move_because_of_block = g.make_move('d3','d6') #invalid move by red because blocked by own soldier
self.assertIs(invalid_move_because_of_block, False)
except:
self.fail("Red horse from west should not be able to jump over a red piece")
g.make_move('d3','d3') #red passes the turn
g.make_move('h10', 'g8') #blue horse move
g.make_move('h2','g3') #red horse move
try:
invalid_move_because_of_block = g.make_move('g8','h6') #invalid move by blue because blocked by own soldier
self.assertIs(invalid_move_because_of_block, False)
except:
self.fail("Blue horse from east should not be able to jump over a blue piece")
g.make_move('g8','g8') #blue passes the turn
try:
invalid_move_because_of_block = g.make_move('g3','g5') #invalid move because blocked by own soldier
self.assertIs(invalid_move_because_of_block, False)
except:
self.fail("Red horse from east should not be able to jump over a red piece")
# @visibility('visible')
    def test_horse_can_capture_a_piece(self):
        """HORSE: test that a horse can capture another player's piece"""
        # NOTE(review): the bare `except:` clauses below also catch the
        # AssertionError raised by assertIs, so a wrong return value is
        # reported through the generic self.fail message rather than the
        # assertion's own diagnostics.
        g = JanggiGame()
        g.make_move('c10','d8') #blue horse moves
        g.make_move('c1','d3') #red horse moves
        g.make_move('e7','e6') #blue soldier
        g.make_move('e4','e5') #red
        g.make_move('c7','c6') #blue
        g.make_move('c4','c5') #red
        g.make_move('c6','c5') #blue soldier captures red
        g.make_move('e5','e6') #red soldier captures blue
        try:
            capturing_move = g.make_move('d8','e6')
            self.assertIs(capturing_move, True)
        except:
            self.fail("Blue Horse from West should be able to capture a Red soldier")
        try:
            capturing_move = g.make_move('d3','c5') #red capture
            self.assertIs(capturing_move, True)
        except:
            self.fail("Red Horse from West should be able to capture a Blue Soldier")
        g.make_move('h10', 'g8') # blue horse from east
        g.make_move('h1', 'i3') #red horse from east
        g.make_move('g7','h7') #blue
        g.make_move('g4', 'f4') #red
        g.make_move('h7', 'h6') #blue
        g.make_move('i4','i5') #red
        g.make_move('h6','h5') #blue
        g.make_move('i5','i6') #red
        g.make_move('g8','g8') #blue passes
        try:
            valid_capture_move = g.make_move('i3','h5')
            self.assertIs(valid_capture_move, True)
        except:
            self.fail("Red Horse from East should be able to capture a Blue Soldier")
        g.make_move('g8','g8') #blue passes
        g.make_move('i6','h6') #red moves
        try:
            valid_capture_move = g.make_move('g8', 'h6')
            self.assertIs(valid_capture_move, True)
        except:
            self.fail("Blue Horse from East should be able to capture a Red soldier")
# @visibility('after_due_date')
def test_horse_cannot_capture_own_piece(self):
"""HORSE: test that a horse cannot capture same player's piece"""
g = JanggiGame()
try:
invalid_capturing_move = g.make_move('c10', 'b8') # blue horse tries to get at the same place as Cannon
self.assertIs(invalid_capturing_move, False)
except:
self.fail("Blue Horse from west should not be able to capture a blue piece")
g.make_move('g7', 'g7') # blue passing move
try:
invalid_capturing_move = g.make_move('c1', 'b3') # red horse tries to get at the same place as Cannon
self.assertIs(invalid_capturing_move, False)
except:
self.fail("Red Horse from west should not be able to capture a red piece")
try:
g.make_move('h1', 'g3') #red horse moves
g.make_move('g7', 'g7') #blue passes
invalid_capturing_move = g.make_move('g3', 'f1') #red horse tries to get at the same place as red guard
self.assertIs(invalid_capturing_move, False)
except:
self.fail("Red Horse from west should not be able to capture a red piece")
g.make_move('f1','f1') #red passing move
try:
g.make_move('h10', 'g8') # blue horse moves
g.make_move('f1', 'f1') # red passes
invalid_capturing_move = g.make_move('g8', 'f10') # blue horse tries to get at the same place as blue guard
self.assertIs(invalid_capturing_move, False)
except:
self.fail("Blue Horse from east should not be able to capture a blue piece")
"""GUARDS"""
# @visibility('visible')
    def test_valid_move_for_guard(self):
        """GUARD: test valid moves for guard"""
        # NOTE(review): the bare `except:` clauses below also catch the
        # AssertionError from assertIs, so a wrong return value surfaces
        # through the generic self.fail message.
        g = JanggiGame()
        try:
            valid_move = g.make_move('d10','d9') #blue guard moves
            self.assertIs(valid_move, True)
        except:
            self.fail("Blue Guard from west should be able to make a valid move")
        try:
            valid_move = g.make_move('d1','d2') #red guard moves
            self.assertIs(valid_move, True)
        except:
            self.fail("Red Guard from west should be able to make a valid move")
        try:
            valid_move = g.make_move('f10','f9') #blue guard moves
            self.assertIs(valid_move, True)
        except:
            self.fail("Blue Guard from east should be able to make a valid move")
        try:
            valid_move = g.make_move('f1','f2') #red guard moves
            self.assertIs(valid_move, True)
        except:
            self.fail("Red Guard from east should be able to make a valid move")
        try:
            valid_move = g.make_move('d9','d8') #blue guard moves
            self.assertIs(valid_move, True)
        except:
            self.fail("Blue Guard from west should be able to make a valid move")
        try:
            valid_move = g.make_move('d2','d3') #red guard moves
            self.assertIs(valid_move, True)
        except:
            self.fail("Red Guard from west should be able to make a valid move")
        try:
            valid_move = g.make_move('f9','f8') #blue guard moves
            self.assertIs(valid_move, True)
        except:
            self.fail("Blue Guard from east should be able to make a valid move")
        try:
            valid_move = g.make_move('f2','f3') #red guard moves
            self.assertIs(valid_move, True)
        except:
            self.fail("Red Guard from east should be able to make a valid move")
        #perform diagonal moves
        g.make_move('e9','e10') #blue general moves
        g.make_move('e2','e1') #red general moves
        try:
            valid_diagonal_move = g.make_move('d8','e9') #blue guard moves
            self.assertIs(valid_diagonal_move, True)
        except:
            self.fail("Blue Guard should be able to make a diagonal move")
        try:
            valid_diagonal_move = g.make_move('d3','e2') #red guard moves
            self.assertIs(valid_diagonal_move, True)
        except:
            self.fail("Red Guard should be able to make a diagonal move")
# @visibility('after_due_date')
    def test_invalid_move_for_guard(self):
        """GUARD: test invalid moves for guard"""
        # Walk both pairs of guards to the palace edge, then verify each is
        # refused a step outside the palace.
        # NOTE(review): the bare `except:` clauses also catch the
        # AssertionError from assertIs, masking its message behind self.fail.
        g = JanggiGame()
        g.make_move('d10','d9') #blue moves
        g.make_move('d1','d2') #red moves
        g.make_move('f10','f9') #blue moves
        g.make_move('f1','f2') #red moves
        g.make_move('d9','d8') #blue moves
        g.make_move('d2','d3') #red moves
        g.make_move('f9','f8') #blue moves
        g.make_move('f2','f3') #red moves
        try:
            invalid_move = g.make_move('d8','c8') #blue guard tries to move outside the palace
            self.assertIs(invalid_move, False)
        except:
            self.fail("Blue Guard should not be able to move outside the palace")
        g.make_move('d8','d8') #blue passes
        try:
            invalid_move = g.make_move('d3','c3') #red guard tries to move outside the palace
            self.assertIs(invalid_move, False)
        except:
            self.fail("Red Guard should not be able to move outside the palace")
        g.make_move('d3','d3') #red passes
        try:
            invalid_move = g.make_move('f8','g8') #blue guard tries to move outside the palace
            self.assertIs(invalid_move, False)
        except:
            self.fail("Blue Guard should not be able to move outside the palace")
        g.make_move('f8','f8') #blue passes
        try:
            invalid_move = g.make_move('f3','g3') #red guard tries to move outside the palace
            self.assertIs(invalid_move, False)
        except:
            self.fail("Red Guard should not be able to move outside the palace")
# @visibility('visible')
    def test_valid_move_for_elephant(self):
        """ELEPHANT: test elephants can make valid moves"""
        # NOTE(review): the bare `except:` clauses also catch the
        # AssertionError from assertIs, masking its message behind self.fail.
        g = JanggiGame()
        try:
            valid_move = g.make_move('b10','d7') #blue elephant moves
            self.assertIs(valid_move, True)
        except:
            self.fail("Blue Elephant from west should be able to make a valid move")
        try:
            valid_move = g.make_move('b1','d4') #red elephant moves
            self.assertIs(valid_move, True)
        except:
            self.fail("Red Elephant from west should be able to make a valid move")
        g.make_move('e7', 'e6') # blue soldier moves to make place for eastern blue elephant
        g.make_move('e4', 'e5') # red soldier moves to make place for eastern red elephant
        try:
            valid_move = g.make_move('g10','e7') #blue elephant moves
            self.assertIs(valid_move, True)
        except:
            self.fail("Blue Elephant from east should be able to make a valid move")
        try:
            valid_move = g.make_move('g1','e4') #red elephant moves
            self.assertIs(valid_move, True)
        except:
            self.fail("Red Elephant from east should be able to make a valid move")
# @visibility('after_due_date')
    def test_invalid_move_for_elephant(self):
        """ELEPHANT: test elephant cannot make invalid moves"""
        g = JanggiGame()
        invalid_move = g.make_move('b10','c8') #blue
        self.assertIs(invalid_move, False)
        invalid_move = g.make_move('b10','b9') #moves forward like a soldier
        self.assertIs(invalid_move, False)
        # NOTE(review): the next move repeats the previous one verbatim while
        # the comment claims a diagonal; a one-step diagonal destination
        # (e.g. 'c9') was probably intended -- confirm.
        invalid_move = g.make_move('b10','b9') #move one diagonal
        self.assertIs(invalid_move, False)
        invalid_move = g.make_move('b10','b7') #tries to jump over cannon
        self.assertIs(invalid_move, False)
        g.make_move('b10','b10') #blue passes
        #try on red
        invalid_move = g.make_move('g1','g2') #red move like a soldier
        self.assertIs(invalid_move, False)
        invalid_move = g.make_move('g1','f2') #move one diagonal
        self.assertIs(invalid_move, False)
        invalid_move = g.make_move('g1','g5') #tries to jump over the soldier
        self.assertIs(invalid_move, False)
# @visibility('visible')
def test_valid_moves_for_general(self):
"""GENERAL: test general can perform valid moves"""
g = JanggiGame()
valid_move = g.make_move('e9', 'f8') # blue
self.assertIs(valid_move, True)
valid_move = g.make_move('e2', 'f3') # red
self.assertIs(valid_move, True)
valid_move = g.make_move('f8','e8') # blue
self.assertIs(valid_move, True)
valid_move = g.make_move('f3','e3') # red
self.assertIs(valid_move, True)
valid_move = g.make_move('e8','d8') # blue
self.assertIs(valid_move, True)
valid_move = g.make_move('e3', 'd3') # red
self.assertIs(valid_move, True)
valid_move = g.make_move('d8', 'd9') # blue
self.assertIs(valid_move, True)
valid_move = g.make_move('d3', 'd2') # red
self.assertIs(valid_move, True)
valid_move = g.make_move('d9','e9') #blue
self.assertIs(valid_move, True)
valid_move = g.make_move('d2','e2') #red
self.assertIs(valid_move, True)
valid_move = g.make_move('e9', 'e10') #blue
self.assertIs(valid_move, True)
valid_move = g.make_move('e2','e1') #red
self.assertIs(valid_move, True)
# @visibility('visible')
    def test_invalid_moves_for_general(self):
        """GENERAL: test general cannot perform invalid moves"""
        g = JanggiGame()
        valid_move = g.make_move('e9', 'f8') # blue
        valid_move = g.make_move('e2', 'f3') # red
        #moving outside the palace
        invalid_move = g.make_move('f8','g8') #blue
        self.assertIs(invalid_move, False)
        #blue passes
        g.make_move('f8','f8')
        invalid_move = g.make_move('f3', 'f4') # red
        self.assertIs(invalid_move, False)
        # red passes
        g.make_move('f3', 'f3')
        #prepare for the move
        g.make_move('f10','e10') #blue
        g.make_move('f1','e1') #red
        #moving two spaces
        invalid_move = g.make_move('f8','f10')
        self.assertIs(invalid_move, False)
        g.make_move('f8','f8') #blue passes
        # moving two spaces
        invalid_move = g.make_move('f3', 'f1')
        self.assertIs(invalid_move, False)
        g.make_move('f3','f3') #red passes
        g.make_move('e10','e9') #blue moves
        g.make_move('e1','e2') #red moves
        #moving at a place where another piece already exists
        invalid_move = g.make_move('f8','e9') #blue
        self.assertIs(invalid_move, False)
        g.make_move('f8','f8') #blue passes
        invalid_move = g.make_move('f3', 'e2') # red (comment fixed: original said blue)
        self.assertIs(invalid_move, False)
# @visibility('visible')
def test_unfinished_game_state(self):
"""RULES: test that get_game_state returns UNFINISHED correctly"""
g = JanggiGame()
g.make_move('e7','f7') #blue
self.assertEqual(g.get_game_state().upper(),'UNFINISHED')
g.make_move('e4','e5')#red
self.assertEqual(g.get_game_state().upper(), 'UNFINISHED')
# @visibility('after_due_date')
    def test_unfinished_game_state_after_capture(self):
        """RULES: test that get_game_state returns UNFINISHED correctly after a piece is captured"""
        g = JanggiGame()
        g.make_move('c7','c6') #blue moves
        g.make_move('e4','f4') #red moves
        g.make_move('b10','d7') #blue elephant moves
        g.make_move('d1','d2') #red
        g.make_move('c10','c9') #blue
        g.make_move('g4','h4') #red
        g.make_move('i10','i8') #blue chariot moves
        g.make_move('e2','f2') #red
        g.make_move('g7','h7') #blue
        g.make_move('h3','h5') #red
        g.make_move('h7','g7') #blue
        g.make_move('c1','e2') #red
        g.make_move('i7','i6') #blue
        g.make_move('g1','e4') #red
        g.make_move('i6','h6') #blue captures red cannon
        # A capture alone must not end the game.
        try:
            self.assertEqual(g.get_game_state().upper(),'UNFINISHED')
        except:
            self.fail("Game state should be UNFINISHED if no one has won")
# @visibility('visible')
    def test_is_in_check_blue(self):
        """RULES: test that is_in_check detects check correctly for blue"""
        # NOTE(review): the bare `except:` clauses below also catch the
        # AssertionError from the enclosed assertion, so a wrong value is
        # reported through the generic self.fail message.
        g = JanggiGame()
        g.make_move('c7','c6') #blue
        g.make_move('c1','d3') #red
        g.make_move('b10','d7') #blue
        g.make_move('b3','e3') #red
        g.make_move('c10','d8') #blue
        g.make_move('h1','g3') #red
        g.make_move('e7','e6') # blue
        g.make_move('e3', 'e6') #red cannon captures soldier -- check here
        try:
            self.assertEqual(g.get_game_state().upper(), 'UNFINISHED')
        except:
            self.fail("Game state should be unfinished before an actual check")
        try:
            self.assertIs(g.is_in_check('red'), False)
        except:
            self.fail("General is not in check and yet is_in_check returns True for red")
        try:
            self.assertIs(g.is_in_check('blue'), False)
        except:
            self.fail("General is not in check and yet is_in_check returns True for blue")
        g.make_move('h8','c8') #blue cannon moves -- check here
        g.make_move('d3','e5') #red
        g.make_move('c8','c4') #blue cannon captures red soldier -- check here
        try:
            self.assertEqual(g.get_game_state().upper(), 'UNFINISHED')
        except:
            self.fail("Game state should be unfinished before an actual check")
        try:
            self.assertIs(g.is_in_check('red'), False)
        except:
            self.fail("General is not in check and yet is_in_check returns True for red")
        try:
            self.assertIs(g.is_in_check('blue'), False)
        except:
            self.fail("General is not in check and yet is_in_check returns True for blue")
        g.make_move('e5','c4') #red horse captures blue cannon
        g.make_move('i10','i8') #blue chariot moves
        g.make_move('g4','f4') #red
        g.make_move('i8','f8') #blue chariot moves sideway
        g.make_move('g3','h5') #red
        g.make_move('h10','g8') #blue horse
        self.assertTrue(g.make_move('e6','e3')) #red CHECKS blue using a cannon -- special test for checks using a cannon -- check here
        try:
            self.assertEqual(g.get_game_state().upper(),'UNFINISHED')
        except:
            self.fail("Game state should be UNFINISHED when a general is in check but not checkmated")
        try:
            self.assertIs(g.is_in_check('red'), False)
        except:
            self.fail("Red General is not in check and yet is_in_check returns True for red")
        try:
            self.assertIs(g.is_in_check('blue'), True)
        except:
            self.fail("Blue General is in check and is_in_check should return True for blue")
        g.make_move('e9','d9') #general moves to avoid check -- check here
        try:
            self.assertIs(g.is_in_check('red'), False)
        except:
            self.fail("General is not in check and yet is_in_check returns True for red")
        try:
            self.assertIs(g.is_in_check('blue'), False)
        except:
            self.fail("General is not in check and yet is_in_check returns True for blue")
        try:
            self.assertEqual(g.get_game_state().upper(),'UNFINISHED')
        except:
            self.fail("Game state should be UNFINISHED when a general is in check")
# @visibility('after_due_date')
    def test_is_in_check_red(self):
        """RULES: test that is_in_check detects check correctly for red"""
        # NOTE(review): the bare `except:` clauses below also catch the
        # AssertionError from the enclosed assertion, so a wrong value is
        # reported through the generic self.fail message.
        g = JanggiGame()
        g.make_move('c7','c6') #blue
        g.make_move('c1','d3') #red
        g.make_move('b10','d7') #blue
        g.make_move('b3','e3') #red
        g.make_move('c10','d8') #blue
        g.make_move('h1','g3') #red
        g.make_move('e7','e6') #blue
        g.make_move('e3', 'e6') #red cannon captures soldier -- check here
        try:
            self.assertEqual(g.get_game_state().upper(), 'UNFINISHED')
        except:
            self.fail("Game state should be unfinished before an actual check")
        try:
            self.assertIs(g.is_in_check('red'), False)
        except:
            self.fail("General is not in check and yet is_in_check returns True for Red")
        try:
            self.assertIs(g.is_in_check('blue'), False)
        except:
            self.fail("General is not in check and yet is_in_check returns True for Blue")
        g.make_move('h8','c8') #blue moves -- check here
        g.make_move('d3','e5') #red
        g.make_move('c8','c4') #blue cannon captures red soldier -- check here
        try:
            self.assertEqual(g.get_game_state().upper(), 'UNFINISHED')
        except:
            self.fail("Game state should be unfinished before an actual win")
        try:
            self.assertIs(g.is_in_check('red'), False)
        except:
            self.fail("General is not in check and yet is_in_check returns True for Red")
        try:
            self.assertIs(g.is_in_check('blue'), False)
        except:
            self.fail("General is not in check and yet is_in_check returns True for Blue")
        g.make_move('e5','c4') #red horse captures blue cannon
        g.make_move('i10','i8') #blue chariot moves
        g.make_move('g4','f4') #red
        g.make_move('i8','f8') #blue chariot moves sideway
        g.make_move('g3','h5') #red
        g.make_move('h10','g8') #blue horse
        g.make_move('e6','e3') #red CHECKS blue using a cannon -- special test for checks using a cannon -- check here
        try:
            self.assertEqual(g.get_game_state().upper(),'UNFINISHED')
        except:
            self.fail("Game state should be UNFINISHED when a general is in check but not checkmated")
        try:
            self.assertIs(g.is_in_check('red'), False)
        except:
            self.fail("Red General is not in check and yet is_in_check returns True for red")
        try:
            self.assertIs(g.is_in_check('blue'), True)
        except:
            self.fail("Blue General is in check and is_in_check should return True for blue")
        g.make_move('e9','d9') #blue moves
        g.make_move('c4','e5') #red
        g.make_move('c6','d6') #blue
        g.make_move('e5','c4') #red
        g.make_move('a7','a6')#blue
        g.make_move('h3', 'h9') #red cannon moves to a position where it COULD Check but has not. -- check here
        g.make_move('a10','a7') #blue
        g.make_move('c4','d6') #red horse captures blue soldier
        g.make_move('a6','b6') #blue
        g.make_move('h5','g7') #red
        g.make_move('b8','b1')#blue cannon captures red elephant
        g.make_move('a1','b1') #red chariot captures blue cannon
        g.make_move('a7','a4') #blue
        g.make_move('b1','c1') #red
        g.make_move('a4','a2') #blue CHECKS red using a chariot -- check here
        try:
            self.assertIs(g.is_in_check('red'), True)
        except:
            self.fail("Red General is in check and yet is_in_check returns False for red")
        try:
            self.assertIs(g.is_in_check('blue'), False)
        except:
            self.fail("Blue General is not in check and yet is_in_check returns True for blue")
        try:
            self.assertEqual(g.get_game_state().upper(),'UNFINISHED')
        except:
            self.fail("Game state should be UNFINISHED when a general is in check but not checkmated")
        g.make_move('e2','e1') #red general moves to avoid capture -- check after this
        try:
            self.assertIs(g.is_in_check('red'), False)
        except:
            self.fail("General is not in check and yet is_in_check returns True for red")
        try:
            self.assertIs(g.is_in_check('blue'), False)
        except:
            self.fail("General is not in check and yet is_in_check returns True for blue")
        try:
            self.assertEqual(g.get_game_state().upper(),'UNFINISHED')
        except:
            self.fail("Game state should be UNFINISHED when a general is in check")
# @visibility('after_due_date')
    def test_is_in_check_after_a_check_was_countered(self):
        """RULES: test that is_in_check detects check correctly after a check is countered"""
        g = JanggiGame()
        g.make_move('c7','c6') #blue
        g.make_move('c1','d3') #red
        g.make_move('b10','d7') #blue
        g.make_move('b3','e3') #red
        g.make_move('c10','d8') #blue
        g.make_move('h1','g3') #red
        g.make_move('e7','e6') #blue
        g.make_move('e3', 'e6') #red cannon captures soldier -- check here
        g.make_move('h8','c8') #blue moves -- check here
        g.make_move('d3','e5') #red
        g.make_move('c8','c4') #blue cannon captures red soldier -- check here
        g.make_move('e5','c4') #red horse captures blue cannon
        g.make_move('i10','i8') #blue chariot moves
        g.make_move('g4','f4') #red
        g.make_move('i8','f8') #blue chariot moves sideway
        g.make_move('g3','h5') #red
        g.make_move('h10','g8') #blue horse
        g.make_move('e6','e3') #red CHECKS blue using a cannon -- special test for checks using a cannon -- check here
        g.make_move('e9','d9') #general moves to avoid check -- check here
        # After the general stepped out of the check, neither side is in check.
        try:
            self.assertIs(g.is_in_check('red'), False)
        except:
            self.fail("General is not in check and yet is_in_check returns True for Red")
        try:
            self.assertIs(g.is_in_check('blue'), False)
        except:
            self.fail("General is not in check and yet is_in_check returns True for Blue")
        try:
            self.assertEqual(g.get_game_state().upper(),'UNFINISHED')
        except:
            self.fail("Game state should be UNFINISHED since no one has won")
# @visibility('after_due_date')
    def test_a_checkmate_is_detected_correctly(self):
        """RULES: test that a checkmate is detected correctly (RED_WON after blue is mated)"""
        g = JanggiGame()
        g.make_move('c7','c6') #blue
        g.make_move('c1','d3') #red
        g.make_move('b10','d7') #blue
        g.make_move('b3','e3') #red
        g.make_move('c10','d8') #blue
        g.make_move('h1','g3') #red
        g.make_move('e7','e6') #blue
        g.make_move('e3', 'e6') #red cannon captures soldier -- check here
        try:
            self.assertEqual(g.get_game_state().upper(), 'UNFINISHED')
        except:
            self.fail("Game state should be unfinished before an actual check")
        try:
            self.assertIs(g.is_in_check('red'), False)
        except:
            self.fail("General is not in check and yet is_in_check returns True for Red")
        try:
            self.assertIs(g.is_in_check('blue'), False)
        except:
            self.fail("General is not in check and yet is_in_check returns True for Blue")
        g.make_move('h8','c8') #blue moves -- check here
        g.make_move('d3','e5') #red
        g.make_move('c8','c4') #blue cannon captures red soldier -- check here
        try:
            self.assertEqual(g.get_game_state().upper(), 'UNFINISHED')
        except:
            self.fail("Game state should be unfinished before an actual win")
        try:
            self.assertIs(g.is_in_check('red'), False)
        except:
            self.fail("General is not in check and yet is_in_check returns True for Red")
        try:
            self.assertIs(g.is_in_check('blue'), False)
        except:
            self.fail("General is not in check and yet is_in_check returns True for Blue")
        g.make_move('e5','c4') #red horse captures blue cannon
        g.make_move('i10','i8') #blue chariot moves
        g.make_move('g4','f4') #red
        g.make_move('i8','f8') #blue chariot moves sideway
        g.make_move('g3','h5') #red
        g.make_move('h10','g8') #blue horse
        g.make_move('e6','e3') #red CHECKS blue using a cannon -- special test for checks using a cannon -- check here
        try:
            self.assertEqual(g.get_game_state().upper(),'UNFINISHED')
        except:
            self.fail("Game state should be UNFINISHED when a general is in check but not checkmated")
        try:
            self.assertIs(g.is_in_check('red'), False)
        except:
            self.fail("Red General is not in check and yet is_in_check returns True for red")
        try:
            self.assertIs(g.is_in_check('blue'), True)
        except:
            self.fail("Blue General is in check and is_in_check should return True for blue")
        g.make_move('e9','d9') #blue moves
        g.make_move('c4','e5') #red
        g.make_move('c6','d6') #blue
        g.make_move('e5','c4') #red
        g.make_move('a7','a6')#blue
        g.make_move('h3', 'h9') #red cannon moves to a position where it COULD Check but has not. -- check here
        g.make_move('a10','a7') #blue
        g.make_move('c4','d6') #red horse captures blue soldier
        g.make_move('a6','b6') #blue
        g.make_move('h5','g7') #red
        g.make_move('b8','b1')#blue cannon captures red elephant
        g.make_move('a1','b1') #red chariot captures blue cannon
        g.make_move('a7','a4') #blue
        g.make_move('b1','c1') #red
        g.make_move('a4','a2') #blue CHECKS red using a chariot -- check here
        g.make_move('e2','e1') #red general moves to avoid capture -- check after this
        g.make_move('i7','h7') #blue moves
        g.make_move('c1','c9') #red chariot moves to the palace to CHECKMATE blue
        try:
            self.assertIs(g.is_in_check('red'), False)
        except:
            self.fail("Red General is not in check and yet is_in_check returns True for red")
        try:
            self.assertIs(g.is_in_check('blue'), True)
        except:
            self.fail("Blue General is in check and is_in_check should return True for blue")
        try:
            self.assertEqual(g.get_game_state().upper(),'RED_WON')
        except:
            self.fail("Game state should be RED_WON when the BLUE general is checkmated")
# @visibility('after_due_date')
    def test_a_move_cannot_be_made_after_checkmate(self):
        """RULES: test that a move cannot be made after checkmate i.e. the one of the players has won"""
        # Replays the checkmate sequence used elsewhere in this suite, then
        # verifies make_move refuses every further move (piece or general).
        g = JanggiGame()
        g.make_move('c7','c6') #blue
        g.make_move('c1','d3') #red
        g.make_move('b10','d7') #blue
        g.make_move('b3','e3') #red
        g.make_move('c10','d8') #blue
        g.make_move('h1','g3') #red
        g.make_move('e7','e6') #blue
        g.make_move('e3', 'e6') #red cannon captures soldier -- check here
        g.make_move('h8','c8') #blue moves -- check here
        g.make_move('d3','e5') #red
        g.make_move('c8','c4') #blue cannon captures red soldier -- check here
        g.make_move('e5','c4') #red horse captures blue cannon
        g.make_move('i10','i8') #blue chariot moves
        g.make_move('g4','f4') #red
        g.make_move('i8','f8') #blue chariot moves sideway
        g.make_move('g3','h5') #red
        g.make_move('h10','g8') #blue horse
        g.make_move('e6','e3') #red CHECKS blue using a cannon -- special test for checks using a cannon -- check here
        g.make_move('e9','d9') #blue moves
        g.make_move('c4','e5') #red
        g.make_move('c6','d6') #blue
        g.make_move('e5','c4') #red
        g.make_move('a7','a6')#blue
        g.make_move('h3', 'h9') #red cannon moves to a position where it COULD Check but has not. -- check here
        g.make_move('a10','a7') #blue
        g.make_move('c4','d6') #red horse captures blue soldier
        g.make_move('a6','b6') #blue
        g.make_move('h5','g7') #red
        g.make_move('b8','b1')#blue cannon captures red elephant
        g.make_move('a1','b1') #red chariot captures blue cannon
        g.make_move('a7','a4') #blue
        g.make_move('b1','c1') #red
        g.make_move('a4','a2') #blue CHECKS red using a chariot -- check here
        g.make_move('e2','e1') #red general moves to avoid capture -- check after this
        g.make_move('i7','h7') #blue moves
        g.make_move('c1','c9') #red chariot moves to the palace to CHECKMATE blue
        try:
            self.assertIs(g.is_in_check('red'), False)
        except:
            self.fail("Red General is not in check and yet is_in_check returns True for red")
        try:
            self.assertIs(g.is_in_check('blue'), True)
        except:
            self.fail("Blue General is in check and is_in_check should return True for blue")
        try:
            self.assertEqual(g.get_game_state().upper(),'RED_WON')
        except:
            self.fail("Game state should be RED_WON when the BLUE general is checkmated")
        #try moving a non-General piece
        try:
            invalid_move_after_checkmate = g.make_move('c9', 'c1') #red chariot move
            self.assertIs(invalid_move_after_checkmate, False)
            invalid_move_after_checkmate = g.make_move('i8', 'i7') # red Elephant move
            self.assertIs(invalid_move_after_checkmate, False)
        except:
            self.fail("make_move should not allow any move after a checkmate")
        # try moving the General piece
        try:
            invalid_move_after_checkmate = g.make_move('d9', 'e9') # blue General move
            self.assertIs(invalid_move_after_checkmate, False)
            invalid_move_after_checkmate = g.make_move('e1', 'f2') # red General move
            self.assertIs(invalid_move_after_checkmate, False)
        except:
            self.fail("make_move should not allow any move after a checkmate")
# @visibility('visible')
def test_turn_taking_is_implemented_correctly(self):
"""RULES: test that turn taking is implemented correctly"""
g = JanggiGame()
#blue moves first
try:
invalid_first_move = g.make_move('a1','a3') #red tries to move first
self.assertIs(invalid_first_move, False)
except:
self.fail("Red should not be able to move first")
try:
valid_first_move = g.make_move('a7','a6') #blue moves first
self.assertIs(valid_first_move, True)
except:
self.fail("Blue should be able to move first")
try:
valid_first_move = g.make_move('a1','a2') #red moves now
self.assertIs(valid_first_move, True)
except:
self.fail("Red should be able to move after blue")
try:
valid_first_move = g.make_move('d10','d9') #blue moves
self.assertIs(valid_first_move, True)
except:
self.fail("Blue should be able to move after red")
try:
valid_first_move = g.make_move('c4', 'c5') # red moves now
self.assertIs(valid_first_move, True)
except:
self.fail("Red should be able to move after blue")
# @visibility('after_due_date')
def test_that_horse_is_transposed_with_elephant_in_the_initial_setup_by_trying_to_move_it(self):
    """RULES: Transposition of Horse and Elephant is correct in the beginning"""
    # NOTE: "except Exception" (not bare except) so system-level exits propagate.
    g = JanggiGame()
    g.make_move('e7', 'e6') # blue soldier moves to make place for eastern blue elephant
    g.make_move('e4', 'e5') # red soldier moves to make place for eastern red elephant
    try:
        valid_move = g.make_move('g10','e7') #blue elephant moves
        self.assertIs(valid_move, True)
    except Exception:
        self.fail("Blue Elephant from east is either not transposed correctly with Horse in the inital setup or cannot perform valid moves")
    try:
        valid_move = g.make_move('g1','e4') #red elephant moves
        self.assertIs(valid_move, True)
    except Exception:
        self.fail("Red Elephant from east is either not transposed correctly with Horse in the inital setup or cannot perform valid moves")
# @visibility('after_due_date')
def test_that_a_check_by_a_cannon_outside_the_palace_is_detected(self):
    """RULES: Check by a cannon outside the palace is detected"""
    # NOTE: "except Exception" (not bare except) so system-level exits propagate.
    g = JanggiGame()
    g.make_move('c7','c6') #blue
    g.make_move('c1','d3') #red
    g.make_move('b10','d7') #blue
    g.make_move('b3','e3') #red
    g.make_move('c10','d8')
    g.make_move('h1','g3')
    g.make_move('e7','e6')
    g.make_move('e3', 'e6') #red cannon captures soldier -- check here
    g.make_move('h8','c8') #blue cannon moves -- check here
    g.make_move('d3','e5') #red
    g.make_move('c8','c4') #blue cannon captures red soldier -- check here
    g.make_move('e5','c4') #red horse captures blue cannon
    g.make_move('i10','i8') #blue chariot moves
    g.make_move('g4','f4')
    g.make_move('i8','f8') #blue chariot moves sideway
    g.make_move('g3','h5')
    g.make_move('h10','g8') #blue horse
    self.assertTrue(g.make_move('e6','e3')) #red CHECKS blue using a cannon -- special test for checks using a cannon -- check here
    try:
        self.assertEqual(g.get_game_state().upper(),'UNFINISHED')
    except Exception:
        self.fail("Game state should be UNFINISHED when a general is in check but not checkmated")
    try:
        self.assertIs(g.is_in_check('red'), False)
    except Exception:
        self.fail("Red General is not in check and yet is_in_check returns True for red")
    try:
        self.assertIs(g.is_in_check('blue'), True)
    except Exception:
        self.fail("Blue General is in check with a Cannon outside the palace and is_in_check should return True for blue")
# @visibility('after_due_date')
def test_that_a_check_by_a_horse_outside_the_palace_is_detected(self):
    """RULES: Check by a horse outside the palace is detected"""
    # NOTE: "except Exception" (not bare except) so system-level exits propagate.
    g = JanggiGame()
    g.make_move('c7','c6') #blue
    g.make_move('c1','d3') #red
    g.make_move('b10','d7') #blue
    g.make_move('b3','e3') #red
    g.make_move('c10','d8')
    g.make_move('h1','g3')
    g.make_move('e7','e6')
    g.make_move('e3', 'e6') #red cannon captures soldier -- check here
    g.make_move('h8','c8') #blue cannon moves -- check here
    g.make_move('d3','e5') #red
    g.make_move('c8','c4') #blue cannon captures red soldier -- check here
    g.make_move('e5','c4') #red horse captures blue cannon
    g.make_move('i10','i8') #blue chariot moves
    g.make_move('g4','f4')
    g.make_move('i8','f8') #blue chariot moves sideway
    g.make_move('g3','h5')
    g.make_move('h10','g8') #blue horse
    g.make_move('e6','e3') #red CHECKS blue using a cannon -- special test for checks using a cannon -- check here
    g.make_move('e9','d9') #blue general moves to counter check
    g.make_move('c4','b6') #red horse moves
    g.make_move('g7','f7') #blue soldier moves
    g.make_move('e4','e5') #red moves
    g.make_move('f7','e7') #blue moves
    g.make_move('e5','e6') #red moves
    g.make_move('d9','e9') #blue general moves
    g.make_move('b6','a8') #red horse moves southwest
    g.make_move('d8','b7') #blue horse moves
    g.make_move('a8','b10') #red horse moves
    g.make_move('e9','e9') #blue passes
    g.make_move('b10','c8') #red horse puts blue general in check
    try:
        self.assertEqual(g.get_game_state().upper(),'UNFINISHED')
    except Exception:
        self.fail("Game state should be UNFINISHED when a general is in check but not checkmated")
    try:
        self.assertIs(g.is_in_check('red'), False)
    except Exception:
        self.fail("Red General is not in check and yet is_in_check returns True for red")
    try:
        self.assertIs(g.is_in_check('blue'), True)
    except Exception:
        self.fail("Blue General is in check with a Horse outside the palace and is_in_check should return True for blue")
# @visibility('after_due_date')
def test_that_countering_the_check_by_capturing_the_cannon_is_detected_correctly(self):
    """RULES: Countering the check by capturing the cannon is detected correctly"""
    # NOTE: "except Exception" (not bare except) so system-level exits propagate.
    g = JanggiGame()
    g.make_move('c7','c6') #blue
    g.make_move('c1','d3') #red
    g.make_move('b10','d7') #blue
    g.make_move('b3','b3') #red passes
    g.make_move('c10','d8')
    g.make_move('h1','g3')
    g.make_move('e7','e6')
    g.make_move('b3','b3') #red passes
    g.make_move('e6', 'f6') #blue soldier moves sidewways
    g.make_move('b3','b3') #red passes
    g.make_move('h8','c8') #blue cannon moves -- check here
    g.make_move('d3','e5') #red
    g.make_move('c8','c4') #blue cannon captures red soldier -- check here
    g.make_move('e5','c4') #red horse captures blue cannon
    g.make_move('i10','i8') #blue chariot moves
    g.make_move('g4','f4')
    g.make_move('i8','f8') #blue chariot moves sideway
    g.make_move('g3','h5')
    g.make_move('h10','g8') #blue horse
    g.make_move('d1','d2') #red moves
    g.make_move('e9','e9') #blue passes
    g.make_move('d2','d3') #red moves
    self.assertTrue(g.make_move('e9','e9')) #blue passes
    self.assertTrue(g.make_move('d3','d3')) #red passes
    #prep moves to kill the cannon in future
    self.assertTrue(g.make_move('g7','g6')) #blue soldier moves
    self.assertTrue(g.make_move('f4','g4')) #red soldier moves sideways
    self.assertTrue(g.make_move('f6','f5')) #blue soldier moves
    self.assertTrue(g.make_move('g4','g5')) #red soldier moves
    self.assertTrue(g.make_move('g8','f6')) #blue horse moves
    self.assertTrue(g.make_move('e2','e2')) #red passes
    self.assertTrue(g.make_move('f6','d5')) #blue horse moves
    self.assertTrue(g.make_move('b3','e3')) #red CHECKS blue using a cannon -- special test for checks using a cannon -- check here
    try:
        self.assertEqual(g.get_game_state().upper(),'UNFINISHED')
    except Exception:
        self.fail("Game state should be UNFINISHED when a general is in check but not checkmated")
    try:
        self.assertIs(g.is_in_check('red'), False)
    except Exception:
        self.fail("Red General is not in check and yet is_in_check returns True for red")
    try:
        self.assertIs(g.is_in_check('blue'), True)
    except Exception:
        self.fail("Blue General is in check with a Cannon outside the palace and is_in_check should return True for blue")
    counter_check_move = g.make_move('d5','e3') #evade the cannon check by capturing the cannon using a horse
    self.assertTrue(counter_check_move)
    try:
        self.assertIs(g.is_in_check('red'), False)
    except Exception:
        self.fail("Red General is not in check and yet is_in_check returns True for red")
    try:
        self.assertIs(g.is_in_check('blue'), False)
    except Exception:
        self.fail("Countering of the check by cannon by capturing it should be detected correctly and is_in_check should return False for blue")
    try:
        self.assertEqual(g.get_game_state().upper(),'UNFINISHED')
    except Exception:
        self.fail("Game state should be UNFINISHED when a general is in check but not checkmated")
# @visibility('after_due_date')
def test_that_countering_the_check_by_blocking_the_cannon_is_detected_correctly(self):
    """RULES: Countering the check by blocking the cannon is detected correctly"""
    # NOTE: "except Exception" (not bare except) so system-level exits propagate.
    g = JanggiGame()
    g.make_move('c7','c6') #blue
    g.make_move('c1','d3') #red
    g.make_move('b10','d7') #blue
    g.make_move('b3','b3') #red passes
    g.make_move('c10','d8')
    g.make_move('h1','g3')
    g.make_move('e7','e6')
    g.make_move('b3','b3') #red passes
    g.make_move('e6', 'f6') #blue soldier moves sidewways
    g.make_move('b3','b3') #red passes
    g.make_move('h8','c8') #blue cannon moves -- check here
    g.make_move('d3','e5') #red
    g.make_move('c8','c4') #blue cannon captures red soldier -- check here
    g.make_move('e5','c4') #red horse captures blue cannon
    g.make_move('i10','i8') #blue chariot moves
    g.make_move('g4','f4')
    g.make_move('i8','f8') #blue chariot moves sideway
    g.make_move('g3','h5')
    g.make_move('h10','g8') #blue horse
    g.make_move('d1','d2') #red moves
    g.make_move('e9','e9') #blue passes
    g.make_move('d2','d3') #red moves
    g.make_move('e9','e9') #blue passes
    self.assertTrue(g.make_move('b3','e3')) #red CHECKS blue using a cannon -- special test for checks using a cannon -- check here
    try:
        self.assertEqual(g.get_game_state().upper(),'UNFINISHED')
    except Exception:
        self.fail("Game state should be UNFINISHED when a general is in check but not checkmated")
    try:
        self.assertIs(g.is_in_check('red'), False)
    except Exception:
        self.fail("Red General is not in check and yet is_in_check returns True for red")
    try:
        self.assertIs(g.is_in_check('blue'), True)
    except Exception:
        self.fail("Blue General is in check with a Cannon outside the palace and is_in_check should return True for blue")
    counter_check_move = g.make_move('f8','e8') #evade the cannon check by blocking the cannon using the chariot
    self.assertTrue(counter_check_move)
    try:
        self.assertIs(g.is_in_check('red'), False)
    except Exception:
        self.fail("Red General is not in check and yet is_in_check returns True for red")
    try:
        self.assertIs(g.is_in_check('blue'), False)
    except Exception:
        self.fail("Countering of the check by cannon by blocking it should be detected correctly and is_in_check should return False for blue")
    try:
        self.assertEqual(g.get_game_state().upper(),'UNFINISHED')
    except Exception:
        self.fail("Game state should be UNFINISHED when a general is in check but not checkmated")
# @visibility('after_due_date')
def test_that_check_forces_a_move_to_be_made_to_counter_the_check(self):
    """RULES: Check forces a move to be made to counter the check"""
    # NOTE: "except Exception" (not bare except) so system-level exits propagate.
    g = JanggiGame()
    g.make_move('c7','c6') #blue
    g.make_move('c1','d3') #red
    g.make_move('b10','d7') #blue
    g.make_move('b3','b3') #red passes
    g.make_move('c10','d8')
    g.make_move('h1','g3')
    g.make_move('e7','e6')
    g.make_move('b3','b3') #red passes
    g.make_move('e6', 'f6') #blue soldier moves sidewways
    g.make_move('b3','b3') #red passes
    g.make_move('h8','c8') #blue cannon moves -- check here
    g.make_move('d3','e5') #red
    g.make_move('c8','c4') #blue cannon captures red soldier -- check here
    g.make_move('e5','c4') #red horse captures blue cannon
    g.make_move('i10','i8') #blue chariot moves
    g.make_move('g4','f4')
    g.make_move('i8','f8') #blue chariot moves sideway
    g.make_move('g3','h5')
    g.make_move('h10','g8') #blue horse
    g.make_move('d1','d2') #red moves
    g.make_move('e9','e9') #blue passes
    g.make_move('d2','d3') #red moves
    g.make_move('e9','e9') #blue passes
    self.assertTrue(g.make_move('b3','e3')) #red CHECKS blue using a cannon -- special test for checks using a cannon -- check here
    try:
        self.assertEqual(g.get_game_state().upper(),'UNFINISHED')
    except Exception:
        self.fail("Game state should be UNFINISHED when a general is in check but not checkmated")
    try:
        self.assertIs(g.is_in_check('red'), False)
    except Exception:
        self.fail("Red General is not in check and yet is_in_check returns True for red")
    try:
        self.assertIs(g.is_in_check('blue'), True)
    except Exception:
        self.fail("Blue General is in check with a Cannon outside the palace and is_in_check should return True for blue")
    non_counter_check_move = g.make_move('f8','f7') #a move that does not evade the check
    try:
        self.assertFalse(non_counter_check_move)
    except Exception:
        self.fail("Blue should be forced to make a move that evades a check when checked")
    non_counter_check_move = g.make_move('e9','e10') #general moves not countering the check
    try:
        self.assertFalse(non_counter_check_move)
    except Exception:
        self.fail("Blue should be forced to make a move that evades a check when checked")
    non_counter_check_move = g.make_move('f6','f5') #soldier moves not countering the check
    try:
        self.assertFalse(non_counter_check_move)
    except Exception:
        self.fail("Blue should be forced to make a move that evades a check when checked")
# @visibility('after_due_date')
def test_red_won(self):
    """RULES: Test that red win is detected correctly."""
    # NOTE: "except Exception" (not bare except) so system-level exits propagate.
    g = JanggiGame()
    g.make_move('c7','c6') #blue
    g.make_move('c1','d3') #red
    g.make_move('b10','d7') #blue
    g.make_move('b3','e3') #red
    g.make_move('c10','d8')
    g.make_move('h1','g3')
    g.make_move('e7','e6')
    g.make_move('e3', 'e6') #red cannon captures soldier -- check here
    g.make_move('h8','c8') #blue moves -- check here
    g.make_move('d3','e5') #red
    g.make_move('c8','c4') #blue cannon captures red soldier -- check here
    g.make_move('e5','c4') #red horse captures blue cannon
    g.make_move('i10','i8') #blue chariot moves
    g.make_move('g4','f4')
    g.make_move('i8','f8') #blue chariot moves sideway
    g.make_move('g3','h5')
    g.make_move('h10','g8') #blue horse
    g.make_move('e6','e3') #red CHECKS blue using a cannon -- special test for checks using a cannon -- check here
    g.make_move('e9','d9') #blue moves
    g.make_move('c4','e5') #red
    g.make_move('c6','d6')
    g.make_move('e5','c4')
    g.make_move('a7','a6')#blue
    g.make_move('h3', 'h9') #red cannon moves to a position where it COULD Check but has not. -- check here
    g.make_move('a10','a7')
    g.make_move('c4','d6') #red horse captures blue soldier
    g.make_move('a6','b6')
    g.make_move('h5','g7')
    g.make_move('b8','b1')#blue cannon captures red elephant
    g.make_move('a1','b1') #red chariot captures blue cannon
    g.make_move('a7','a4')
    g.make_move('b1','c1')
    g.make_move('a4','a2') #blue CHECKS red using a chariot -- check here
    g.make_move('e2','e1') #red general moves to avoid capture -- check after this
    g.make_move('i7','h7') #blue moves
    g.make_move('c1','c9') #red chariot moves to the palace to CHECKMATE blue
    try:
        self.assertIs(g.is_in_check('red'), False)
    except Exception:
        self.fail("Red General is not in check and yet is_in_check returns True for red")
    try:
        self.assertIs(g.is_in_check('blue'), True)
    except Exception:
        self.fail("Blue General is in check and is_in_check should return True for blue")
    try:
        self.assertEqual(g.get_game_state().upper(),'RED_WON')
    except Exception:
        self.fail("Game state should be RED_WON when the BLUE general is checkmated")
| 42.540541
| 148
| 0.610588
| 10,515
| 72,404
| 4.029957
| 0.029196
| 0.114596
| 0.128496
| 0.035587
| 0.929699
| 0.900696
| 0.862655
| 0.830301
| 0.793345
| 0.764673
| 0
| 0.024476
| 0.265869
| 72,404
| 1,701
| 149
| 42.56555
| 0.772736
| 0.245387
| 0
| 0.793292
| 0
| 0
| 0.192388
| 0
| 0
| 0
| 0
| 0
| 0.163027
| 1
| 0.051482
| false
| 0.022621
| 0.00156
| 0
| 0.053822
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
0fa7912ed4a573996c7dc4c4b9f4871605c2b21a
| 1,488
|
py
|
Python
|
tests/2d/numpy/methods.py
|
oojBuffalo/micropython-ulab
|
4407bec88c3a7585ffbdfdd98e72bed12329ff3c
|
[
"MIT"
] | 232
|
2019-10-30T02:47:59.000Z
|
2022-03-29T13:35:42.000Z
|
tests/2d/numpy/methods.py
|
oojBuffalo/micropython-ulab
|
4407bec88c3a7585ffbdfdd98e72bed12329ff3c
|
[
"MIT"
] | 325
|
2019-10-25T00:27:29.000Z
|
2022-03-16T19:47:45.000Z
|
tests/2d/numpy/methods.py
|
oojBuffalo/micropython-ulab
|
4407bec88c3a7585ffbdfdd98e72bed12329ff3c
|
[
"MIT"
] | 73
|
2019-11-04T19:31:22.000Z
|
2022-03-10T03:11:41.000Z
|
# Golden-output test for ndarray methods (copy, dtype, flatten, itemsize,
# shape, reshape, size, tobytes, transpose, byteswap).  Runs against ulab's
# numpy when available, falling back to CPython numpy.  The printed output
# must match the recorded expected output exactly, so the print order below
# mirrors the original script precisely.
try:
    from ulab import numpy as np
except ImportError:
    import numpy as np

# copy() on 1-D and 2-D arrays of several dtypes
for original in (
    np.array([1, 2, 3, 4], dtype=np.int8),
    np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.int16),
):
    print(original.copy())

a = np.array([[1, 2, 3], [4, 5, 6], [7, 8, 9]], dtype=np.float)
print(a.copy())
print(a.dtype)
print(a.flatten())

# itemsize for each supported dtype
for dt in (np.uint8, np.uint16, np.int8, np.int16, np.float):
    print(np.array([1, 2, 3], dtype=dt).itemsize)

# shape / reshape / size
print(np.array([1, 2, 3], dtype=np.float).shape)
print(np.array([[1], [2], [3]], dtype=np.float).shape)
print(np.array([[1], [2], [3]], dtype=np.float).reshape((1, 3)))
print(np.array([[1], [2], [3]]).size)
print(np.array([1, 2, 3], dtype=np.float).size)

# tobytes for unsigned and signed byte arrays
for dt in (np.uint8, np.int8):
    print(np.array([1, 2, 3], dtype=dt).tobytes())

# transpose of a row vector and a column vector
print(np.array([1, 2, 3], dtype=np.float).transpose().shape)
print(np.array([[1], [2], [3]], dtype=np.float).transpose().shape)

# byteswap: inplace=False leaves the source untouched; inplace=True mutates it
for dt in (np.uint8, np.uint16, np.float):
    arr = np.array([1, 2, 3, 4, 5, 6], dtype=dt)
    swapped_copy = arr.byteswap(inplace=False)
    print(arr)
    print(swapped_copy)
    swapped_in_place = arr.byteswap(inplace=True)
    print(arr)
    print(swapped_in_place)
| 28.615385
| 64
| 0.640457
| 305
| 1,488
| 3.12459
| 0.134426
| 0.146905
| 0.167891
| 0.188877
| 0.864638
| 0.841553
| 0.825813
| 0.813221
| 0.813221
| 0.579224
| 0
| 0.072432
| 0.090726
| 1,488
| 51
| 65
| 29.176471
| 0.631929
| 0
| 0
| 0.48
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.06
| 0
| 0.06
| 0.62
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
|
0ff1dbbf54aa4b910eb2f7724e37b77fcebc6885
| 175,657
|
py
|
Python
|
google/api/serviceusage/v1beta1/api-serviceusage-v1beta1-py/tests/unit/gapic/serviceusage_v1beta1/test_service_usage.py
|
googleapis/googleapis-gen
|
d84824c78563d59b0e58d5664bfaa430e9ad7e7a
|
[
"Apache-2.0"
] | 7
|
2021-02-21T10:39:41.000Z
|
2021-12-07T07:31:28.000Z
|
google/api/serviceusage/v1beta1/api-serviceusage-v1beta1-py/tests/unit/gapic/serviceusage_v1beta1/test_service_usage.py
|
googleapis/googleapis-gen
|
d84824c78563d59b0e58d5664bfaa430e9ad7e7a
|
[
"Apache-2.0"
] | 6
|
2021-02-02T23:46:11.000Z
|
2021-11-15T01:46:02.000Z
|
google/api/serviceusage/v1beta1/api-serviceusage-v1beta1-py/tests/unit/gapic/serviceusage_v1beta1/test_service_usage.py
|
googleapis/googleapis-gen
|
d84824c78563d59b0e58d5664bfaa430e9ad7e7a
|
[
"Apache-2.0"
] | 4
|
2021-01-28T23:25:45.000Z
|
2021-08-30T01:55:16.000Z
|
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import mock
import packaging.version
import grpc
from grpc.experimental import aio
import math
import pytest
from proto.marshal.rules.dates import DurationRule, TimestampRule
from google.api.serviceusage_v1beta1.services.service_usage import ServiceUsageAsyncClient
from google.api.serviceusage_v1beta1.services.service_usage import ServiceUsageClient
from google.api.serviceusage_v1beta1.services.service_usage import pagers
from google.api.serviceusage_v1beta1.services.service_usage import transports
from google.api.serviceusage_v1beta1.services.service_usage.transports.base import _GOOGLE_AUTH_VERSION
from google.api.serviceusage_v1beta1.types import resources
from google.api.serviceusage_v1beta1.types import serviceusage
from google.api_core import client_options
from google.api_core import exceptions as core_exceptions
from google.api_core import future
from google.api_core import gapic_v1
from google.api_core import grpc_helpers
from google.api_core import grpc_helpers_async
from google.api_core import operation_async # type: ignore
from google.api_core import operations_v1
from google.api_core import path_template
from google.auth import credentials as ga_credentials
from google.auth.exceptions import MutualTLSChannelError
from google.longrunning import operations_pb2
from google.oauth2 import service_account
from google.protobuf import field_mask_pb2 # type: ignore
import google.auth
# TODO(busunkim): Once google-auth >= 1.25.0 is required transitively
# through google-api-core:
# - Delete the auth "less than" test cases
# - Delete these pytest markers (Make the "greater than or equal to" tests the default).
# Skip marker: run the decorated test only when installed google-auth is < 1.25.0.
requires_google_auth_lt_1_25_0 = pytest.mark.skipif(
    packaging.version.parse(_GOOGLE_AUTH_VERSION) >= packaging.version.parse("1.25.0"),
    reason="This test requires google-auth < 1.25.0",
)
# Skip marker: run the decorated test only when installed google-auth is >= 1.25.0.
requires_google_auth_gte_1_25_0 = pytest.mark.skipif(
    packaging.version.parse(_GOOGLE_AUTH_VERSION) < packaging.version.parse("1.25.0"),
    reason="This test requires google-auth >= 1.25.0",
)
def client_cert_source_callback():
    """Stand-in client certificate source returning dummy (cert, key) bytes."""
    dummy_cert = b"cert bytes"
    dummy_key = b"key bytes"
    return dummy_cert, dummy_key
# If default endpoint is localhost, then default mtls endpoint will be the same.
# This method modifies the default endpoint so the client can produce a different
# mtls endpoint for endpoint testing purposes.
def modify_default_endpoint(client):
    """Return a test endpoint for *client*.

    If the client's DEFAULT_ENDPOINT mentions localhost (where the mtls
    endpoint would be identical), substitute "foo.googleapis.com" so the
    endpoint-switching tests can observe a difference; otherwise return the
    client's own default endpoint unchanged.
    """
    if "localhost" in client.DEFAULT_ENDPOINT:
        return "foo.googleapis.com"
    return client.DEFAULT_ENDPOINT
def test__get_default_mtls_endpoint():
    """_get_default_mtls_endpoint maps googleapis hosts to their mtls variants.

    None passes through, non-googleapis hosts are untouched, and already-mtls
    hosts are idempotent.
    """
    assert ServiceUsageClient._get_default_mtls_endpoint(None) is None
    non_googleapi = "api.example.com"
    for given, expected in (
        ("example.googleapis.com", "example.mtls.googleapis.com"),
        ("example.mtls.googleapis.com", "example.mtls.googleapis.com"),
        ("example.sandbox.googleapis.com", "example.mtls.sandbox.googleapis.com"),
        ("example.mtls.sandbox.googleapis.com", "example.mtls.sandbox.googleapis.com"),
        (non_googleapi, non_googleapi),
    ):
        assert ServiceUsageClient._get_default_mtls_endpoint(given) == expected
@pytest.mark.parametrize("client_class", [
    ServiceUsageClient,
    ServiceUsageAsyncClient,
])
def test_service_usage_client_from_service_account_info(client_class):
    """from_service_account_info builds a client wired to the credentials
    produced by the (mocked) service_account factory and the default host."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_info') as factory:
        factory.return_value = creds
        info = {"valid": True}
        client = client_class.from_service_account_info(info)
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)
        assert client.transport._host == 'serviceusage.googleapis.com:443'
@pytest.mark.parametrize("transport_class,transport_name", [
    (transports.ServiceUsageGrpcTransport, "grpc"),
    (transports.ServiceUsageGrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_service_usage_client_service_account_always_use_jwt(transport_class, transport_name):
    """with_always_use_jwt_access is invoked on the credentials iff the
    transport is constructed with always_use_jwt_access=True."""
    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=True)
        use_jwt.assert_called_once_with(True)
    with mock.patch.object(service_account.Credentials, 'with_always_use_jwt_access', create=True) as use_jwt:
        creds = service_account.Credentials(None, None, None)
        transport = transport_class(credentials=creds, always_use_jwt_access=False)
        use_jwt.assert_not_called()
@pytest.mark.parametrize("client_class", [
    ServiceUsageClient,
    ServiceUsageAsyncClient,
])
def test_service_usage_client_from_service_account_file(client_class):
    """Both from_service_account_file and its from_service_account_json alias
    build a client using the (mocked) service_account file factory."""
    creds = ga_credentials.AnonymousCredentials()
    with mock.patch.object(service_account.Credentials, 'from_service_account_file') as factory:
        factory.return_value = creds
        client = client_class.from_service_account_file("dummy/file/path.json")
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)
        client = client_class.from_service_account_json("dummy/file/path.json")
        assert client.transport._credentials == creds
        assert isinstance(client, client_class)
        assert client.transport._host == 'serviceusage.googleapis.com:443'
def test_service_usage_client_get_transport_class():
    """get_transport_class returns a registered transport, honoring an
    explicit transport label when one is given."""
    default_transport = ServiceUsageClient.get_transport_class()
    registered = [
        transports.ServiceUsageGrpcTransport,
    ]
    assert default_transport in registered
    grpc_transport = ServiceUsageClient.get_transport_class("grpc")
    assert grpc_transport == transports.ServiceUsageGrpcTransport
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (ServiceUsageClient, transports.ServiceUsageGrpcTransport, "grpc"),
    (ServiceUsageAsyncClient, transports.ServiceUsageGrpcAsyncIOTransport, "grpc_asyncio"),
])
@mock.patch.object(ServiceUsageClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ServiceUsageClient))
@mock.patch.object(ServiceUsageAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ServiceUsageAsyncClient))
def test_service_usage_client_client_options(client_class, transport_class, transport_name):
    """Client construction honors ClientOptions (api_endpoint, quota_project_id)
    and the GOOGLE_API_USE_MTLS_ENDPOINT / GOOGLE_API_USE_CLIENT_CERTIFICATE
    environment variables, asserting the kwargs passed to the transport."""
    # Check that if channel is provided we won't create a new one.
    with mock.patch.object(ServiceUsageClient, 'get_transport_class') as gtc:
        transport = transport_class(
            credentials=ga_credentials.AnonymousCredentials()
        )
        client = client_class(transport=transport)
        gtc.assert_not_called()
    # Check that if channel is provided via str we will create a new one.
    with mock.patch.object(ServiceUsageClient, 'get_transport_class') as gtc:
        client = client_class(transport=transport_name)
        gtc.assert_called()
    # Check the case api_endpoint is provided.
    options = client_options.ClientOptions(api_endpoint="squid.clam.whelk")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "never".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "never"}):
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class()
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT is
    # "always".
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "always"}):
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class()
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=client.DEFAULT_MTLS_ENDPOINT,
                scopes=None,
                client_cert_source_for_mtls=None,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
    # Check the case api_endpoint is not provided and GOOGLE_API_USE_MTLS_ENDPOINT has
    # unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "Unsupported"}):
        with pytest.raises(MutualTLSChannelError):
            client = client_class()
    # Check the case GOOGLE_API_USE_CLIENT_CERTIFICATE has unsupported value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": "Unsupported"}):
        with pytest.raises(ValueError):
            client = client_class()
    # Check the case quota_project_id is provided
    options = client_options.ClientOptions(quota_project_id="octopus")
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id="octopus",
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize("client_class,transport_class,transport_name,use_client_cert_env", [
    (ServiceUsageClient, transports.ServiceUsageGrpcTransport, "grpc", "true"),
    (ServiceUsageAsyncClient, transports.ServiceUsageGrpcAsyncIOTransport, "grpc_asyncio", "true"),
    (ServiceUsageClient, transports.ServiceUsageGrpcTransport, "grpc", "false"),
    (ServiceUsageAsyncClient, transports.ServiceUsageGrpcAsyncIOTransport, "grpc_asyncio", "false"),
])
@mock.patch.object(ServiceUsageClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ServiceUsageClient))
@mock.patch.object(ServiceUsageAsyncClient, "DEFAULT_ENDPOINT", modify_default_endpoint(ServiceUsageAsyncClient))
@mock.patch.dict(os.environ, {"GOOGLE_API_USE_MTLS_ENDPOINT": "auto"})
def test_service_usage_client_mtls_env_auto(client_class, transport_class, transport_name, use_client_cert_env):
    """With GOOGLE_API_USE_MTLS_ENDPOINT=auto, the client switches to the mtls
    endpoint only when a client cert is available AND
    GOOGLE_API_USE_CLIENT_CERTIFICATE is "true"."""
    # This tests the endpoint autoswitch behavior. Endpoint is autoswitched to the default
    # mtls endpoint, if GOOGLE_API_USE_CLIENT_CERTIFICATE is "true" and client cert exists.
    # Check the case client_cert_source is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        options = client_options.ClientOptions(client_cert_source=client_cert_source_callback)
        with mock.patch.object(transport_class, '__init__') as patched:
            patched.return_value = None
            client = client_class(client_options=options)
            if use_client_cert_env == "false":
                expected_client_cert_source = None
                expected_host = client.DEFAULT_ENDPOINT
            else:
                expected_client_cert_source = client_cert_source_callback
                expected_host = client.DEFAULT_MTLS_ENDPOINT
            patched.assert_called_once_with(
                credentials=None,
                credentials_file=None,
                host=expected_host,
                scopes=None,
                client_cert_source_for_mtls=expected_client_cert_source,
                quota_project_id=None,
                client_info=transports.base.DEFAULT_CLIENT_INFO,
                always_use_jwt_access=True,
            )
    # Check the case ADC client cert is provided. Whether client cert is used depends on
    # GOOGLE_API_USE_CLIENT_CERTIFICATE value.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch('google.auth.transport.mtls.has_default_client_cert_source', return_value=True):
                with mock.patch('google.auth.transport.mtls.default_client_cert_source', return_value=client_cert_source_callback):
                    # NOTE(review): `client` here still refers to the instance built in
                    # the previous block (it is only re-assigned below) — generated-code
                    # quirk; the expected endpoints are read before reconstruction.
                    if use_client_cert_env == "false":
                        expected_host = client.DEFAULT_ENDPOINT
                        expected_client_cert_source = None
                    else:
                        expected_host = client.DEFAULT_MTLS_ENDPOINT
                        expected_client_cert_source = client_cert_source_callback
                    patched.return_value = None
                    client = client_class()
                    patched.assert_called_once_with(
                        credentials=None,
                        credentials_file=None,
                        host=expected_host,
                        scopes=None,
                        client_cert_source_for_mtls=expected_client_cert_source,
                        quota_project_id=None,
                        client_info=transports.base.DEFAULT_CLIENT_INFO,
                        always_use_jwt_access=True,
                    )
    # Check the case client_cert_source and ADC client cert are not provided.
    with mock.patch.dict(os.environ, {"GOOGLE_API_USE_CLIENT_CERTIFICATE": use_client_cert_env}):
        with mock.patch.object(transport_class, '__init__') as patched:
            with mock.patch("google.auth.transport.mtls.has_default_client_cert_source", return_value=False):
                patched.return_value = None
                client = client_class()
                patched.assert_called_once_with(
                    credentials=None,
                    credentials_file=None,
                    host=client.DEFAULT_ENDPOINT,
                    scopes=None,
                    client_cert_source_for_mtls=None,
                    quota_project_id=None,
                    client_info=transports.base.DEFAULT_CLIENT_INFO,
                    always_use_jwt_access=True,
                )
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (ServiceUsageClient, transports.ServiceUsageGrpcTransport, "grpc"),
    (ServiceUsageAsyncClient, transports.ServiceUsageGrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_service_usage_client_client_options_scopes(client_class, transport_class, transport_name):
    """ClientOptions.scopes is forwarded verbatim to the transport constructor."""
    # Check the case scopes are provided.
    options = client_options.ClientOptions(
        scopes=["1", "2"],
    )
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host=client.DEFAULT_ENDPOINT,
            scopes=["1", "2"],
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
@pytest.mark.parametrize("client_class,transport_class,transport_name", [
    (ServiceUsageClient, transports.ServiceUsageGrpcTransport, "grpc"),
    (ServiceUsageAsyncClient, transports.ServiceUsageGrpcAsyncIOTransport, "grpc_asyncio"),
])
def test_service_usage_client_client_options_credentials_file(client_class, transport_class, transport_name):
    """A credentials file given via ClientOptions must reach the transport."""
    # Check the case credentials file is provided.
    options = client_options.ClientOptions(
        credentials_file="credentials.json"
    )
    # Patch the transport constructor so we can verify the credentials_file
    # kwarg is passed through verbatim.
    with mock.patch.object(transport_class, '__init__') as patched:
        patched.return_value = None
        client = client_class(client_options=options)
        patched.assert_called_once_with(
            credentials=None,
            credentials_file="credentials.json",
            host=client.DEFAULT_ENDPOINT,
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
def test_service_usage_client_client_options_from_dict():
    """client_options supplied as a plain dict must be honored (api_endpoint)."""
    # Patch the concrete gRPC transport constructor by its full import path,
    # since the dict form cannot be bound to a parametrized transport class.
    with mock.patch('google.api.serviceusage_v1beta1.services.service_usage.transports.ServiceUsageGrpcTransport.__init__') as grpc_transport:
        grpc_transport.return_value = None
        client = ServiceUsageClient(
            client_options={'api_endpoint': 'squid.clam.whelk'}
        )
        # The custom endpoint from the dict must become the transport host.
        grpc_transport.assert_called_once_with(
            credentials=None,
            credentials_file=None,
            host="squid.clam.whelk",
            scopes=None,
            client_cert_source_for_mtls=None,
            quota_project_id=None,
            client_info=transports.base.DEFAULT_CLIENT_INFO,
            always_use_jwt_access=True,
        )
def test_enable_service(transport: str = 'grpc', request_type=serviceusage.EnableServiceRequest):
    """enable_service forwards the request to the stub and returns an LRO future."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Proto3 makes every field optional and the API is mocked, so an
    # empty request is enough to exercise the code path.
    request = request_type()
    # Patch the gRPC stub method and hand back a canned Operation.
    with mock.patch.object(type(client.transport.enable_service), '__call__') as rpc:
        rpc.return_value = operations_pb2.Operation(name='operations/spam')
        response = client.enable_service(request)
        # Exactly one stub invocation, carrying the default-constructed request.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == serviceusage.EnableServiceRequest()

    # The client wraps the raw Operation in an operation future.
    assert isinstance(response, future.Future)
def test_enable_service_from_dict():
    """Exercise the same path with the request supplied as a plain dict."""
    test_enable_service(request_type=dict)
def test_enable_service_empty_call():
    """Coverage failsafe: calling with no request and no flattened fields works."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )
    # Patch the gRPC stub method; an argument-less call must still reach it
    # with a default-constructed request.
    with mock.patch.object(type(client.transport.enable_service), '__call__') as rpc:
        client.enable_service()
        rpc.assert_called()
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == serviceusage.EnableServiceRequest()
@pytest.mark.asyncio
async def test_enable_service_async(transport: str = 'grpc_asyncio', request_type=serviceusage.EnableServiceRequest):
    """Async variant: enable_service forwards the request and returns a future."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.enable_service),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/spam')
        )
        response = await client.enable_service(request)
        # Exactly one stub invocation (tightened from a bare truthiness
        # check to match the sync tests).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.EnableServiceRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_enable_service_async_from_dict():
    """Exercise the async path with the request supplied as a plain dict."""
    await test_enable_service_async(request_type=dict)
def test_enable_service_field_headers():
    """Routing headers derived from the request must be sent as metadata."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Populate a URI-mapped field so a routing header is generated.
    request = serviceusage.EnableServiceRequest()
    request.name = 'name/value'
    # Patch the gRPC stub method and fire the request.
    with mock.patch.object(type(client.transport.enable_service), '__call__') as rpc:
        rpc.return_value = operations_pb2.Operation(name='operations/op')
        client.enable_service(request)
        # The stub saw exactly the request we built.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request
        # The x-goog-request-params header carries the field value.
        _, _, call_kwargs = rpc.mock_calls[0]
        assert (
            'x-goog-request-params',
            'name=name/value',
        ) in call_kwargs['metadata']
@pytest.mark.asyncio
async def test_enable_service_field_headers_async():
    """Async variant: routing headers derived from the request are sent."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.EnableServiceRequest()
    request.name = 'name/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.enable_service),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
        await client.enable_service(request)
        # Exactly one stub invocation (tightened from a bare truthiness
        # check to match the sync tests).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
        # Establish that the field header was sent.
        _, _, kw = call.mock_calls[0]
        assert (
            'x-goog-request-params',
            'name=name/value',
        ) in kw['metadata']
def test_disable_service(transport: str = 'grpc', request_type=serviceusage.DisableServiceRequest):
    """disable_service forwards the request to the stub and returns an LRO future."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Proto3 makes every field optional and the API is mocked, so an
    # empty request is enough to exercise the code path.
    request = request_type()
    # Patch the gRPC stub method and hand back a canned Operation.
    with mock.patch.object(type(client.transport.disable_service), '__call__') as rpc:
        rpc.return_value = operations_pb2.Operation(name='operations/spam')
        response = client.disable_service(request)
        # Exactly one stub invocation, carrying the default-constructed request.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == serviceusage.DisableServiceRequest()

    # The client wraps the raw Operation in an operation future.
    assert isinstance(response, future.Future)
def test_disable_service_from_dict():
    """Exercise the same path with the request supplied as a plain dict."""
    test_disable_service(request_type=dict)
def test_disable_service_empty_call():
    """Coverage failsafe: calling with no request and no flattened fields works."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )
    # Patch the gRPC stub method; an argument-less call must still reach it
    # with a default-constructed request.
    with mock.patch.object(type(client.transport.disable_service), '__call__') as rpc:
        client.disable_service()
        rpc.assert_called()
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == serviceusage.DisableServiceRequest()
@pytest.mark.asyncio
async def test_disable_service_async(transport: str = 'grpc_asyncio', request_type=serviceusage.DisableServiceRequest):
    """Async variant: disable_service forwards the request and returns a future."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.disable_service),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/spam')
        )
        response = await client.disable_service(request)
        # Exactly one stub invocation (tightened from a bare truthiness
        # check to match the sync tests).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.DisableServiceRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_disable_service_async_from_dict():
    """Exercise the async path with the request supplied as a plain dict."""
    await test_disable_service_async(request_type=dict)
def test_disable_service_field_headers():
    """Routing headers derived from the request must be sent as metadata."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Populate a URI-mapped field so a routing header is generated.
    request = serviceusage.DisableServiceRequest()
    request.name = 'name/value'
    # Patch the gRPC stub method and fire the request.
    with mock.patch.object(type(client.transport.disable_service), '__call__') as rpc:
        rpc.return_value = operations_pb2.Operation(name='operations/op')
        client.disable_service(request)
        # The stub saw exactly the request we built.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request
        # The x-goog-request-params header carries the field value.
        _, _, call_kwargs = rpc.mock_calls[0]
        assert (
            'x-goog-request-params',
            'name=name/value',
        ) in call_kwargs['metadata']
@pytest.mark.asyncio
async def test_disable_service_field_headers_async():
    """Async variant: routing headers derived from the request are sent."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.DisableServiceRequest()
    request.name = 'name/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.disable_service),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
        await client.disable_service(request)
        # Exactly one stub invocation (tightened from a bare truthiness
        # check to match the sync tests).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
        # Establish that the field header was sent.
        _, _, kw = call.mock_calls[0]
        assert (
            'x-goog-request-params',
            'name=name/value',
        ) in kw['metadata']
def test_get_service(transport: str = 'grpc', request_type=serviceusage.GetServiceRequest):
    """get_service forwards the request and surfaces the stub's Service fields."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Proto3 makes every field optional and the API is mocked, so an
    # empty request is enough to exercise the code path.
    request = request_type()
    # Patch the gRPC stub method and hand back a populated Service.
    with mock.patch.object(type(client.transport.get_service), '__call__') as rpc:
        rpc.return_value = resources.Service(
            name='name_value',
            parent='parent_value',
            state=resources.State.DISABLED,
        )
        response = client.get_service(request)
        # Exactly one stub invocation, carrying the default-constructed request.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == serviceusage.GetServiceRequest()

    # Every field on the canned Service must survive the round trip.
    assert isinstance(response, resources.Service)
    assert response.name == 'name_value'
    assert response.parent == 'parent_value'
    assert response.state == resources.State.DISABLED
def test_get_service_from_dict():
    """Exercise the same path with the request supplied as a plain dict."""
    test_get_service(request_type=dict)
def test_get_service_empty_call():
    """Coverage failsafe: calling with no request and no flattened fields works."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )
    # Patch the gRPC stub method; an argument-less call must still reach it
    # with a default-constructed request.
    with mock.patch.object(type(client.transport.get_service), '__call__') as rpc:
        client.get_service()
        rpc.assert_called()
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == serviceusage.GetServiceRequest()
@pytest.mark.asyncio
async def test_get_service_async(transport: str = 'grpc_asyncio', request_type=serviceusage.GetServiceRequest):
    """Async variant: get_service forwards the request and surfaces the fields."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.get_service),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Service(
            name='name_value',
            parent='parent_value',
            state=resources.State.DISABLED,
        ))
        response = await client.get_service(request)
        # Exactly one stub invocation (tightened from a bare truthiness
        # check to match the sync tests).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.GetServiceRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.Service)
    assert response.name == 'name_value'
    assert response.parent == 'parent_value'
    assert response.state == resources.State.DISABLED
@pytest.mark.asyncio
async def test_get_service_async_from_dict():
    """Exercise the async path with the request supplied as a plain dict."""
    await test_get_service_async(request_type=dict)
def test_get_service_field_headers():
    """Routing headers derived from the request must be sent as metadata."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Populate a URI-mapped field so a routing header is generated.
    request = serviceusage.GetServiceRequest()
    request.name = 'name/value'
    # Patch the gRPC stub method and fire the request.
    with mock.patch.object(type(client.transport.get_service), '__call__') as rpc:
        rpc.return_value = resources.Service()
        client.get_service(request)
        # The stub saw exactly the request we built.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request
        # The x-goog-request-params header carries the field value.
        _, _, call_kwargs = rpc.mock_calls[0]
        assert (
            'x-goog-request-params',
            'name=name/value',
        ) in call_kwargs['metadata']
@pytest.mark.asyncio
async def test_get_service_field_headers_async():
    """Async variant: routing headers derived from the request are sent."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.GetServiceRequest()
    request.name = 'name/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.get_service),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.Service())
        await client.get_service(request)
        # Exactly one stub invocation (tightened from a bare truthiness
        # check to match the sync tests).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
        # Establish that the field header was sent.
        _, _, kw = call.mock_calls[0]
        assert (
            'x-goog-request-params',
            'name=name/value',
        ) in kw['metadata']
def test_list_services(transport: str = 'grpc', request_type=serviceusage.ListServicesRequest):
    """list_services forwards the request and wraps the response in a pager."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Proto3 makes every field optional and the API is mocked, so an
    # empty request is enough to exercise the code path.
    request = request_type()
    # Patch the gRPC stub method and hand back a canned response.
    with mock.patch.object(type(client.transport.list_services), '__call__') as rpc:
        rpc.return_value = serviceusage.ListServicesResponse(
            next_page_token='next_page_token_value',
        )
        response = client.list_services(request)
        # Exactly one stub invocation, carrying the default-constructed request.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == serviceusage.ListServicesRequest()

    # The client wraps the raw response in a pager that exposes the token.
    assert isinstance(response, pagers.ListServicesPager)
    assert response.next_page_token == 'next_page_token_value'
def test_list_services_from_dict():
    """Exercise the same path with the request supplied as a plain dict."""
    test_list_services(request_type=dict)
def test_list_services_empty_call():
    """Coverage failsafe: calling with no request and no flattened fields works."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )
    # Patch the gRPC stub method; an argument-less call must still reach it
    # with a default-constructed request.
    with mock.patch.object(type(client.transport.list_services), '__call__') as rpc:
        client.list_services()
        rpc.assert_called()
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == serviceusage.ListServicesRequest()
@pytest.mark.asyncio
async def test_list_services_async(transport: str = 'grpc_asyncio', request_type=serviceusage.ListServicesRequest):
    """Async variant: list_services forwards the request and returns an async pager."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_services),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(serviceusage.ListServicesResponse(
            next_page_token='next_page_token_value',
        ))
        response = await client.list_services(request)
        # Exactly one stub invocation (tightened from a bare truthiness
        # check to match the sync tests).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.ListServicesRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListServicesAsyncPager)
    assert response.next_page_token == 'next_page_token_value'
@pytest.mark.asyncio
async def test_list_services_async_from_dict():
    """Exercise the async path with the request supplied as a plain dict."""
    await test_list_services_async(request_type=dict)
def test_list_services_field_headers():
    """Routing headers derived from the request must be sent as metadata."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Populate a URI-mapped field so a routing header is generated.
    request = serviceusage.ListServicesRequest()
    request.parent = 'parent/value'
    # Patch the gRPC stub method and fire the request.
    with mock.patch.object(type(client.transport.list_services), '__call__') as rpc:
        rpc.return_value = serviceusage.ListServicesResponse()
        client.list_services(request)
        # The stub saw exactly the request we built.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request
        # The x-goog-request-params header carries the field value.
        _, _, call_kwargs = rpc.mock_calls[0]
        assert (
            'x-goog-request-params',
            'parent=parent/value',
        ) in call_kwargs['metadata']
@pytest.mark.asyncio
async def test_list_services_field_headers_async():
    """Async variant: routing headers derived from the request are sent."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.ListServicesRequest()
    request.parent = 'parent/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_services),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(serviceusage.ListServicesResponse())
        await client.list_services(request)
        # Exactly one stub invocation (tightened from a bare truthiness
        # check to match the sync tests).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
        # Establish that the field header was sent.
        _, _, kw = call.mock_calls[0]
        assert (
            'x-goog-request-params',
            'parent=parent/value',
        ) in kw['metadata']
def test_list_services_pager():
    """The sync pager transparently walks all pages and carries routing metadata."""
    client = ServiceUsageClient(
        # Fixed: pass a credentials INSTANCE, not the class, matching
        # every other test in this file.
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_services),
            '__call__') as call:
        # Set the response to a series of pages.
        call.side_effect = (
            serviceusage.ListServicesResponse(
                services=[
                    resources.Service(),
                    resources.Service(),
                    resources.Service(),
                ],
                next_page_token='abc',
            ),
            serviceusage.ListServicesResponse(
                services=[],
                next_page_token='def',
            ),
            serviceusage.ListServicesResponse(
                services=[
                    resources.Service(),
                ],
                next_page_token='ghi',
            ),
            serviceusage.ListServicesResponse(
                services=[
                    resources.Service(),
                    resources.Service(),
                ],
            ),
            RuntimeError,
        )

        # The pager must carry the routing-header metadata for the request.
        metadata = ()
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ('parent', ''),
            )),
        )
        pager = client.list_services(request={})
        assert pager._metadata == metadata

        # Iterating the pager yields every Service across all pages (3+0+1+2).
        results = [i for i in pager]
        assert len(results) == 6
        assert all(isinstance(i, resources.Service)
                   for i in results)
def test_list_services_pages():
    """The pager's .pages view exposes each raw page and its next_page_token."""
    client = ServiceUsageClient(
        # Fixed: pass a credentials INSTANCE, not the class, matching
        # every other test in this file.
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_services),
            '__call__') as call:
        # Set the response to a series of pages.
        call.side_effect = (
            serviceusage.ListServicesResponse(
                services=[
                    resources.Service(),
                    resources.Service(),
                    resources.Service(),
                ],
                next_page_token='abc',
            ),
            serviceusage.ListServicesResponse(
                services=[],
                next_page_token='def',
            ),
            serviceusage.ListServicesResponse(
                services=[
                    resources.Service(),
                ],
                next_page_token='ghi',
            ),
            serviceusage.ListServicesResponse(
                services=[
                    resources.Service(),
                    resources.Service(),
                ],
            ),
            RuntimeError,
        )
        # Each page's token must match the canned responses in order,
        # ending with the empty token on the last page.
        pages = list(client.list_services(request={}).pages)
        for page_, token in zip(pages, ['abc','def','ghi', '']):
            assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_services_async_pager():
    """The async pager yields every Service across all pages."""
    client = ServiceUsageAsyncClient(
        # Fixed: pass a credentials INSTANCE, not the class, matching
        # every other test in this file.
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_services),
            '__call__', new_callable=mock.AsyncMock) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            serviceusage.ListServicesResponse(
                services=[
                    resources.Service(),
                    resources.Service(),
                    resources.Service(),
                ],
                next_page_token='abc',
            ),
            serviceusage.ListServicesResponse(
                services=[],
                next_page_token='def',
            ),
            serviceusage.ListServicesResponse(
                services=[
                    resources.Service(),
                ],
                next_page_token='ghi',
            ),
            serviceusage.ListServicesResponse(
                services=[
                    resources.Service(),
                    resources.Service(),
                ],
            ),
            RuntimeError,
        )
        async_pager = await client.list_services(request={},)
        assert async_pager.next_page_token == 'abc'
        # Async iteration yields every Service across all pages (3+0+1+2).
        responses = []
        async for response in async_pager:
            responses.append(response)

        assert len(responses) == 6
        assert all(isinstance(i, resources.Service)
                   for i in responses)
@pytest.mark.asyncio
async def test_list_services_async_pages():
    """The async pager's .pages view exposes each raw page and its token."""
    client = ServiceUsageAsyncClient(
        # Fixed: pass a credentials INSTANCE, not the class, matching
        # every other test in this file.
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_services),
            '__call__', new_callable=mock.AsyncMock) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            serviceusage.ListServicesResponse(
                services=[
                    resources.Service(),
                    resources.Service(),
                    resources.Service(),
                ],
                next_page_token='abc',
            ),
            serviceusage.ListServicesResponse(
                services=[],
                next_page_token='def',
            ),
            serviceusage.ListServicesResponse(
                services=[
                    resources.Service(),
                ],
                next_page_token='ghi',
            ),
            serviceusage.ListServicesResponse(
                services=[
                    resources.Service(),
                    resources.Service(),
                ],
            ),
            RuntimeError,
        )
        # Each page's token must match the canned responses in order,
        # ending with the empty token on the last page.
        pages = []
        async for page_ in (await client.list_services(request={})).pages:
            pages.append(page_)
        for page_, token in zip(pages, ['abc','def','ghi', '']):
            assert page_.raw_page.next_page_token == token
def test_batch_enable_services(transport: str = 'grpc', request_type=serviceusage.BatchEnableServicesRequest):
    """batch_enable_services forwards the request and returns an LRO future."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Proto3 makes every field optional and the API is mocked, so an
    # empty request is enough to exercise the code path.
    request = request_type()
    # Patch the gRPC stub method and hand back a canned Operation.
    with mock.patch.object(type(client.transport.batch_enable_services), '__call__') as rpc:
        rpc.return_value = operations_pb2.Operation(name='operations/spam')
        response = client.batch_enable_services(request)
        # Exactly one stub invocation, carrying the default-constructed request.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == serviceusage.BatchEnableServicesRequest()

    # The client wraps the raw Operation in an operation future.
    assert isinstance(response, future.Future)
def test_batch_enable_services_from_dict():
    """Exercise the same path with the request supplied as a plain dict."""
    test_batch_enable_services(request_type=dict)
def test_batch_enable_services_empty_call():
    """Coverage failsafe: calling with no request and no flattened fields works."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )
    # Patch the gRPC stub method; an argument-less call must still reach it
    # with a default-constructed request.
    with mock.patch.object(type(client.transport.batch_enable_services), '__call__') as rpc:
        client.batch_enable_services()
        rpc.assert_called()
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == serviceusage.BatchEnableServicesRequest()
@pytest.mark.asyncio
async def test_batch_enable_services_async(transport: str = 'grpc_asyncio', request_type=serviceusage.BatchEnableServicesRequest):
    """Async variant: batch_enable_services forwards the request, returns a future."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.batch_enable_services),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/spam')
        )
        response = await client.batch_enable_services(request)
        # Exactly one stub invocation (tightened from a bare truthiness
        # check to match the sync tests).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.BatchEnableServicesRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_batch_enable_services_async_from_dict():
    """Exercise the async path with the request supplied as a plain dict."""
    await test_batch_enable_services_async(request_type=dict)
def test_batch_enable_services_field_headers():
    """Routing headers derived from the request must be sent as metadata."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Populate a URI-mapped field so a routing header is generated.
    request = serviceusage.BatchEnableServicesRequest()
    request.parent = 'parent/value'
    # Patch the gRPC stub method and fire the request.
    with mock.patch.object(type(client.transport.batch_enable_services), '__call__') as rpc:
        rpc.return_value = operations_pb2.Operation(name='operations/op')
        client.batch_enable_services(request)
        # The stub saw exactly the request we built.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == request
        # The x-goog-request-params header carries the field value.
        _, _, call_kwargs = rpc.mock_calls[0]
        assert (
            'x-goog-request-params',
            'parent=parent/value',
        ) in call_kwargs['metadata']
@pytest.mark.asyncio
async def test_batch_enable_services_field_headers_async():
    """Async variant: routing headers derived from the request are sent."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.BatchEnableServicesRequest()
    request.parent = 'parent/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.batch_enable_services),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
        await client.batch_enable_services(request)
        # Exactly one stub invocation (tightened from a bare truthiness
        # check to match the sync tests).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
        # Establish that the field header was sent.
        _, _, kw = call.mock_calls[0]
        assert (
            'x-goog-request-params',
            'parent=parent/value',
        ) in kw['metadata']
def test_list_consumer_quota_metrics(transport: str = 'grpc', request_type=serviceusage.ListConsumerQuotaMetricsRequest):
    """list_consumer_quota_metrics forwards the request and returns a pager."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Proto3 makes every field optional and the API is mocked, so an
    # empty request is enough to exercise the code path.
    request = request_type()
    # Patch the gRPC stub method and hand back a canned response.
    with mock.patch.object(type(client.transport.list_consumer_quota_metrics), '__call__') as rpc:
        rpc.return_value = serviceusage.ListConsumerQuotaMetricsResponse(
            next_page_token='next_page_token_value',
        )
        response = client.list_consumer_quota_metrics(request)
        # Exactly one stub invocation, carrying the default-constructed request.
        assert len(rpc.mock_calls) == 1
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == serviceusage.ListConsumerQuotaMetricsRequest()

    # The client wraps the raw response in a pager that exposes the token.
    assert isinstance(response, pagers.ListConsumerQuotaMetricsPager)
    assert response.next_page_token == 'next_page_token_value'
def test_list_consumer_quota_metrics_from_dict():
    """Exercise the same path with the request supplied as a plain dict."""
    test_list_consumer_quota_metrics(request_type=dict)
def test_list_consumer_quota_metrics_empty_call():
    """Coverage failsafe: calling with no request and no flattened fields works."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )
    # Patch the gRPC stub method; an argument-less call must still reach it
    # with a default-constructed request.
    with mock.patch.object(type(client.transport.list_consumer_quota_metrics), '__call__') as rpc:
        client.list_consumer_quota_metrics()
        rpc.assert_called()
        _, call_args, _ = rpc.mock_calls[0]
        assert call_args[0] == serviceusage.ListConsumerQuotaMetricsRequest()
@pytest.mark.asyncio
async def test_list_consumer_quota_metrics_async(transport: str = 'grpc_asyncio', request_type=serviceusage.ListConsumerQuotaMetricsRequest):
    """Async variant: forwards the request and returns an async pager."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_consumer_quota_metrics),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(serviceusage.ListConsumerQuotaMetricsResponse(
            next_page_token='next_page_token_value',
        ))
        response = await client.list_consumer_quota_metrics(request)
        # Exactly one stub invocation (tightened from a bare truthiness
        # check to match the sync tests).
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.ListConsumerQuotaMetricsRequest()

    # Establish that the response is the type that we expect.
    assert isinstance(response, pagers.ListConsumerQuotaMetricsAsyncPager)
    assert response.next_page_token == 'next_page_token_value'
@pytest.mark.asyncio
async def test_list_consumer_quota_metrics_async_from_dict():
    """Re-run the async round-trip test with a dict-typed request body."""
    await test_list_consumer_quota_metrics_async(request_type=dict)
def test_list_consumer_quota_metrics_field_headers():
    """The URI-bound `parent` field must be echoed into the
    x-goog-request-params routing header."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Populate the routing field with a non-empty value so a header is due.
    request = serviceusage.ListConsumerQuotaMetricsRequest()
    request.parent = 'parent/value'
    # Stub out the transport method and issue the call.
    with mock.patch.object(
            type(client.transport.list_consumer_quota_metrics),
            '__call__') as call:
        call.return_value = serviceusage.ListConsumerQuotaMetricsResponse()
        client.list_consumer_quota_metrics(request)
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # The metadata handed to the stub must include the routing header.
    _, _, kw = call.mock_calls[0]
    expected_header = (
        'x-goog-request-params',
        'parent=parent/value',
    )
    assert expected_header in kw['metadata']
@pytest.mark.asyncio
async def test_list_consumer_quota_metrics_field_headers_async():
    """Async variant: the routing field must appear in x-goog-request-params."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.ListConsumerQuotaMetricsRequest()
    request.parent = 'parent/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_consumer_quota_metrics),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(serviceusage.ListConsumerQuotaMetricsResponse())
        await client.list_consumer_quota_metrics(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent/value',
    ) in kw['metadata']
def test_list_consumer_quota_metrics_pager():
    """The sync pager walks every metric across a multi-page mocked response."""
    client = ServiceUsageClient(
        # Fix: pass an AnonymousCredentials *instance* — every other test in
        # this file instantiates it; the bare class was passed here.
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_consumer_quota_metrics),
            '__call__') as call:
        # Set the response to a series of pages (3 + 0 + 1 + 2 items), with a
        # trailing RuntimeError to catch any over-fetch past the last page.
        call.side_effect = (
            serviceusage.ListConsumerQuotaMetricsResponse(
                metrics=[
                    resources.ConsumerQuotaMetric(),
                    resources.ConsumerQuotaMetric(),
                    resources.ConsumerQuotaMetric(),
                ],
                next_page_token='abc',
            ),
            serviceusage.ListConsumerQuotaMetricsResponse(
                metrics=[],
                next_page_token='def',
            ),
            serviceusage.ListConsumerQuotaMetricsResponse(
                metrics=[
                    resources.ConsumerQuotaMetric(),
                ],
                next_page_token='ghi',
            ),
            serviceusage.ListConsumerQuotaMetricsResponse(
                metrics=[
                    resources.ConsumerQuotaMetric(),
                    resources.ConsumerQuotaMetric(),
                ],
            ),
            RuntimeError,
        )
        # The pager must carry the routing metadata for the (empty) parent.
        expected_metadata = (
            gapic_v1.routing_header.to_grpc_metadata((
                ('parent', ''),
            )),
        )
        pager = client.list_consumer_quota_metrics(request={})
        assert pager._metadata == expected_metadata
        # Iterating the pager flattens all pages into individual metrics.
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, resources.ConsumerQuotaMetric)
                   for i in results)
def test_list_consumer_quota_metrics_pages():
    """Each page yielded by `.pages` carries its raw next_page_token."""
    client = ServiceUsageClient(
        # Fix: pass an AnonymousCredentials *instance*, not the class.
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_consumer_quota_metrics),
            '__call__') as call:
        # Set the response to a series of pages; the trailing RuntimeError
        # catches any fetch past the final (token-less) page.
        call.side_effect = (
            serviceusage.ListConsumerQuotaMetricsResponse(
                metrics=[
                    resources.ConsumerQuotaMetric(),
                    resources.ConsumerQuotaMetric(),
                    resources.ConsumerQuotaMetric(),
                ],
                next_page_token='abc',
            ),
            serviceusage.ListConsumerQuotaMetricsResponse(
                metrics=[],
                next_page_token='def',
            ),
            serviceusage.ListConsumerQuotaMetricsResponse(
                metrics=[
                    resources.ConsumerQuotaMetric(),
                ],
                next_page_token='ghi',
            ),
            serviceusage.ListConsumerQuotaMetricsResponse(
                metrics=[
                    resources.ConsumerQuotaMetric(),
                    resources.ConsumerQuotaMetric(),
                ],
            ),
            RuntimeError,
        )
        pages = list(client.list_consumer_quota_metrics(request={}).pages)
        # The last page has no token, surfaced as the empty string.
        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
            assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_consumer_quota_metrics_async_pager():
    """The async pager flattens all mocked pages into individual metrics."""
    client = ServiceUsageAsyncClient(
        # Fix: pass an AnonymousCredentials *instance*, not the class.
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_consumer_quota_metrics),
            '__call__', new_callable=mock.AsyncMock) as call:
        # Set the response to a series of pages (3 + 0 + 1 + 2 items); the
        # trailing RuntimeError catches any fetch past the last page.
        call.side_effect = (
            serviceusage.ListConsumerQuotaMetricsResponse(
                metrics=[
                    resources.ConsumerQuotaMetric(),
                    resources.ConsumerQuotaMetric(),
                    resources.ConsumerQuotaMetric(),
                ],
                next_page_token='abc',
            ),
            serviceusage.ListConsumerQuotaMetricsResponse(
                metrics=[],
                next_page_token='def',
            ),
            serviceusage.ListConsumerQuotaMetricsResponse(
                metrics=[
                    resources.ConsumerQuotaMetric(),
                ],
                next_page_token='ghi',
            ),
            serviceusage.ListConsumerQuotaMetricsResponse(
                metrics=[
                    resources.ConsumerQuotaMetric(),
                    resources.ConsumerQuotaMetric(),
                ],
            ),
            RuntimeError,
        )
        async_pager = await client.list_consumer_quota_metrics(request={},)
        assert async_pager.next_page_token == 'abc'
        responses = []
        async for response in async_pager:
            responses.append(response)
        assert len(responses) == 6
        assert all(isinstance(i, resources.ConsumerQuotaMetric)
                   for i in responses)
@pytest.mark.asyncio
async def test_list_consumer_quota_metrics_async_pages():
    """Each async page carries its raw next_page_token ('' on the last)."""
    client = ServiceUsageAsyncClient(
        # Fix: pass an AnonymousCredentials *instance*, not the class.
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_consumer_quota_metrics),
            '__call__', new_callable=mock.AsyncMock) as call:
        # Set the response to a series of pages; the trailing RuntimeError
        # catches any fetch past the final page.
        call.side_effect = (
            serviceusage.ListConsumerQuotaMetricsResponse(
                metrics=[
                    resources.ConsumerQuotaMetric(),
                    resources.ConsumerQuotaMetric(),
                    resources.ConsumerQuotaMetric(),
                ],
                next_page_token='abc',
            ),
            serviceusage.ListConsumerQuotaMetricsResponse(
                metrics=[],
                next_page_token='def',
            ),
            serviceusage.ListConsumerQuotaMetricsResponse(
                metrics=[
                    resources.ConsumerQuotaMetric(),
                ],
                next_page_token='ghi',
            ),
            serviceusage.ListConsumerQuotaMetricsResponse(
                metrics=[
                    resources.ConsumerQuotaMetric(),
                    resources.ConsumerQuotaMetric(),
                ],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (await client.list_consumer_quota_metrics(request={})).pages:
            pages.append(page_)
        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
            assert page_.raw_page.next_page_token == token
def test_get_consumer_quota_metric(transport: str = 'grpc', request_type=serviceusage.GetConsumerQuotaMetricRequest):
    """Round-trip get_consumer_quota_metric through a mocked gRPC stub and
    confirm every canned field is surfaced on the response."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Proto3 fields are all optional and the API is mocked, so an empty
    # request is sufficient.
    request = request_type()
    # Patch the gRPC stub method and hand back a canned resource.
    with mock.patch.object(
            type(client.transport.get_consumer_quota_metric),
            '__call__') as call:
        call.return_value = resources.ConsumerQuotaMetric(
            name='name_value',
            metric='metric_value',
            display_name='display_name_value',
            unit='unit_value',
        )
        response = client.get_consumer_quota_metric(request)
        # Exactly one stub invocation, with the default request message.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.GetConsumerQuotaMetricRequest()
    # Every canned field must be surfaced on the returned resource.
    assert isinstance(response, resources.ConsumerQuotaMetric)
    for field, expected in (
        ('name', 'name_value'),
        ('metric', 'metric_value'),
        ('display_name', 'display_name_value'),
        ('unit', 'unit_value'),
    ):
        assert getattr(response, field) == expected
def test_get_consumer_quota_metric_from_dict():
    """Re-run the gRPC round-trip test with a dict-typed request body."""
    test_get_consumer_quota_metric(request_type=dict)
def test_get_consumer_quota_metric_empty_call():
    """Coverage failsafe: a no-argument call sends the default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.get_consumer_quota_metric),
            '__call__') as call:
        client.get_consumer_quota_metric()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.GetConsumerQuotaMetricRequest()
@pytest.mark.asyncio
async def test_get_consumer_quota_metric_async(transport: str = 'grpc_asyncio', request_type=serviceusage.GetConsumerQuotaMetricRequest):
    """Async round-trip: the canned resource fields surface on the response."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.get_consumer_quota_metric),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.ConsumerQuotaMetric(
            name='name_value',
            metric='metric_value',
            display_name='display_name_value',
            unit='unit_value',
        ))
        response = await client.get_consumer_quota_metric(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.GetConsumerQuotaMetricRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.ConsumerQuotaMetric)
    assert response.name == 'name_value'
    assert response.metric == 'metric_value'
    assert response.display_name == 'display_name_value'
    assert response.unit == 'unit_value'
@pytest.mark.asyncio
async def test_get_consumer_quota_metric_async_from_dict():
    """Re-run the async round-trip test with a dict-typed request body."""
    await test_get_consumer_quota_metric_async(request_type=dict)
def test_get_consumer_quota_metric_field_headers():
    """The `name` routing field must be sent as x-goog-request-params."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.GetConsumerQuotaMetricRequest()
    request.name = 'name/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.get_consumer_quota_metric),
            '__call__') as call:
        call.return_value = resources.ConsumerQuotaMetric()
        client.get_consumer_quota_metric(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name/value',
    ) in kw['metadata']
@pytest.mark.asyncio
async def test_get_consumer_quota_metric_field_headers_async():
    """Async variant: the routing field must appear in x-goog-request-params."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.GetConsumerQuotaMetricRequest()
    request.name = 'name/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.get_consumer_quota_metric),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.ConsumerQuotaMetric())
        await client.get_consumer_quota_metric(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name/value',
    ) in kw['metadata']
def test_get_consumer_quota_limit(transport: str = 'grpc', request_type=serviceusage.GetConsumerQuotaLimitRequest):
    """Round-trip: canned ConsumerQuotaLimit fields surface on the response."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.get_consumer_quota_limit),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = resources.ConsumerQuotaLimit(
            name='name_value',
            metric='metric_value',
            unit='unit_value',
            is_precise=True,
            allows_admin_overrides=True,
        )
        response = client.get_consumer_quota_limit(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.GetConsumerQuotaLimitRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.ConsumerQuotaLimit)
    assert response.name == 'name_value'
    assert response.metric == 'metric_value'
    assert response.unit == 'unit_value'
    assert response.is_precise is True
    assert response.allows_admin_overrides is True
def test_get_consumer_quota_limit_from_dict():
    """Re-run the gRPC round-trip test with a dict-typed request body."""
    test_get_consumer_quota_limit(request_type=dict)
def test_get_consumer_quota_limit_empty_call():
    """Coverage failsafe: a no-argument call sends the default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.get_consumer_quota_limit),
            '__call__') as call:
        client.get_consumer_quota_limit()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.GetConsumerQuotaLimitRequest()
@pytest.mark.asyncio
async def test_get_consumer_quota_limit_async(transport: str = 'grpc_asyncio', request_type=serviceusage.GetConsumerQuotaLimitRequest):
    """Async round-trip: canned ConsumerQuotaLimit fields surface on the response."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.get_consumer_quota_limit),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value =grpc_helpers_async.FakeUnaryUnaryCall(resources.ConsumerQuotaLimit(
            name='name_value',
            metric='metric_value',
            unit='unit_value',
            is_precise=True,
            allows_admin_overrides=True,
        ))
        response = await client.get_consumer_quota_limit(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.GetConsumerQuotaLimitRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, resources.ConsumerQuotaLimit)
    assert response.name == 'name_value'
    assert response.metric == 'metric_value'
    assert response.unit == 'unit_value'
    assert response.is_precise is True
    assert response.allows_admin_overrides is True
@pytest.mark.asyncio
async def test_get_consumer_quota_limit_async_from_dict():
    """Re-run the async round-trip test with a dict-typed request body."""
    await test_get_consumer_quota_limit_async(request_type=dict)
def test_get_consumer_quota_limit_field_headers():
    """The `name` routing field must be sent as x-goog-request-params."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.GetConsumerQuotaLimitRequest()
    request.name = 'name/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.get_consumer_quota_limit),
            '__call__') as call:
        call.return_value = resources.ConsumerQuotaLimit()
        client.get_consumer_quota_limit(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name/value',
    ) in kw['metadata']
@pytest.mark.asyncio
async def test_get_consumer_quota_limit_field_headers_async():
    """Async variant: the routing field must appear in x-goog-request-params."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.GetConsumerQuotaLimitRequest()
    request.name = 'name/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.get_consumer_quota_limit),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(resources.ConsumerQuotaLimit())
        await client.get_consumer_quota_limit(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name/value',
    ) in kw['metadata']
def test_create_admin_override(transport: str = 'grpc', request_type=serviceusage.CreateAdminOverrideRequest):
    """Round-trip create_admin_override and confirm it yields an LRO future."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # An empty request suffices: proto3 fields are all optional and the
    # API itself is mocked out.
    request = request_type()
    # Patch the gRPC stub method and hand back a canned operation.
    with mock.patch.object(
            type(client.transport.create_admin_override),
            '__call__') as call:
        call.return_value = operations_pb2.Operation(name='operations/spam')
        response = client.create_admin_override(request)
        # Exactly one stub invocation, with the default request message.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.CreateAdminOverrideRequest()
    # The client wraps the raw operation in a long-running-operation future.
    assert isinstance(response, future.Future)
def test_create_admin_override_from_dict():
    """Re-run the gRPC round-trip test with a dict-typed request body."""
    test_create_admin_override(request_type=dict)
def test_create_admin_override_empty_call():
    """Coverage failsafe: a no-argument call sends the default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.create_admin_override),
            '__call__') as call:
        client.create_admin_override()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.CreateAdminOverrideRequest()
@pytest.mark.asyncio
async def test_create_admin_override_async(transport: str = 'grpc_asyncio', request_type=serviceusage.CreateAdminOverrideRequest):
    """Async round-trip: the call resolves to a long-running-operation future."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.create_admin_override),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/spam')
        )
        response = await client.create_admin_override(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.CreateAdminOverrideRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_create_admin_override_async_from_dict():
    """Re-run the async round-trip test with a dict-typed request body."""
    await test_create_admin_override_async(request_type=dict)
def test_create_admin_override_field_headers():
    """The `parent` routing field must be sent as x-goog-request-params."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.CreateAdminOverrideRequest()
    request.parent = 'parent/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.create_admin_override),
            '__call__') as call:
        call.return_value = operations_pb2.Operation(name='operations/op')
        client.create_admin_override(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent/value',
    ) in kw['metadata']
@pytest.mark.asyncio
async def test_create_admin_override_field_headers_async():
    """Async variant: the routing field must appear in x-goog-request-params."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.CreateAdminOverrideRequest()
    request.parent = 'parent/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.create_admin_override),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
        await client.create_admin_override(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent/value',
    ) in kw['metadata']
def test_update_admin_override(transport: str = 'grpc', request_type=serviceusage.UpdateAdminOverrideRequest):
    """Round-trip update_admin_override; the result is an LRO future."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.update_admin_override),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name='operations/spam')
        response = client.update_admin_override(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.UpdateAdminOverrideRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
def test_update_admin_override_from_dict():
    """Re-run the gRPC round-trip test with a dict-typed request body."""
    test_update_admin_override(request_type=dict)
def test_update_admin_override_empty_call():
    """Coverage failsafe: a no-argument call sends the default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.update_admin_override),
            '__call__') as call:
        client.update_admin_override()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.UpdateAdminOverrideRequest()
@pytest.mark.asyncio
async def test_update_admin_override_async(transport: str = 'grpc_asyncio', request_type=serviceusage.UpdateAdminOverrideRequest):
    """Async round-trip: the call resolves to a long-running-operation future."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.update_admin_override),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/spam')
        )
        response = await client.update_admin_override(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.UpdateAdminOverrideRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_update_admin_override_async_from_dict():
    """Re-run the async round-trip test with a dict-typed request body."""
    await test_update_admin_override_async(request_type=dict)
def test_update_admin_override_field_headers():
    """The `name` routing field must be sent as x-goog-request-params."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.UpdateAdminOverrideRequest()
    request.name = 'name/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.update_admin_override),
            '__call__') as call:
        call.return_value = operations_pb2.Operation(name='operations/op')
        client.update_admin_override(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name/value',
    ) in kw['metadata']
@pytest.mark.asyncio
async def test_update_admin_override_field_headers_async():
    """Async variant: the routing field must appear in x-goog-request-params."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.UpdateAdminOverrideRequest()
    request.name = 'name/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.update_admin_override),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
        await client.update_admin_override(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name/value',
    ) in kw['metadata']
def test_delete_admin_override(transport: str = 'grpc', request_type=serviceusage.DeleteAdminOverrideRequest):
    """Round-trip delete_admin_override; the result is an LRO future."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.delete_admin_override),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name='operations/spam')
        response = client.delete_admin_override(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.DeleteAdminOverrideRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
def test_delete_admin_override_from_dict():
    """Re-run the gRPC round-trip test with a dict-typed request body."""
    test_delete_admin_override(request_type=dict)
def test_delete_admin_override_empty_call():
    """Coverage failsafe: a no-argument call sends the default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.delete_admin_override),
            '__call__') as call:
        client.delete_admin_override()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.DeleteAdminOverrideRequest()
@pytest.mark.asyncio
async def test_delete_admin_override_async(transport: str = 'grpc_asyncio', request_type=serviceusage.DeleteAdminOverrideRequest):
    """Async round-trip: the call resolves to a long-running-operation future."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.delete_admin_override),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/spam')
        )
        response = await client.delete_admin_override(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.DeleteAdminOverrideRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_delete_admin_override_async_from_dict():
    """Re-run the async round-trip test with a dict-typed request body."""
    await test_delete_admin_override_async(request_type=dict)
def test_delete_admin_override_field_headers():
    """The `name` routing field must be sent as x-goog-request-params."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.DeleteAdminOverrideRequest()
    request.name = 'name/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.delete_admin_override),
            '__call__') as call:
        call.return_value = operations_pb2.Operation(name='operations/op')
        client.delete_admin_override(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name/value',
    ) in kw['metadata']
@pytest.mark.asyncio
async def test_delete_admin_override_field_headers_async():
    """Async variant: the routing field must appear in x-goog-request-params."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.DeleteAdminOverrideRequest()
    request.name = 'name/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.delete_admin_override),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
        await client.delete_admin_override(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name/value',
    ) in kw['metadata']
def test_list_admin_overrides(transport: str = 'grpc', request_type=serviceusage.ListAdminOverridesRequest):
    """ListAdminOverrides over gRPC yields a pager exposing the page token."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Proto3 treats every field as optional at runtime and the API is
    # mocked out, so an empty request is sufficient.
    request = request_type()
    # Replace the gRPC stub method with a mock that returns a canned page.
    with mock.patch.object(type(client.transport.list_admin_overrides), '__call__') as call:
        call.return_value = serviceusage.ListAdminOverridesResponse(
            next_page_token='next_page_token_value',
        )
        response = client.list_admin_overrides(request)
        # Exactly one RPC was made, carrying the expected request message.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.ListAdminOverridesRequest()
    # The raw response is wrapped in a pager that surfaces the token.
    assert isinstance(response, pagers.ListAdminOverridesPager)
    assert response.next_page_token == 'next_page_token_value'
def test_list_admin_overrides_from_dict():
    """Re-run the ListAdminOverrides test with a plain dict request."""
    test_list_admin_overrides(request_type=dict)
def test_list_admin_overrides_empty_call():
    """Calling with no request and no flattened fields still sends a request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_admin_overrides),
            '__call__') as call:
        client.list_admin_overrides()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.ListAdminOverridesRequest()
@pytest.mark.asyncio
async def test_list_admin_overrides_async(transport: str = 'grpc_asyncio', request_type=serviceusage.ListAdminOverridesRequest):
    """Async ListAdminOverrides returns an async pager with the page token."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Every proto3 field is optional and the API is mocked, so send an
    # empty request.
    request = request_type()
    # Replace the gRPC stub method with a mock unary-unary call.
    with mock.patch.object(type(client.transport.list_admin_overrides), '__call__') as call:
        fake_page = serviceusage.ListAdminOverridesResponse(
            next_page_token='next_page_token_value',
        )
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(fake_page)
        response = await client.list_admin_overrides(request)
        # The stub was invoked with the expected request message.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.ListAdminOverridesRequest()
    # The response is wrapped in an async pager.
    assert isinstance(response, pagers.ListAdminOverridesAsyncPager)
    assert response.next_page_token == 'next_page_token_value'
@pytest.mark.asyncio
async def test_list_admin_overrides_async_from_dict():
    """Re-run the async ListAdminOverrides test with a plain dict request."""
    await test_list_admin_overrides_async(request_type=dict)
def test_list_admin_overrides_field_headers():
    """Verify ``request.parent`` is sent in the x-goog-request-params metadata."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.ListAdminOverridesRequest()
    request.parent = 'parent/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_admin_overrides),
            '__call__') as call:
        call.return_value = serviceusage.ListAdminOverridesResponse()
        client.list_admin_overrides(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent/value',
    ) in kw['metadata']
@pytest.mark.asyncio
async def test_list_admin_overrides_field_headers_async():
    """Async variant: ``request.parent`` is sent via x-goog-request-params."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.ListAdminOverridesRequest()
    request.parent = 'parent/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_admin_overrides),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(serviceusage.ListAdminOverridesResponse())
        await client.list_admin_overrides(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent/value',
    ) in kw['metadata']
def test_list_admin_overrides_pager():
    """The sync pager walks all pages and records routing metadata.

    Fix: every other test in this file passes a credentials *instance*;
    this one passed the ``AnonymousCredentials`` class itself. Instantiate
    it for consistency.
    """
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_admin_overrides),
            '__call__') as call:
        # Set the response to a series of pages (3 + 0 + 1 + 2 items).
        call.side_effect = (
            serviceusage.ListAdminOverridesResponse(
                overrides=[
                    resources.QuotaOverride(),
                    resources.QuotaOverride(),
                    resources.QuotaOverride(),
                ],
                next_page_token='abc',
            ),
            serviceusage.ListAdminOverridesResponse(
                overrides=[],
                next_page_token='def',
            ),
            serviceusage.ListAdminOverridesResponse(
                overrides=[
                    resources.QuotaOverride(),
                ],
                next_page_token='ghi',
            ),
            serviceusage.ListAdminOverridesResponse(
                overrides=[
                    resources.QuotaOverride(),
                ],
            ),
            RuntimeError,
        )

        # The pager carries routing metadata for the (empty) parent field.
        metadata = ()
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ('parent', ''),
            )),
        )
        pager = client.list_admin_overrides(request={})

        assert pager._metadata == metadata

        # Iterating the pager yields every override across all pages.
        results = [i for i in pager]
        assert len(results) == 6
        assert all(isinstance(i, resources.QuotaOverride)
                   for i in results)
def test_list_admin_overrides_pages():
    """Each page exposes its raw ``next_page_token``.

    Fix: pass a credentials *instance* (``AnonymousCredentials()``) like
    the rest of the suite, not the class object.
    """
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_admin_overrides),
            '__call__') as call:
        # Set the response to a series of pages.
        call.side_effect = (
            serviceusage.ListAdminOverridesResponse(
                overrides=[
                    resources.QuotaOverride(),
                    resources.QuotaOverride(),
                    resources.QuotaOverride(),
                ],
                next_page_token='abc',
            ),
            serviceusage.ListAdminOverridesResponse(
                overrides=[],
                next_page_token='def',
            ),
            serviceusage.ListAdminOverridesResponse(
                overrides=[
                    resources.QuotaOverride(),
                ],
                next_page_token='ghi',
            ),
            serviceusage.ListAdminOverridesResponse(
                overrides=[
                    resources.QuotaOverride(),
                    resources.QuotaOverride(),
                ],
            ),
            RuntimeError,
        )
        pages = list(client.list_admin_overrides(request={}).pages)
        # The final page has no continuation token.
        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
            assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_admin_overrides_async_pager():
    """The async pager walks all pages via ``async for``.

    Fix: pass a credentials *instance* (``AnonymousCredentials()``) like
    the rest of the suite, not the class object.
    """
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_admin_overrides),
            '__call__', new_callable=mock.AsyncMock) as call:
        # Set the response to a series of pages (3 + 0 + 1 + 2 items).
        call.side_effect = (
            serviceusage.ListAdminOverridesResponse(
                overrides=[
                    resources.QuotaOverride(),
                    resources.QuotaOverride(),
                    resources.QuotaOverride(),
                ],
                next_page_token='abc',
            ),
            serviceusage.ListAdminOverridesResponse(
                overrides=[],
                next_page_token='def',
            ),
            serviceusage.ListAdminOverridesResponse(
                overrides=[
                    resources.QuotaOverride(),
                ],
                next_page_token='ghi',
            ),
            serviceusage.ListAdminOverridesResponse(
                overrides=[
                    resources.QuotaOverride(),
                    resources.QuotaOverride(),
                ],
            ),
            RuntimeError,
        )
        async_pager = await client.list_admin_overrides(request={},)
        assert async_pager.next_page_token == 'abc'
        # Iterating asynchronously yields every override across all pages.
        responses = []
        async for response in async_pager:
            responses.append(response)

        assert len(responses) == 6
        assert all(isinstance(i, resources.QuotaOverride)
                   for i in responses)
@pytest.mark.asyncio
async def test_list_admin_overrides_async_pages():
    """Each async page exposes its raw ``next_page_token``.

    Fix: pass a credentials *instance* (``AnonymousCredentials()``) like
    the rest of the suite, not the class object.
    """
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_admin_overrides),
            '__call__', new_callable=mock.AsyncMock) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            serviceusage.ListAdminOverridesResponse(
                overrides=[
                    resources.QuotaOverride(),
                    resources.QuotaOverride(),
                    resources.QuotaOverride(),
                ],
                next_page_token='abc',
            ),
            serviceusage.ListAdminOverridesResponse(
                overrides=[],
                next_page_token='def',
            ),
            serviceusage.ListAdminOverridesResponse(
                overrides=[
                    resources.QuotaOverride(),
                ],
                next_page_token='ghi',
            ),
            serviceusage.ListAdminOverridesResponse(
                overrides=[
                    resources.QuotaOverride(),
                    resources.QuotaOverride(),
                ],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (await client.list_admin_overrides(request={})).pages:
            pages.append(page_)
        # The final page has no continuation token.
        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
            assert page_.raw_page.next_page_token == token
def test_import_admin_overrides(transport: str = 'grpc', request_type=serviceusage.ImportAdminOverridesRequest):
    """ImportAdminOverrides over gRPC returns a long-running operation future."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Proto3 fields are all optional at runtime and the API is mocked out,
    # so an empty request is sufficient.
    request = request_type()
    # Patch the underlying gRPC stub method and fake its response.
    stub_type = type(client.transport.import_admin_overrides)
    with mock.patch.object(stub_type, '__call__') as call:
        call.return_value = operations_pb2.Operation(name='operations/spam')
        response = client.import_admin_overrides(request)
    # Exactly one RPC was made, carrying the expected request message.
    assert len(call.mock_calls) == 1
    _, args, _ = call.mock_calls[0]
    assert args[0] == serviceusage.ImportAdminOverridesRequest()
    # The client wraps the operation in a future.
    assert isinstance(response, future.Future)
def test_import_admin_overrides_from_dict():
    """Re-run the ImportAdminOverrides test with a plain dict request."""
    test_import_admin_overrides(request_type=dict)
def test_import_admin_overrides_empty_call():
    """A totally empty call still sends a default request message."""
    # Coverage failsafe: request == None with no flattened fields must work.
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )
    # Patch the underlying gRPC stub method.
    with mock.patch.object(type(client.transport.import_admin_overrides), '__call__') as call:
        client.import_admin_overrides()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.ImportAdminOverridesRequest()
@pytest.mark.asyncio
async def test_import_admin_overrides_async(transport: str = 'grpc_asyncio', request_type=serviceusage.ImportAdminOverridesRequest):
    """Async ImportAdminOverrides returns a long-running operation future."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.import_admin_overrides),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/spam')
        )
        response = await client.import_admin_overrides(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.ImportAdminOverridesRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_import_admin_overrides_async_from_dict():
    """Re-run the async ImportAdminOverrides test with a plain dict request."""
    await test_import_admin_overrides_async(request_type=dict)
def test_import_admin_overrides_field_headers():
    """Verify ``request.parent`` is sent in the x-goog-request-params metadata."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.ImportAdminOverridesRequest()
    request.parent = 'parent/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.import_admin_overrides),
            '__call__') as call:
        call.return_value = operations_pb2.Operation(name='operations/op')
        client.import_admin_overrides(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent/value',
    ) in kw['metadata']
@pytest.mark.asyncio
async def test_import_admin_overrides_field_headers_async():
    """Async variant: ``request.parent`` is sent via x-goog-request-params."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.ImportAdminOverridesRequest()
    request.parent = 'parent/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.import_admin_overrides),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
        await client.import_admin_overrides(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent/value',
    ) in kw['metadata']
def test_create_consumer_override(transport: str = 'grpc', request_type=serviceusage.CreateConsumerOverrideRequest):
    """CreateConsumerOverride over gRPC returns a long-running operation."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # An empty request is enough: proto3 fields are optional at runtime
    # and the API is mocked out.
    request = request_type()
    # Swap the gRPC stub method for a mock that returns an Operation.
    with mock.patch.object(type(client.transport.create_consumer_override), '__call__') as call:
        call.return_value = operations_pb2.Operation(name='operations/spam')
        response = client.create_consumer_override(request)
    # Exactly one RPC happened, with the expected request message.
    assert len(call.mock_calls) == 1
    _, args, _ = call.mock_calls[0]
    assert args[0] == serviceusage.CreateConsumerOverrideRequest()
    # The client wraps the result in an operation future.
    assert isinstance(response, future.Future)
def test_create_consumer_override_from_dict():
    """Re-run the CreateConsumerOverride test with a plain dict request."""
    test_create_consumer_override(request_type=dict)
def test_create_consumer_override_empty_call():
    """Calling with no request and no flattened fields still sends a request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.create_consumer_override),
            '__call__') as call:
        client.create_consumer_override()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.CreateConsumerOverrideRequest()
@pytest.mark.asyncio
async def test_create_consumer_override_async(transport: str = 'grpc_asyncio', request_type=serviceusage.CreateConsumerOverrideRequest):
    """Async CreateConsumerOverride returns a long-running operation future."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.create_consumer_override),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/spam')
        )
        response = await client.create_consumer_override(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.CreateConsumerOverrideRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_create_consumer_override_async_from_dict():
    """Re-run the async CreateConsumerOverride test with a plain dict request."""
    await test_create_consumer_override_async(request_type=dict)
def test_create_consumer_override_field_headers():
    """Verify ``request.parent`` is sent in the x-goog-request-params metadata."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.CreateConsumerOverrideRequest()
    request.parent = 'parent/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.create_consumer_override),
            '__call__') as call:
        call.return_value = operations_pb2.Operation(name='operations/op')
        client.create_consumer_override(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent/value',
    ) in kw['metadata']
@pytest.mark.asyncio
async def test_create_consumer_override_field_headers_async():
    """Async variant: ``request.parent`` is sent via x-goog-request-params."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.CreateConsumerOverrideRequest()
    request.parent = 'parent/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.create_consumer_override),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
        await client.create_consumer_override(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent/value',
    ) in kw['metadata']
def test_update_consumer_override(transport: str = 'grpc', request_type=serviceusage.UpdateConsumerOverrideRequest):
    """UpdateConsumerOverride over gRPC returns a long-running operation."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Empty request: proto3 runtime treats every field as optional and the
    # API itself is mocked out.
    request = request_type()
    # Fake the gRPC stub's return value.
    target = type(client.transport.update_consumer_override)
    with mock.patch.object(target, '__call__') as call:
        call.return_value = operations_pb2.Operation(name='operations/spam')
        response = client.update_consumer_override(request)
        # One RPC, with the expected request message.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.UpdateConsumerOverrideRequest()
    # The client returns an operation future.
    assert isinstance(response, future.Future)
def test_update_consumer_override_from_dict():
    """Re-run the UpdateConsumerOverride test with a plain dict request."""
    test_update_consumer_override(request_type=dict)
def test_update_consumer_override_empty_call():
    """Calling with no request and no flattened fields still sends a request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.update_consumer_override),
            '__call__') as call:
        client.update_consumer_override()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.UpdateConsumerOverrideRequest()
@pytest.mark.asyncio
async def test_update_consumer_override_async(transport: str = 'grpc_asyncio', request_type=serviceusage.UpdateConsumerOverrideRequest):
    """Async UpdateConsumerOverride returns a long-running operation future."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.update_consumer_override),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/spam')
        )
        response = await client.update_consumer_override(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.UpdateConsumerOverrideRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_update_consumer_override_async_from_dict():
    """Re-run the async UpdateConsumerOverride test with a plain dict request."""
    await test_update_consumer_override_async(request_type=dict)
def test_update_consumer_override_field_headers():
    """Verify ``request.name`` is sent in the x-goog-request-params metadata."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.UpdateConsumerOverrideRequest()
    request.name = 'name/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.update_consumer_override),
            '__call__') as call:
        call.return_value = operations_pb2.Operation(name='operations/op')
        client.update_consumer_override(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name/value',
    ) in kw['metadata']
@pytest.mark.asyncio
async def test_update_consumer_override_field_headers_async():
    """Async variant: ``request.name`` is sent via x-goog-request-params."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.UpdateConsumerOverrideRequest()
    request.name = 'name/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.update_consumer_override),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
        await client.update_consumer_override(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name/value',
    ) in kw['metadata']
def test_delete_consumer_override(transport: str = 'grpc', request_type=serviceusage.DeleteConsumerOverrideRequest):
    """DeleteConsumerOverride over gRPC returns a long-running operation."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Send an empty request; proto3 fields are optional at runtime and the
    # API itself is mocked out.
    request = request_type()
    # Fake the gRPC stub's return value.
    with mock.patch.object(type(client.transport.delete_consumer_override), '__call__') as call:
        call.return_value = operations_pb2.Operation(name='operations/spam')
        response = client.delete_consumer_override(request)
        # One RPC, carrying the expected request message.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.DeleteConsumerOverrideRequest()
    # The client returns an operation future.
    assert isinstance(response, future.Future)
def test_delete_consumer_override_from_dict():
    """Re-run the DeleteConsumerOverride test with a plain dict request."""
    test_delete_consumer_override(request_type=dict)
def test_delete_consumer_override_empty_call():
    """Calling with no request and no flattened fields still sends a request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.delete_consumer_override),
            '__call__') as call:
        client.delete_consumer_override()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.DeleteConsumerOverrideRequest()
@pytest.mark.asyncio
async def test_delete_consumer_override_async(transport: str = 'grpc_asyncio', request_type=serviceusage.DeleteConsumerOverrideRequest):
    """Async DeleteConsumerOverride returns a long-running operation future."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.delete_consumer_override),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/spam')
        )
        response = await client.delete_consumer_override(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.DeleteConsumerOverrideRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_delete_consumer_override_async_from_dict():
    """Re-run the async DeleteConsumerOverride test with a plain dict request."""
    await test_delete_consumer_override_async(request_type=dict)
def test_delete_consumer_override_field_headers():
    """Verify ``request.name`` is sent in the x-goog-request-params metadata."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.DeleteConsumerOverrideRequest()
    request.name = 'name/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.delete_consumer_override),
            '__call__') as call:
        call.return_value = operations_pb2.Operation(name='operations/op')
        client.delete_consumer_override(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name/value',
    ) in kw['metadata']
@pytest.mark.asyncio
async def test_delete_consumer_override_field_headers_async():
    """Async variant: ``request.name`` is sent via x-goog-request-params."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.DeleteConsumerOverrideRequest()
    request.name = 'name/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.delete_consumer_override),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
        await client.delete_consumer_override(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'name=name/value',
    ) in kw['metadata']
def test_list_consumer_overrides(transport: str = 'grpc', request_type=serviceusage.ListConsumerOverridesRequest):
    """ListConsumerOverrides over gRPC yields a pager exposing the page token."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Proto3 treats every field as optional at runtime and the API is
    # mocked out, so an empty request works.
    request = request_type()
    # Replace the gRPC stub method with a mock that returns a canned page.
    with mock.patch.object(type(client.transport.list_consumer_overrides), '__call__') as call:
        call.return_value = serviceusage.ListConsumerOverridesResponse(
            next_page_token='next_page_token_value',
        )
        response = client.list_consumer_overrides(request)
        # Exactly one RPC was made, carrying the expected request message.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.ListConsumerOverridesRequest()
    # The raw response is wrapped in a pager that surfaces the token.
    assert isinstance(response, pagers.ListConsumerOverridesPager)
    assert response.next_page_token == 'next_page_token_value'
def test_list_consumer_overrides_from_dict():
    """Re-run the ListConsumerOverrides test with a plain dict request."""
    test_list_consumer_overrides(request_type=dict)
def test_list_consumer_overrides_empty_call():
    """Calling with no request and no flattened fields still sends a request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_consumer_overrides),
            '__call__') as call:
        client.list_consumer_overrides()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.ListConsumerOverridesRequest()
@pytest.mark.asyncio
async def test_list_consumer_overrides_async(transport: str = 'grpc_asyncio', request_type=serviceusage.ListConsumerOverridesRequest):
    """Async ListConsumerOverrides returns an async pager with the page token."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Every proto3 field is optional and the API is mocked, so send an
    # empty request.
    request = request_type()
    # Replace the gRPC stub method with a mock unary-unary call.
    with mock.patch.object(type(client.transport.list_consumer_overrides), '__call__') as call:
        fake_page = serviceusage.ListConsumerOverridesResponse(
            next_page_token='next_page_token_value',
        )
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(fake_page)
        response = await client.list_consumer_overrides(request)
        # The stub was invoked with the expected request message.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.ListConsumerOverridesRequest()
    # The response is wrapped in an async pager.
    assert isinstance(response, pagers.ListConsumerOverridesAsyncPager)
    assert response.next_page_token == 'next_page_token_value'
@pytest.mark.asyncio
async def test_list_consumer_overrides_async_from_dict():
    """Re-run the async ListConsumerOverrides test with a plain dict request."""
    await test_list_consumer_overrides_async(request_type=dict)
def test_list_consumer_overrides_field_headers():
client = ServiceUsageClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = serviceusage.ListConsumerOverridesRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_consumer_overrides),
'__call__') as call:
call.return_value = serviceusage.ListConsumerOverridesResponse()
client.list_consumer_overrides(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls) == 1
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
@pytest.mark.asyncio
async def test_list_consumer_overrides_field_headers_async():
client = ServiceUsageAsyncClient(
credentials=ga_credentials.AnonymousCredentials(),
)
# Any value that is part of the HTTP/1.1 URI should be sent as
# a field header. Set these to a non-empty value.
request = serviceusage.ListConsumerOverridesRequest()
request.parent = 'parent/value'
# Mock the actual call within the gRPC stub, and fake the request.
with mock.patch.object(
type(client.transport.list_consumer_overrides),
'__call__') as call:
call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(serviceusage.ListConsumerOverridesResponse())
await client.list_consumer_overrides(request)
# Establish that the underlying gRPC stub method was called.
assert len(call.mock_calls)
_, args, _ = call.mock_calls[0]
assert args[0] == request
# Establish that the field header was sent.
_, _, kw = call.mock_calls[0]
assert (
'x-goog-request-params',
'parent=parent/value',
) in kw['metadata']
def test_list_consumer_overrides_pager():
    """The sync pager flattens all pages into a single result stream.

    Fix: the generated test passed the ``AnonymousCredentials`` class itself;
    every sibling test passes an *instance*, which is what the client expects.
    """
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_consumer_overrides),
            '__call__') as call:
        # Set the response to a series of pages; the trailing RuntimeError
        # guards against the pager requesting more pages than provided.
        call.side_effect = (
            serviceusage.ListConsumerOverridesResponse(
                overrides=[
                    resources.QuotaOverride(),
                    resources.QuotaOverride(),
                    resources.QuotaOverride(),
                ],
                next_page_token='abc',
            ),
            serviceusage.ListConsumerOverridesResponse(
                overrides=[],
                next_page_token='def',
            ),
            serviceusage.ListConsumerOverridesResponse(
                overrides=[
                    resources.QuotaOverride(),
                ],
                next_page_token='ghi',
            ),
            serviceusage.ListConsumerOverridesResponse(
                overrides=[
                    resources.QuotaOverride(),
                ],
            ),
            RuntimeError,
        )
        metadata = ()
        metadata = tuple(metadata) + (
            gapic_v1.routing_header.to_grpc_metadata((
                ('parent', ''),
            )),
        )
        pager = client.list_consumer_overrides(request={})
        # The pager carries the routing metadata computed above.
        assert pager._metadata == metadata
        # 3 + 0 + 1 + 2 overrides across the four pages.
        results = list(pager)
        assert len(results) == 6
        assert all(isinstance(i, resources.QuotaOverride)
                   for i in results)
def test_list_consumer_overrides_pages():
    """Raw page iteration exposes each page's next_page_token in order.

    Fix: pass an ``AnonymousCredentials`` *instance* (the generated test
    passed the class itself, inconsistent with every other test here).
    """
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_consumer_overrides),
            '__call__') as call:
        # Set the response to a series of pages.
        call.side_effect = (
            serviceusage.ListConsumerOverridesResponse(
                overrides=[
                    resources.QuotaOverride(),
                    resources.QuotaOverride(),
                    resources.QuotaOverride(),
                ],
                next_page_token='abc',
            ),
            serviceusage.ListConsumerOverridesResponse(
                overrides=[],
                next_page_token='def',
            ),
            serviceusage.ListConsumerOverridesResponse(
                overrides=[
                    resources.QuotaOverride(),
                ],
                next_page_token='ghi',
            ),
            serviceusage.ListConsumerOverridesResponse(
                overrides=[
                    resources.QuotaOverride(),
                    resources.QuotaOverride(),
                ],
            ),
            RuntimeError,
        )
        pages = list(client.list_consumer_overrides(request={}).pages)
        # The last page has no token (empty string).
        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
            assert page_.raw_page.next_page_token == token
@pytest.mark.asyncio
async def test_list_consumer_overrides_async_pager():
    """The async pager iterates every override across all pages.

    Fix: pass an ``AnonymousCredentials`` *instance* rather than the class.
    """
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_consumer_overrides),
            '__call__', new_callable=mock.AsyncMock) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            serviceusage.ListConsumerOverridesResponse(
                overrides=[
                    resources.QuotaOverride(),
                    resources.QuotaOverride(),
                    resources.QuotaOverride(),
                ],
                next_page_token='abc',
            ),
            serviceusage.ListConsumerOverridesResponse(
                overrides=[],
                next_page_token='def',
            ),
            serviceusage.ListConsumerOverridesResponse(
                overrides=[
                    resources.QuotaOverride(),
                ],
                next_page_token='ghi',
            ),
            serviceusage.ListConsumerOverridesResponse(
                overrides=[
                    resources.QuotaOverride(),
                    resources.QuotaOverride(),
                ],
            ),
            RuntimeError,
        )
        async_pager = await client.list_consumer_overrides(request={},)
        # The pager surfaces the first page's token immediately.
        assert async_pager.next_page_token == 'abc'
        responses = []
        async for response in async_pager:
            responses.append(response)
        # 3 + 0 + 1 + 2 overrides across the four pages.
        assert len(responses) == 6
        assert all(isinstance(i, resources.QuotaOverride)
                   for i in responses)
@pytest.mark.asyncio
async def test_list_consumer_overrides_async_pages():
    """Async raw-page iteration exposes each page's next_page_token in order.

    Fix: pass an ``AnonymousCredentials`` *instance* rather than the class.
    """
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.list_consumer_overrides),
            '__call__', new_callable=mock.AsyncMock) as call:
        # Set the response to a series of pages.
        call.side_effect = (
            serviceusage.ListConsumerOverridesResponse(
                overrides=[
                    resources.QuotaOverride(),
                    resources.QuotaOverride(),
                    resources.QuotaOverride(),
                ],
                next_page_token='abc',
            ),
            serviceusage.ListConsumerOverridesResponse(
                overrides=[],
                next_page_token='def',
            ),
            serviceusage.ListConsumerOverridesResponse(
                overrides=[
                    resources.QuotaOverride(),
                ],
                next_page_token='ghi',
            ),
            serviceusage.ListConsumerOverridesResponse(
                overrides=[
                    resources.QuotaOverride(),
                    resources.QuotaOverride(),
                ],
            ),
            RuntimeError,
        )
        pages = []
        async for page_ in (await client.list_consumer_overrides(request={})).pages:
            pages.append(page_)
        # The last page has no token (empty string).
        for page_, token in zip(pages, ['abc', 'def', 'ghi', '']):
            assert page_.raw_page.next_page_token == token
def test_import_consumer_overrides(transport: str = 'grpc', request_type=serviceusage.ImportConsumerOverridesRequest):
    """ImportConsumerOverrides returns a long-running-operation future."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.import_consumer_overrides),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name='operations/spam')
        response = client.import_consumer_overrides(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.ImportConsumerOverridesRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
def test_import_consumer_overrides_from_dict():
    """Re-run the sync ImportConsumerOverrides test with the request as a dict."""
    test_import_consumer_overrides(request_type=dict)
def test_import_consumer_overrides_empty_call():
    """A call with no request and no flattened fields sends the default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.import_consumer_overrides),
            '__call__') as call:
        client.import_consumer_overrides()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.ImportConsumerOverridesRequest()
@pytest.mark.asyncio
async def test_import_consumer_overrides_async(transport: str = 'grpc_asyncio', request_type=serviceusage.ImportConsumerOverridesRequest):
    """Async ImportConsumerOverrides returns a long-running-operation future."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.import_consumer_overrides),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/spam')
        )
        response = await client.import_consumer_overrides(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.ImportConsumerOverridesRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_import_consumer_overrides_async_from_dict():
    """Re-run the async ImportConsumerOverrides test with the request as a dict."""
    await test_import_consumer_overrides_async(request_type=dict)
def test_import_consumer_overrides_field_headers():
    """Routing metadata (x-goog-request-params) is derived from request.parent."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.ImportConsumerOverridesRequest()
    request.parent = 'parent/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.import_consumer_overrides),
            '__call__') as call:
        call.return_value = operations_pb2.Operation(name='operations/op')
        client.import_consumer_overrides(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent/value',
    ) in kw['metadata']
@pytest.mark.asyncio
async def test_import_consumer_overrides_field_headers_async():
    """Async variant of the routing-header test."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.ImportConsumerOverridesRequest()
    request.parent = 'parent/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.import_consumer_overrides),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
        await client.import_consumer_overrides(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent/value',
    ) in kw['metadata']
def test_generate_service_identity(transport: str = 'grpc', request_type=serviceusage.GenerateServiceIdentityRequest):
    """GenerateServiceIdentity returns a long-running-operation future."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.generate_service_identity),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = operations_pb2.Operation(name='operations/spam')
        response = client.generate_service_identity(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.GenerateServiceIdentityRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
def test_generate_service_identity_from_dict():
    """Re-run the sync GenerateServiceIdentity test with the request as a dict."""
    test_generate_service_identity(request_type=dict)
def test_generate_service_identity_empty_call():
    """A call with no request and no flattened fields sends the default request."""
    # This test is a coverage failsafe to make sure that totally empty calls,
    # i.e. request == None and no flattened fields passed, work.
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc',
    )
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.generate_service_identity),
            '__call__') as call:
        client.generate_service_identity()
        call.assert_called()
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.GenerateServiceIdentityRequest()
@pytest.mark.asyncio
async def test_generate_service_identity_async(transport: str = 'grpc_asyncio', request_type=serviceusage.GenerateServiceIdentityRequest):
    """Async GenerateServiceIdentity returns a long-running-operation future."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport=transport,
    )
    # Everything is optional in proto3 as far as the runtime is concerned,
    # and we are mocking out the actual API, so just send an empty request.
    request = request_type()
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.generate_service_identity),
            '__call__') as call:
        # Designate an appropriate return value for the call.
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(
            operations_pb2.Operation(name='operations/spam')
        )
        response = await client.generate_service_identity(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == serviceusage.GenerateServiceIdentityRequest()
    # Establish that the response is the type that we expect.
    assert isinstance(response, future.Future)
@pytest.mark.asyncio
async def test_generate_service_identity_async_from_dict():
    """Re-run the async GenerateServiceIdentity test with the request as a dict."""
    await test_generate_service_identity_async(request_type=dict)
def test_generate_service_identity_field_headers():
    """Routing metadata (x-goog-request-params) is derived from request.parent."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.GenerateServiceIdentityRequest()
    request.parent = 'parent/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.generate_service_identity),
            '__call__') as call:
        call.return_value = operations_pb2.Operation(name='operations/op')
        client.generate_service_identity(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls) == 1
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent/value',
    ) in kw['metadata']
@pytest.mark.asyncio
async def test_generate_service_identity_field_headers_async():
    """Async variant of the routing-header test."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    # Any value that is part of the HTTP/1.1 URI should be sent as
    # a field header. Set these to a non-empty value.
    request = serviceusage.GenerateServiceIdentityRequest()
    request.parent = 'parent/value'
    # Mock the actual call within the gRPC stub, and fake the request.
    with mock.patch.object(
            type(client.transport.generate_service_identity),
            '__call__') as call:
        call.return_value = grpc_helpers_async.FakeUnaryUnaryCall(operations_pb2.Operation(name='operations/op'))
        await client.generate_service_identity(request)
        # Establish that the underlying gRPC stub method was called.
        assert len(call.mock_calls)
        _, args, _ = call.mock_calls[0]
        assert args[0] == request
    # Establish that the field header was sent.
    _, _, kw = call.mock_calls[0]
    assert (
        'x-goog-request-params',
        'parent=parent/value',
    ) in kw['metadata']
def test_credentials_transport_error():
    """Combining a transport instance with credentials-related client
    arguments must raise ValueError in every case."""
    # Each entry is an extra keyword set that conflicts with an explicit
    # transport: direct credentials, a credentials file, and scopes.
    conflicting_kwargs = (
        {'credentials': ga_credentials.AnonymousCredentials()},
        {'client_options': {"credentials_file": "credentials.json"}},
        {'client_options': {"scopes": ["1", "2"]}},
    )
    for extra in conflicting_kwargs:
        transport = transports.ServiceUsageGrpcTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )
        with pytest.raises(ValueError):
            ServiceUsageClient(transport=transport, **extra)
def test_transport_instance():
    """A client accepts and keeps a user-supplied transport instance."""
    # A client may be instantiated with a custom transport instance.
    transport = transports.ServiceUsageGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    client = ServiceUsageClient(transport=transport)
    assert client.transport is transport
def test_transport_get_channel():
    """Both grpc and grpc_asyncio transports expose a usable channel."""
    # A client may be instantiated with a custom transport instance.
    transport = transports.ServiceUsageGrpcTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    channel = transport.grpc_channel
    assert channel
    transport = transports.ServiceUsageGrpcAsyncIOTransport(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    channel = transport.grpc_channel
    assert channel
@pytest.mark.parametrize("transport_class", [
    transports.ServiceUsageGrpcTransport,
    transports.ServiceUsageGrpcAsyncIOTransport,
])
def test_transport_adc(transport_class):
    """Transports fall back to Application Default Credentials (ADC)."""
    # Test default credentials are used if not provided.
    with mock.patch.object(google.auth, 'default') as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class()
        adc.assert_called_once()
def test_transport_grpc_default():
    """Clients built without an explicit transport get the gRPC one."""
    # A client should use the gRPC transport by default.
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
    )
    assert isinstance(
        client.transport,
        transports.ServiceUsageGrpcTransport,
    )
def test_service_usage_base_transport_error():
    """Supplying both credentials and a credentials file is rejected."""
    # Passing both a credentials object and credentials_file should raise an error
    with pytest.raises(core_exceptions.DuplicateCredentialArgs):
        transport = transports.ServiceUsageTransport(
            credentials=ga_credentials.AnonymousCredentials(),
            credentials_file="credentials.json"
        )
def test_service_usage_base_transport():
    """Every abstract RPC method and the LRO client raise NotImplementedError."""
    # Instantiate the base transport.
    with mock.patch('google.api.serviceusage_v1beta1.services.service_usage.transports.ServiceUsageTransport.__init__') as Transport:
        Transport.return_value = None
        transport = transports.ServiceUsageTransport(
            credentials=ga_credentials.AnonymousCredentials(),
        )
    # Every method on the transport should just blindly
    # raise NotImplementedError.
    methods = (
        'enable_service',
        'disable_service',
        'get_service',
        'list_services',
        'batch_enable_services',
        'list_consumer_quota_metrics',
        'get_consumer_quota_metric',
        'get_consumer_quota_limit',
        'create_admin_override',
        'update_admin_override',
        'delete_admin_override',
        'list_admin_overrides',
        'import_admin_overrides',
        'create_consumer_override',
        'update_consumer_override',
        'delete_consumer_override',
        'list_consumer_overrides',
        'import_consumer_overrides',
        'generate_service_identity',
    )
    for method in methods:
        with pytest.raises(NotImplementedError):
            getattr(transport, method)(request=object())
    with pytest.raises(NotImplementedError):
        transport.close()
    # Additionally, the LRO client (a property) should
    # also raise NotImplementedError
    with pytest.raises(NotImplementedError):
        transport.operations_client
@requires_google_auth_gte_1_25_0
def test_service_usage_base_transport_with_credentials_file():
    """A credentials file is loaded with the service's default scopes (new google-auth)."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.api.serviceusage_v1beta1.services.service_usage.transports.ServiceUsageTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.ServiceUsageTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        load_creds.assert_called_once_with("credentials.json",
            scopes=None,
            default_scopes=(
            'https://www.googleapis.com/auth/cloud-platform',
            'https://www.googleapis.com/auth/cloud-platform.read-only',
            'https://www.googleapis.com/auth/service.management',
),
            quota_project_id="octopus",
        )
@requires_google_auth_lt_1_25_0
def test_service_usage_base_transport_with_credentials_file_old_google_auth():
    """Same as above, but old google-auth takes scopes= directly (no default_scopes)."""
    # Instantiate the base transport with a credentials file
    with mock.patch.object(google.auth, 'load_credentials_from_file', autospec=True) as load_creds, mock.patch('google.api.serviceusage_v1beta1.services.service_usage.transports.ServiceUsageTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        load_creds.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.ServiceUsageTransport(
            credentials_file="credentials.json",
            quota_project_id="octopus",
        )
        load_creds.assert_called_once_with("credentials.json", scopes=(
            'https://www.googleapis.com/auth/cloud-platform',
            'https://www.googleapis.com/auth/cloud-platform.read-only',
            'https://www.googleapis.com/auth/service.management',
            ),
            quota_project_id="octopus",
        )
def test_service_usage_base_transport_with_adc():
    """The base transport falls back to ADC when no credentials are given."""
    # Test the default credentials are used if credentials and credentials_file are None.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc, mock.patch('google.api.serviceusage_v1beta1.services.service_usage.transports.ServiceUsageTransport._prep_wrapped_messages') as Transport:
        Transport.return_value = None
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport = transports.ServiceUsageTransport()
        adc.assert_called_once()
@requires_google_auth_gte_1_25_0
def test_service_usage_auth_adc():
    """Client construction calls ADC with the default scopes (new google-auth)."""
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        ServiceUsageClient()
        adc.assert_called_once_with(
            scopes=None,
            default_scopes=(
            'https://www.googleapis.com/auth/cloud-platform',
            'https://www.googleapis.com/auth/cloud-platform.read-only',
            'https://www.googleapis.com/auth/service.management',
),
            quota_project_id=None,
        )
@requires_google_auth_lt_1_25_0
def test_service_usage_auth_adc_old_google_auth():
    """Same as above for old google-auth (scopes passed directly)."""
    # If no credentials are provided, we should use ADC credentials.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        ServiceUsageClient()
        adc.assert_called_once_with(
            scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/service.management',),
            quota_project_id=None,
        )
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.ServiceUsageGrpcTransport,
        transports.ServiceUsageGrpcAsyncIOTransport,
    ],
)
@requires_google_auth_gte_1_25_0
def test_service_usage_transport_auth_adc(transport_class):
    """gRPC transports call ADC with user scopes + default scopes (new google-auth)."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, 'default', autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus", scopes=["1", "2"])
        adc.assert_called_once_with(
            scopes=["1", "2"],
            default_scopes=( 'https://www.googleapis.com/auth/cloud-platform', 'https://www.googleapis.com/auth/cloud-platform.read-only', 'https://www.googleapis.com/auth/service.management',),
            quota_project_id="octopus",
        )
@pytest.mark.parametrize(
    "transport_class",
    [
        transports.ServiceUsageGrpcTransport,
        transports.ServiceUsageGrpcAsyncIOTransport,
    ],
)
@requires_google_auth_lt_1_25_0
def test_service_usage_transport_auth_adc_old_google_auth(transport_class):
    """Same as above for old google-auth (default scopes passed as scopes=)."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc:
        adc.return_value = (ga_credentials.AnonymousCredentials(), None)
        transport_class(quota_project_id="octopus")
        adc.assert_called_once_with(scopes=(
            'https://www.googleapis.com/auth/cloud-platform',
            'https://www.googleapis.com/auth/cloud-platform.read-only',
            'https://www.googleapis.com/auth/service.management',
            ),
            quota_project_id="octopus",
        )
@pytest.mark.parametrize(
    "transport_class,grpc_helpers",
    [
        (transports.ServiceUsageGrpcTransport, grpc_helpers),
        (transports.ServiceUsageGrpcAsyncIOTransport, grpc_helpers_async)
    ],
)
def test_service_usage_transport_create_channel(transport_class, grpc_helpers):
    """Channel creation forwards host, scopes, quota project and gRPC options."""
    # If credentials and host are not provided, the transport class should use
    # ADC credentials.
    with mock.patch.object(google.auth, "default", autospec=True) as adc, mock.patch.object(
        grpc_helpers, "create_channel", autospec=True
    ) as create_channel:
        creds = ga_credentials.AnonymousCredentials()
        adc.return_value = (creds, None)
        transport_class(
            quota_project_id="octopus",
            scopes=["1", "2"]
        )
        create_channel.assert_called_with(
            "serviceusage.googleapis.com:443",
            credentials=creds,
            credentials_file=None,
            quota_project_id="octopus",
            default_scopes=(
                'https://www.googleapis.com/auth/cloud-platform',
                'https://www.googleapis.com/auth/cloud-platform.read-only',
                'https://www.googleapis.com/auth/service.management',
),
            scopes=["1", "2"],
            default_host="serviceusage.googleapis.com",
            ssl_credentials=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
@pytest.mark.parametrize("transport_class", [transports.ServiceUsageGrpcTransport, transports.ServiceUsageGrpcAsyncIOTransport])
def test_service_usage_grpc_transport_client_cert_source_for_mtls(
    transport_class
):
    """mTLS: explicit ssl_channel_credentials wins; otherwise the client
    cert source callback feeds grpc.ssl_channel_credentials."""
    cred = ga_credentials.AnonymousCredentials()
    # Check ssl_channel_credentials is used if provided.
    with mock.patch.object(transport_class, "create_channel") as mock_create_channel:
        mock_ssl_channel_creds = mock.Mock()
        transport_class(
            host="squid.clam.whelk",
            credentials=cred,
            ssl_channel_credentials=mock_ssl_channel_creds
        )
        mock_create_channel.assert_called_once_with(
            "squid.clam.whelk:443",
            credentials=cred,
            credentials_file=None,
            scopes=None,
            ssl_credentials=mock_ssl_channel_creds,
            quota_project_id=None,
            options=[
                ("grpc.max_send_message_length", -1),
                ("grpc.max_receive_message_length", -1),
            ],
        )
    # Check if ssl_channel_credentials is not provided, then client_cert_source_for_mtls
    # is used.
    with mock.patch.object(transport_class, "create_channel", return_value=mock.Mock()):
        with mock.patch("grpc.ssl_channel_credentials") as mock_ssl_cred:
            transport_class(
                credentials=cred,
                client_cert_source_for_mtls=client_cert_source_callback
            )
            expected_cert, expected_key = client_cert_source_callback()
            mock_ssl_cred.assert_called_once_with(
                certificate_chain=expected_cert,
                private_key=expected_key
            )
def test_service_usage_host_no_port():
    """An api_endpoint without a port gets the default :443 appended."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='serviceusage.googleapis.com'),
    )
    assert client.transport._host == 'serviceusage.googleapis.com:443'
def test_service_usage_host_with_port():
    """An api_endpoint with an explicit port is used unchanged."""
    client = ServiceUsageClient(
        credentials=ga_credentials.AnonymousCredentials(),
        client_options=client_options.ClientOptions(api_endpoint='serviceusage.googleapis.com:8000'),
    )
    assert client.transport._host == 'serviceusage.googleapis.com:8000'
def test_service_usage_grpc_transport_channel():
    """A channel passed to the sync transport constructor is used verbatim.

    Fix: compare against ``None`` with ``is None`` (identity), not ``== None``
    (PEP 8 / flake8 E711).
    """
    channel = grpc.secure_channel('http://localhost/', grpc.local_channel_credentials())
    # Check that channel is used if provided.
    transport = transports.ServiceUsageGrpcTransport(
        host="squid.clam.whelk",
        channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # No ssl credentials were supplied, so none should be recorded.
    assert transport._ssl_channel_credentials is None
def test_service_usage_grpc_asyncio_transport_channel():
    """A channel passed to the asyncio transport constructor is used verbatim.

    Fix: compare against ``None`` with ``is None`` (identity), not ``== None``
    (PEP 8 / flake8 E711).
    """
    channel = aio.secure_channel('http://localhost/', grpc.local_channel_credentials())
    # Check that channel is used if provided.
    transport = transports.ServiceUsageGrpcAsyncIOTransport(
        host="squid.clam.whelk",
        channel=channel,
    )
    assert transport.grpc_channel == channel
    assert transport._host == "squid.clam.whelk:443"
    # No ssl credentials were supplied, so none should be recorded.
    assert transport._ssl_channel_credentials is None
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize("transport_class", [transports.ServiceUsageGrpcTransport, transports.ServiceUsageGrpcAsyncIOTransport])
def test_service_usage_transport_channel_mtls_with_client_cert_source(
    transport_class
):
    """Deprecated api_mtls_endpoint/client_cert_source path builds an mTLS channel."""
    with mock.patch("grpc.ssl_channel_credentials", autospec=True) as grpc_ssl_channel_cred:
        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
            mock_ssl_cred = mock.Mock()
            grpc_ssl_channel_cred.return_value = mock_ssl_cred
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            cred = ga_credentials.AnonymousCredentials()
            # The deprecated arguments must emit a DeprecationWarning.
            with pytest.warns(DeprecationWarning):
                with mock.patch.object(google.auth, 'default') as adc:
                    adc.return_value = (cred, None)
                    transport = transport_class(
                        host="squid.clam.whelk",
                        api_mtls_endpoint="mtls.squid.clam.whelk",
                        client_cert_source=client_cert_source_callback,
                    )
                    adc.assert_called_once()
            grpc_ssl_channel_cred.assert_called_once_with(
                certificate_chain=b"cert bytes", private_key=b"key bytes"
            )
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
            assert transport._ssl_channel_credentials == mock_ssl_cred
# Remove this test when deprecated arguments (api_mtls_endpoint, client_cert_source) are
# removed from grpc/grpc_asyncio transport constructor.
@pytest.mark.parametrize("transport_class", [transports.ServiceUsageGrpcTransport, transports.ServiceUsageGrpcAsyncIOTransport])
def test_service_usage_transport_channel_mtls_with_adc(
    transport_class
):
    """Deprecated api_mtls_endpoint without a cert source uses ADC SslCredentials."""
    mock_ssl_cred = mock.Mock()
    with mock.patch.multiple(
        "google.auth.transport.grpc.SslCredentials",
        __init__=mock.Mock(return_value=None),
        ssl_credentials=mock.PropertyMock(return_value=mock_ssl_cred),
    ):
        with mock.patch.object(transport_class, "create_channel") as grpc_create_channel:
            mock_grpc_channel = mock.Mock()
            grpc_create_channel.return_value = mock_grpc_channel
            mock_cred = mock.Mock()
            # The deprecated arguments must emit a DeprecationWarning.
            with pytest.warns(DeprecationWarning):
                transport = transport_class(
                    host="squid.clam.whelk",
                    credentials=mock_cred,
                    api_mtls_endpoint="mtls.squid.clam.whelk",
                    client_cert_source=None,
                )
            grpc_create_channel.assert_called_once_with(
                "mtls.squid.clam.whelk:443",
                credentials=mock_cred,
                credentials_file=None,
                scopes=None,
                ssl_credentials=mock_ssl_cred,
                quota_project_id=None,
                options=[
                    ("grpc.max_send_message_length", -1),
                    ("grpc.max_receive_message_length", -1),
                ],
            )
            assert transport.grpc_channel == mock_grpc_channel
def test_service_usage_grpc_lro_client():
client = ServiceUsageClient(
credentials=ga_credentials.AnonymousCredentials(),
transport='grpc',
)
transport = client.transport
# Ensure that we have a api-core operations client.
assert isinstance(
transport.operations_client,
operations_v1.OperationsClient,
)
# Ensure that subsequent calls to the property send the exact same object.
assert transport.operations_client is transport.operations_client
def test_service_usage_grpc_lro_async_client():
    """The async gRPC transport exposes a cached async operations client."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport='grpc_asyncio',
    )
    transport = client.transport

    # The transport must expose an api-core async operations client.
    assert isinstance(transport.operations_client, operations_v1.OperationsAsyncClient)

    # Repeated property access must return the very same cached object.
    assert transport.operations_client is transport.operations_client
def test_common_billing_account_path():
    """common_billing_account_path builds the billing account resource name."""
    billing_account = "squid"
    want = f"billingAccounts/{billing_account}"
    assert ServiceUsageClient.common_billing_account_path(billing_account) == want
def test_parse_common_billing_account_path():
    """Building then parsing a billing account path round-trips the fields."""
    kwargs = {"billing_account": "clam"}
    path = ServiceUsageClient.common_billing_account_path(**kwargs)
    assert ServiceUsageClient.parse_common_billing_account_path(path) == kwargs
def test_common_folder_path():
    """common_folder_path builds the folder resource name."""
    folder = "whelk"
    want = f"folders/{folder}"
    assert ServiceUsageClient.common_folder_path(folder) == want
def test_parse_common_folder_path():
    """Building then parsing a folder path round-trips the fields."""
    kwargs = {"folder": "octopus"}
    path = ServiceUsageClient.common_folder_path(**kwargs)
    assert ServiceUsageClient.parse_common_folder_path(path) == kwargs
def test_common_organization_path():
    """common_organization_path builds the organization resource name."""
    organization = "oyster"
    want = f"organizations/{organization}"
    assert ServiceUsageClient.common_organization_path(organization) == want
def test_parse_common_organization_path():
    """Building then parsing an organization path round-trips the fields."""
    kwargs = {"organization": "nudibranch"}
    path = ServiceUsageClient.common_organization_path(**kwargs)
    assert ServiceUsageClient.parse_common_organization_path(path) == kwargs
def test_common_project_path():
    """common_project_path builds the project resource name."""
    project = "cuttlefish"
    want = f"projects/{project}"
    assert ServiceUsageClient.common_project_path(project) == want
def test_parse_common_project_path():
    """Building then parsing a project path round-trips the fields."""
    kwargs = {"project": "mussel"}
    path = ServiceUsageClient.common_project_path(**kwargs)
    assert ServiceUsageClient.parse_common_project_path(path) == kwargs
def test_common_location_path():
    """common_location_path builds the project/location resource name."""
    project = "winkle"
    location = "nautilus"
    want = f"projects/{project}/locations/{location}"
    assert ServiceUsageClient.common_location_path(project, location) == want
def test_parse_common_location_path():
    """Building then parsing a location path round-trips the fields."""
    kwargs = {
        "project": "scallop",
        "location": "abalone",
    }
    path = ServiceUsageClient.common_location_path(**kwargs)
    assert ServiceUsageClient.parse_common_location_path(path) == kwargs
def test_client_withDEFAULT_CLIENT_INFO():
    """client_info must be forwarded to _prep_wrapped_messages both when a
    client is constructed and when a transport is constructed directly."""
    client_info = gapic_v1.client_info.ClientInfo()

    # Constructing a client forwards client_info to the transport.
    with mock.patch.object(transports.ServiceUsageTransport, '_prep_wrapped_messages') as prep:
        ServiceUsageClient(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)

    # Constructing the transport class directly does the same.
    with mock.patch.object(transports.ServiceUsageTransport, '_prep_wrapped_messages') as prep:
        transport_class = ServiceUsageClient.get_transport_class()
        transport_class(
            credentials=ga_credentials.AnonymousCredentials(),
            client_info=client_info,
        )
        prep.assert_called_once_with(client_info)
@pytest.mark.asyncio
async def test_transport_close_async():
    """The async client context manager closes the gRPC channel on exit."""
    client = ServiceUsageAsyncClient(
        credentials=ga_credentials.AnonymousCredentials(),
        transport="grpc_asyncio",
    )
    # Patch the channel's close() so we can observe exactly when it fires.
    with mock.patch.object(type(getattr(client.transport, "grpc_channel")), "close") as close:
        async with client:
            # Entering the context must not close the channel.
            close.assert_not_called()
        # Leaving the context closes the channel exactly once.
        close.assert_called_once()
def test_transport_close():
    """Closing a client closes the underlying transport channel exactly once.

    Fix: the local mapping was named ``transports``, shadowing the
    ``transports`` module imported and used elsewhere in this file.
    """
    # Map transport name -> attribute that holds the channel to be closed.
    transport_channels = {
        "grpc": "_grpc_channel",
    }
    for transport_name, close_name in transport_channels.items():
        client = ServiceUsageClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport_name
        )
        with mock.patch.object(type(getattr(client.transport, close_name)), "close") as close:
            with client:
                # Entering the context must not close the channel.
                close.assert_not_called()
            # Leaving the context closes the channel exactly once.
            close.assert_called_once()
def test_client_ctx():
    """The client context manager closes its transport on exit.

    Fix: the local list was named ``transports``, shadowing the
    ``transports`` module imported and used elsewhere in this file.
    """
    transport_names = [
        'grpc',
    ]
    for transport_name in transport_names:
        client = ServiceUsageClient(
            credentials=ga_credentials.AnonymousCredentials(),
            transport=transport_name
        )
        # Test client calls underlying transport.
        with mock.patch.object(type(client.transport), "close") as close:
            close.assert_not_called()
            with client:
                pass
            close.assert_called()
| 37.881604
| 239
| 0.673238
| 19,633
| 175,657
| 5.796567
| 0.024194
| 0.014692
| 0.023874
| 0.024208
| 0.936338
| 0.908123
| 0.890329
| 0.868388
| 0.844777
| 0.830191
| 0
| 0.00445
| 0.245234
| 175,657
| 4,636
| 240
| 37.889776
| 0.85393
| 0.195865
| 0
| 0.739709
| 0
| 0
| 0.073832
| 0.026497
| 0
| 0
| 0
| 0.000216
| 0.127515
| 1
| 0.041164
| false
| 0.00031
| 0.02476
| 0.000619
| 0.066543
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
ba1c2b976be6f318c2d364ed8ea5386ffc7b9127
| 23,474
|
py
|
Python
|
energyquantified/api/srmc.py
|
energyquantified/eq-python-client
|
3ca8423ae29c4a3dbd8f6289ab76a64b21dabf1c
|
[
"Apache-2.0"
] | 4
|
2021-03-02T10:08:37.000Z
|
2021-11-12T08:18:18.000Z
|
energyquantified/api/srmc.py
|
energyquantified/eq-python-client
|
3ca8423ae29c4a3dbd8f6289ab76a64b21dabf1c
|
[
"Apache-2.0"
] | 23
|
2020-07-22T13:41:20.000Z
|
2021-10-12T09:03:27.000Z
|
energyquantified/api/srmc.py
|
energyquantified/eq-python-client
|
3ca8423ae29c4a3dbd8f6289ab76a64b21dabf1c
|
[
"Apache-2.0"
] | 2
|
2020-10-01T20:18:08.000Z
|
2021-03-02T10:08:41.000Z
|
from .base import BaseAPI
from ..exceptions import ValidationError
from ..metadata import Curve, CurveType, OHLCField
from ..parser.srmc import parse_srmc_response
# Tuple of the Curve.curve_type values supported by the SRMC API operations
CURVE_TYPES = (CurveType.OHLC,)
class SrmcAPI(BaseAPI):
    """
    Operations for SRMC calculations in the API. Access these operations via
    an instance of the :py:class:`energyquantified.EnergyQuantified` class:

    >>> eq = EnergyQuantified(api_key="aaaa-bbbb-cccc-dddd")
    >>> eq.srmc.load_front(curve, begin, end, period='month', front=1)
    """

    @staticmethod
    def _check_curve_category(curve):
        """
        Verify that the curve refers to a coal or gas curve by checking
        whether its name contains 'gas' or 'coal'.

        :param curve: The curve or curve name
        :type curve: :py:class:`energyquantified.metadata.Curve`, str
        :raises ValidationError: When the curve is neither a gas nor a
            coal curve
        """
        # Normalize to a lower-case name; other types are left for the
        # URL-encoding step to reject.
        if isinstance(curve, Curve):
            name = curve.name.lower()
        elif isinstance(curve, str):
            name = curve.lower()
        else:
            return
        if 'gas' not in name and 'coal' not in name:
            raise ValidationError(
                reason="Provide a coal or gas curve",
                parameter="curve"
            )

    def _add_srmc_parameters(
            self,
            params,
            gas_therm_to_mwh,
            api2_tonne_to_mwh,
            carbon_emissions,
            efficiency,
            carbon_tax_area):
        """
        Add the SRMC calculation parameters shared by every operation in
        this API to ``params`` (modified in place).
        """
        self._add_number(params, "gas-therm-to-mwh", gas_therm_to_mwh)
        self._add_number(params, "api2-tonne-to-mwh", api2_tonne_to_mwh)
        self._add_number(params, "carbon-emissions", carbon_emissions, min=0.01, max=1.0)
        self._add_number(params, "efficiency", efficiency, min=0.01, max=1.0)
        self._add_area(params, "carbon-tax-area", carbon_tax_area)

    def load_front(
            self,
            curve,
            begin=None,
            end=None,
            period=None,
            front=None,
            gas_therm_to_mwh=None,
            api2_tonne_to_mwh=None,
            carbon_emissions=None,
            efficiency=None,
            carbon_tax_area=None):
        """
        Calculate historical short-run marginal costs (SRMC) for a
        continuous front contract.

        SRMC is calculated from a **coal** or **gas** curve of your choosing.
        It uses the daily reference rates from the European Central Bank (ECB)
        for currency conversions. The EUA price in the calculation is the
        settlement price from ICE.

        Some countries, such as Great Britain, have an additional flat tax on
        carbon emissions. Specify the ``carbon_tax_area`` parameter to apply
        tax rules for a specific country.

        This operation works for **coal** or **gas** curves with
        ``curve_type = OHLC`` only.

        :param curve: The curve or curve name
        :type curve: :py:class:`energyquantified.metadata.Curve`, str
        :param begin: The begin date
        :type begin: date, str, required
        :param end: The end date
        :type end: date, str, required
        :param period: Filter on contract period (day, week, month etc.),\
            defaults to None
        :type period: ContractPeriod, str, required
        :param front: The front contract (1=front, 2=second front, etc.)
        :type front: int, required
        :param gas_therm_to_mwh: Conversion from pence/therm to GBP/MWh\
            (still in higher-heating value). Defaults to 0.029307.
        :type gas_therm_to_mwh: float, optional
        :param api2_tonne_to_mwh: Conversion from API2 coal to MWh.\
            Defaults to 6.978.
        :type api2_tonne_to_mwh: float, optional
        :param carbon_emissions: The carbon content as tCO2/MWh. This value\
            varies between coal and gas. For coal, the default factor is\
            0.34056. For gas, the default factor is 0.202.
        :type carbon_emissions: float, optional
        :param efficiency: The energy efficiency. For coal, the default\
            factor is 0.42. For gas, the default factor is 0.59.
        :type efficiency: float, optional
        :param carbon_tax_area: Set an area to apply tax rules for.
        :type carbon_tax_area: Area, str, optional
        :return: An SRMC object with a list of OHLC objects
        :rtype: :py:class:`energyquantified.data.SRMC`
        """
        # Build URL
        safe_curve = self._urlencode_curve_name(curve, curve_types=CURVE_TYPES)
        self._check_curve_category(curve)
        url = f"/srmc/{safe_curve}/"
        # Parameters
        params = {}
        self._add_date(params, "begin", begin, required=True)
        self._add_date(params, "end", end, required=True)
        self._add_contract_period(params, "period", period, required=True)
        self._add_int(params, "front", front, min=1, required=True)
        self._add_srmc_parameters(
            params, gas_therm_to_mwh, api2_tonne_to_mwh,
            carbon_emissions, efficiency, carbon_tax_area
        )
        # HTTP request
        response = self._get(url, params=params)
        return parse_srmc_response(response.json())

    def load_delivery(
            self,
            curve,
            begin=None,
            end=None,
            period=None,
            delivery=None,
            gas_therm_to_mwh=None,
            api2_tonne_to_mwh=None,
            carbon_emissions=None,
            efficiency=None,
            carbon_tax_area=None):
        """
        Calculate historical short-run marginal costs (SRMC) for a specific
        contract, such as the year 2021, the month Jan 2021, etc.

        SRMC is calculated from a **coal** or **gas** curve of your choosing.
        It uses the daily reference rates from the European Central Bank (ECB)
        for currency conversions. The EUA price in the calculation is the
        settlement price from ICE.

        Some countries, such as Great Britain, have an additional flat tax on
        carbon emissions. Specify the ``carbon_tax_area`` parameter to apply
        tax rules for a specific country.

        This operation works for **coal** or **gas** curves with
        ``curve_type = OHLC`` only.

        :param curve: The curve or curve name
        :type curve: :py:class:`energyquantified.metadata.Curve`, str
        :param begin: The begin date
        :type begin: date, str, required
        :param end: The end date
        :type end: date, str, required
        :param period: Filter on contract period (day, week, month etc.),\
            defaults to None
        :type period: ContractPeriod, str, required
        :param delivery: Filter on delivery date, requires parameter ``period``\
            to be set; cannot be used together with ``front``, defaults to None
        :type delivery: date, str, required
        :param gas_therm_to_mwh: Conversion from pence/therm to GBP/MWh\
            (still in higher-heating value). Defaults to 0.029307.
        :type gas_therm_to_mwh: float, optional
        :param api2_tonne_to_mwh: Conversion from API2 coal to MWh.\
            Defaults to 6.978.
        :type api2_tonne_to_mwh: float, optional
        :param carbon_emissions: The carbon content as tCO2/MWh. This value\
            varies between coal and gas. For coal, the default factor is\
            0.34056. For gas, the default factor is 0.202.
        :type carbon_emissions: float, optional
        :param efficiency: The energy efficiency. For coal, the default\
            factor is 0.42. For gas, the default factor is 0.59.
        :type efficiency: float, optional
        :param carbon_tax_area: Set an area to apply tax rules for.
        :type carbon_tax_area: Area, str, optional
        :return: An SRMC object with a list of OHLC objects
        :rtype: :py:class:`energyquantified.data.SRMC`
        """
        # Build URL
        safe_curve = self._urlencode_curve_name(curve, curve_types=CURVE_TYPES)
        self._check_curve_category(curve)
        url = f"/srmc/{safe_curve}/"
        # Parameters
        params = {}
        self._add_date(params, "begin", begin, required=True)
        self._add_date(params, "end", end, required=True)
        self._add_contract_period(params, "period", period, required=True)
        self._add_date(params, "delivery", delivery, required=True)
        self._add_srmc_parameters(
            params, gas_therm_to_mwh, api2_tonne_to_mwh,
            carbon_emissions, efficiency, carbon_tax_area
        )
        # HTTP request
        response = self._get(url, params=params)
        return parse_srmc_response(response.json())

    def load_front_as_timeseries(
            self,
            curve,
            begin=None,
            end=None,
            frequency=None,
            period=None,
            front=None,
            fill=None,
            gas_therm_to_mwh=None,
            api2_tonne_to_mwh=None,
            carbon_emissions=None,
            efficiency=None,
            carbon_tax_area=None):
        """
        Calculate historical short-run marginal costs (SRMC) for a continuous
        front contract and convert the result to a daily time series.

        SRMC is calculated from a **coal** or **gas** curve of your choosing.
        It uses the daily reference rates from the European Central Bank (ECB)
        for currency conversions. The EUA price in the calculation is the
        settlement price from ICE.

        Some countries, such as Great Britain, have an additional flat tax on
        carbon emissions. Specify the ``carbon_tax_area`` parameter to apply
        tax rules for a specific country.

        This operation works for **coal** or **gas** curves with
        ``curve_type = OHLC`` only.

        :param curve: The curve or curve name
        :type curve: :py:class:`energyquantified.metadata.Curve`, str
        :param begin: The begin date
        :type begin: date, str, required
        :param end: The end date
        :type end: date, str, required
        :param period: Filter on contract period (day, week, month etc.),\
            defaults to None
        :type period: ContractPeriod, str, required
        :param front: The front contract (1=front, 2=second front, etc.)
        :type front: int, required
        :param fill: How to handle days without trades. Allowed values are:\
            ``no-fill`` do nothing, ``fill-holes`` fill in holes with data\
            from previous trading day, ``forward-fill`` fill in all blanks\
            with data from the previous trading day (also into the future).\
            Defaults to ``no-fill``.
        :type fill: str, optional
        :param gas_therm_to_mwh: Conversion from pence/therm to GBP/MWh\
            (still in higher-heating value). Defaults to 0.029307.
        :type gas_therm_to_mwh: float, optional
        :param api2_tonne_to_mwh: Conversion from API2 coal to MWh.\
            Defaults to 6.978.
        :type api2_tonne_to_mwh: float, optional
        :param carbon_emissions: The carbon content as tCO2/MWh. This value\
            varies between coal and gas. For coal, the default factor is\
            0.34056. For gas, the default factor is 0.202.
        :type carbon_emissions: float, optional
        :param efficiency: The energy efficiency. For coal, the default\
            factor is 0.42. For gas, the default factor is 0.59.
        :type efficiency: float, optional
        :param carbon_tax_area: Set an area to apply tax rules for.
        :type carbon_tax_area: Area, str, optional
        :return: An SRMC object with a ``timeseries`` object
        :rtype: :py:class:`energyquantified.data.SRMC`
        """
        # Build URL
        safe_curve = self._urlencode_curve_name(curve, curve_types=CURVE_TYPES)
        self._check_curve_category(curve)
        url = f"/srmc/{safe_curve}/timeseries/"
        # Parameters
        params = {}
        self._add_date(params, "begin", begin, required=True)
        self._add_date(params, "end", end, required=True)
        self._add_contract_period(params, "period", period, required=True)
        self._add_int(params, "front", front, min=1, required=True)
        self._add_fill(params, "fill", fill)
        self._add_srmc_parameters(
            params, gas_therm_to_mwh, api2_tonne_to_mwh,
            carbon_emissions, efficiency, carbon_tax_area
        )
        # HTTP request
        response = self._get(url, params=params)
        return parse_srmc_response(response.json())

    def load_delivery_as_timeseries(
            self,
            curve,
            begin=None,
            end=None,
            period=None,
            delivery=None,
            fill=None,
            gas_therm_to_mwh=None,
            api2_tonne_to_mwh=None,
            carbon_emissions=None,
            efficiency=None,
            carbon_tax_area=None):
        """
        Calculate historical short-run marginal costs (SRMC) for a specific
        contract and convert it to a daily time series.

        A specific contract could be the year 2021, the month Jan 2021, etc.

        SRMC is calculated from a **coal** or **gas** curve of your choosing.
        It uses the daily reference rates from the European Central Bank (ECB)
        for currency conversions. The EUA price in the calculation is the
        settlement price from ICE.

        Some countries, such as Great Britain, have an additional flat tax on
        carbon emissions. Specify the ``carbon_tax_area`` parameter to apply
        tax rules for a specific country.

        This operation works for **coal** or **gas** curves with
        ``curve_type = OHLC`` only.

        :param curve: The curve or curve name
        :type curve: :py:class:`energyquantified.metadata.Curve`, str
        :param begin: The begin date
        :type begin: date, str, required
        :param end: The end date
        :type end: date, str, required
        :param period: Filter on contract period (day, week, month etc.),\
            defaults to None
        :type period: ContractPeriod, str, required
        :param delivery: Filter on delivery date, requires parameter ``period``\
            to be set; cannot be used together with ``front``, defaults to None
        :type delivery: date, str, required
        :param fill: How to handle days without trades. Allowed values are:\
            ``no-fill`` do nothing, ``fill-holes`` fill in holes with data\
            from previous trading day, ``forward-fill`` fill in all blanks\
            with data from the previous trading day (also into the future).\
            Defaults to ``no-fill``.
        :type fill: str, optional
        :param gas_therm_to_mwh: Conversion from pence/therm to GBP/MWh\
            (still in higher-heating value). Defaults to 0.029307.
        :type gas_therm_to_mwh: float, optional
        :param api2_tonne_to_mwh: Conversion from API2 coal to MWh.\
            Defaults to 6.978.
        :type api2_tonne_to_mwh: float, optional
        :param carbon_emissions: The carbon content as tCO2/MWh. This value\
            varies between coal and gas. For coal, the default factor is\
            0.34056. For gas, the default factor is 0.202.
        :type carbon_emissions: float, optional
        :param efficiency: The energy efficiency. For coal, the default\
            factor is 0.42. For gas, the default factor is 0.59.
        :type efficiency: float, optional
        :param carbon_tax_area: Set an area to apply tax rules for.
        :type carbon_tax_area: Area, str, optional
        :return: An SRMC object with a ``timeseries`` object
        :rtype: :py:class:`energyquantified.data.SRMC`
        """
        # Build URL
        safe_curve = self._urlencode_curve_name(curve, curve_types=CURVE_TYPES)
        self._check_curve_category(curve)
        url = f"/srmc/{safe_curve}/timeseries/"
        # Parameters
        params = {}
        self._add_date(params, "begin", begin, required=True)
        self._add_date(params, "end", end, required=True)
        self._add_contract_period(params, "period", period, required=True)
        self._add_date(params, "delivery", delivery, required=True)
        self._add_fill(params, "fill", fill)
        self._add_srmc_parameters(
            params, gas_therm_to_mwh, api2_tonne_to_mwh,
            carbon_emissions, efficiency, carbon_tax_area
        )
        # HTTP request
        response = self._get(url, params=params)
        return parse_srmc_response(response.json())

    def latest(
            self,
            curve,
            date=None,
            gas_therm_to_mwh=None,
            api2_tonne_to_mwh=None,
            carbon_emissions=None,
            efficiency=None,
            carbon_tax_area=None):
        """
        Calculate short-run marginal costs (SRMC) for all settlement prices
        from a trading day. Defaults to using OHLC data from the latest
        available trading day, hence ``latest()``.

        If ``date`` is given, this method will try to fetch OHLC data for
        that trading day. When there is no data for the given day, OHLC data
        will be loaded for the closest trading day earlier in time with data.

        SRMC is calculated from a **coal** or **gas** curve of your choosing.
        It uses the daily reference rates from the European Central Bank (ECB)
        for currency conversions. The EUA price in the calculation is the
        settlement price from ICE.

        Some countries, such as Great Britain, have an additional flat tax on
        carbon emissions. Specify the ``carbon_tax_area`` parameter to apply
        tax rules for a specific country.

        This operation works for **coal** or **gas** curves with
        ``curve_type = OHLC`` only.

        :param curve: The curve or curve name
        :type curve: :py:class:`energyquantified.metadata.Curve`, str
        :param date: The trading date, defaults to today
        :type date: date, str, required
        :param gas_therm_to_mwh: Conversion from pence/therm to GBP/MWh\
            (still in higher-heating value). Defaults to 0.029307.
        :type gas_therm_to_mwh: float, optional
        :param api2_tonne_to_mwh: Conversion from API2 coal to MWh.\
            Defaults to 6.978.
        :type api2_tonne_to_mwh: float, optional
        :param carbon_emissions: The carbon content as tCO2/MWh. This value\
            varies between coal and gas. For coal, the default factor is\
            0.34056. For gas, the default factor is 0.202.
        :type carbon_emissions: float, optional
        :param efficiency: The energy efficiency. For coal, the default\
            factor is 0.42. For gas, the default factor is 0.59.
        :type efficiency: float, optional
        :param carbon_tax_area: Set an area to apply tax rules for.
        :type carbon_tax_area: Area, str, optional
        :return: An SRMC object with a list of OHLC objects
        :rtype: :py:class:`energyquantified.data.SRMC`
        """
        # Build URL
        safe_curve = self._urlencode_curve_name(curve, curve_types=CURVE_TYPES)
        self._check_curve_category(curve)
        url = f"/srmc/{safe_curve}/latest/"
        # Parameters
        params = {}
        self._add_date(params, "date", date)
        self._add_srmc_parameters(
            params, gas_therm_to_mwh, api2_tonne_to_mwh,
            carbon_emissions, efficiency, carbon_tax_area
        )
        # HTTP request
        response = self._get(url, params=params)
        return parse_srmc_response(response.json())

    def latest_as_periods(
            self,
            curve,
            date=None,
            gas_therm_to_mwh=None,
            api2_tonne_to_mwh=None,
            carbon_emissions=None,
            efficiency=None,
            carbon_tax_area=None):
        """
        Calculate short-run marginal costs (SRMC) for all settlement prices
        from a trading day, sort them, and merge/convert them to a continuous
        series.

        Defaults to using OHLC data from the latest available trading day,
        hence ``latest`` in the method name.

        If ``date`` is given, this method will try to fetch OHLC data for
        that trading day. When there is no data for the given day, OHLC data
        will be loaded for the closest trading day earlier in time with data.

        SRMC is calculated from a **coal** or **gas** curve of your choosing.
        It uses the daily reference rates from the European Central Bank (ECB)
        for currency conversions. The EUA price in the calculation is the
        settlement price from ICE.

        Some countries, such as Great Britain, have an additional flat tax on
        carbon emissions. Specify the ``carbon_tax_area`` parameter to apply
        tax rules for a specific country.

        This operation works for **coal** or **gas** curves with
        ``curve_type = OHLC`` only.

        :param curve: The curve or curve name
        :type curve: :py:class:`energyquantified.metadata.Curve`, str
        :param date: The trading date, defaults to today
        :type date: date, str, required
        :param gas_therm_to_mwh: Conversion from pence/therm to GBP/MWh\
            (still in higher-heating value). Defaults to 0.029307.
        :type gas_therm_to_mwh: float, optional
        :param api2_tonne_to_mwh: Conversion from API2 coal to MWh.\
            Defaults to 6.978.
        :type api2_tonne_to_mwh: float, optional
        :param carbon_emissions: The carbon content as tCO2/MWh. This value\
            varies between coal and gas. For coal, the default factor is\
            0.34056. For gas, the default factor is 0.202.
        :type carbon_emissions: float, optional
        :param efficiency: The energy efficiency. For coal, the default\
            factor is 0.42. For gas, the default factor is 0.59.
        :type efficiency: float, optional
        :param carbon_tax_area: Set an area to apply tax rules for.
        :type carbon_tax_area: Area, str, optional
        :return: An SRMC object with a period-based series
        :rtype: :py:class:`energyquantified.data.SRMC`
        """
        # Build URL
        safe_curve = self._urlencode_curve_name(curve, curve_types=CURVE_TYPES)
        self._check_curve_category(curve)
        url = f"/srmc/{safe_curve}/latest/periods/"
        # Parameters
        params = {}
        self._add_date(params, "date", date)
        self._add_srmc_parameters(
            params, gas_therm_to_mwh, api2_tonne_to_mwh,
            carbon_emissions, efficiency, carbon_tax_area
        )
        # HTTP request
        response = self._get(url, params=params)
        return parse_srmc_response(response.json())
| 46.854291
| 89
| 0.637301
| 3,147
| 23,474
| 4.615507
| 0.079441
| 0.022719
| 0.03222
| 0.02685
| 0.947194
| 0.943064
| 0.943064
| 0.943064
| 0.937281
| 0.937281
| 0
| 0.016902
| 0.276647
| 23,474
| 500
| 90
| 46.948
| 0.838516
| 0.571824
| 0
| 0.870057
| 0
| 0
| 0.09467
| 0.014602
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039548
| false
| 0
| 0.022599
| 0
| 0.101695
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
e85639732b8d92a9d746c7a67e070aafeb27e036
| 254
|
py
|
Python
|
plugins/zscaler/icon_zscaler/actions/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 46
|
2019-06-05T20:47:58.000Z
|
2022-03-29T10:18:01.000Z
|
plugins/zscaler/icon_zscaler/actions/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 386
|
2019-06-07T20:20:39.000Z
|
2022-03-30T17:35:01.000Z
|
plugins/zscaler/icon_zscaler/actions/__init__.py
|
lukaszlaszuk/insightconnect-plugins
|
8c6ce323bfbb12c55f8b5a9c08975d25eb9f8892
|
[
"MIT"
] | 43
|
2019-07-09T14:13:58.000Z
|
2022-03-28T12:04:46.000Z
|
# GENERATED BY KOMAND SDK - DO NOT EDIT
from .blacklist_url.action import BlacklistUrl
from .get_blacklist_url.action import GetBlacklistUrl
from .get_sandbox_report_for_hash.action import GetSandboxReportForHash
from .lookup_url.action import LookupUrl
| 42.333333
| 71
| 0.862205
| 35
| 254
| 6.028571
| 0.628571
| 0.227488
| 0.21327
| 0.227488
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.098425
| 254
| 5
| 72
| 50.8
| 0.921397
| 0.145669
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e87ebff9d676870b9c12405f5fb06b2ee29d1e90
| 65
|
py
|
Python
|
hello.py
|
jirapoc/mycodes
|
bea3a8691ac3a734ccf29eb0b763eb30f8606b05
|
[
"Unlicense"
] | null | null | null |
hello.py
|
jirapoc/mycodes
|
bea3a8691ac3a734ccf29eb0b763eb30f8606b05
|
[
"Unlicense"
] | 1
|
2021-03-02T07:37:00.000Z
|
2021-03-02T07:37:00.000Z
|
hello.py
|
jirapoc/mycodes
|
bea3a8691ac3a734ccf29eb0b763eb30f8606b05
|
[
"Unlicense"
] | null | null | null |
# Print the greeting three times.
for _ in range(3):
    print("Hello Paevee")
| 21.666667
| 21
| 0.738462
| 9
| 65
| 5.333333
| 0.333333
| 0.625
| 1
| 0.875
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 65
| 3
| 22
| 21.666667
| 0.8
| 0
| 0
| 1
| 0
| 0
| 0.545455
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 1
|
0
| 10
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.