| column | type |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
| field | value |
|---|---|
| hexsha | e027e2b62fbe52bbc2516f47374f1a6c70aedb19 |
| size | 17,133 |
| ext | py |
| lang | Python |
| max_stars_repo_path | unit_tests/test_tlslite_handshakehelpers.py |
| max_stars_repo_name | tomato42/tlslite-1 |
| max_stars_repo_head_hexsha | 4631799cdfac8f90b567d455e698b05d7a917599 |
| max_stars_repo_licenses | ["Unlicense"] |
| max_stars_count | 121 |
| max_stars_repo_stars_event_min_datetime | 2015-05-28T18:14:37.000Z |
| max_stars_repo_stars_event_max_datetime | 2020-11-18T11:23:59.000Z |
| max_issues_repo_path | unit_tests/test_tlslite_handshakehelpers.py |
| max_issues_repo_name | tomato42/tlslite-1 |
| max_issues_repo_head_hexsha | 4631799cdfac8f90b567d455e698b05d7a917599 |
| max_issues_repo_licenses | ["Unlicense"] |
| max_issues_count | 340 |
| max_issues_repo_issues_event_min_datetime | 2015-05-28T15:56:11.000Z |
| max_issues_repo_issues_event_max_datetime | 2020-11-04T11:40:45.000Z |
| max_forks_repo_path | unit_tests/test_tlslite_handshakehelpers.py |
| max_forks_repo_name | tomato42/tlslite-1 |
| max_forks_repo_head_hexsha | 4631799cdfac8f90b567d455e698b05d7a917599 |
| max_forks_repo_licenses | ["Unlicense"] |
| max_forks_count | 60 |
| max_forks_repo_forks_event_min_datetime | 2015-07-10T20:07:02.000Z |
| max_forks_repo_forks_event_max_datetime | 2020-10-22T08:04:20.000Z |

content:
# Copyright (c) 2014, Karel Srot
#
# See the LICENSE file for legal information regarding use of this file.
# For compatibility with Python 2.6 we need the unittest2 package,
# which is not available on 3.3 or 3.4, hence the try/except fallback
try:
import unittest2 as unittest
except ImportError:
import unittest
from tlslite.handshakehelpers import HandshakeHelpers
from tlslite.messages import ClientHello, NewSessionTicket
from tlslite.extensions import SNIExtension, PreSharedKeyExtension, \
PskIdentity
from tlslite.handshakehashes import HandshakeHashes
from tlslite.errors import TLSIllegalParameterException
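# These tests cover HandshakeHelpers.alignClientHelloPadding, which pads a
# ClientHello whose body is 256-511 bytes up to 512 bytes with the padding
# extension (type 21, b'\x00\x15'; RFC 7685), and the TLS 1.3 PSK binder
# helpers update_binders/verify_binder (RFC 8446, section 4.2.11).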
class TestHandshakeHelpers(unittest.TestCase):
def test_alignClientHelloPadding_length_less_than_256_bytes(self):
clientHello = ClientHello()
clientHello.create((3,0), bytearray(32), bytearray(0), [])
clientHelloLength = len(clientHello.write())
self.assertTrue(clientHelloLength - 4 < 256)
HandshakeHelpers.alignClientHelloPadding(clientHello)
# clientHello should not be changed due to small length
self.assertEqual(clientHelloLength, len(clientHello.write()))
def test_alignClientHelloPadding_length_256_bytes(self):
clientHello = ClientHello()
clientHello.create((3,0), bytearray(32), bytearray(0), [])
clientHello.extensions = []
ext = SNIExtension()
ext.create(hostNames=[
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeee'),
])
clientHello.extensions.append(ext)
clientHelloLength = len(clientHello.write())
        # clientHello length (excluding the 4B header) should equal 256
self.assertEqual(256, clientHelloLength - 4)
HandshakeHelpers.alignClientHelloPadding(clientHello)
        # clientHello length (excluding the 4B header) should equal 512
data = clientHello.write()
self.assertEqual(512, len(data) - 4)
# previously created data should be extended with the padding extension
# starting with the padding extension type \x00\x15 (21)
self.assertEqual(bytearray(b'\x00\x15'), data[clientHelloLength:clientHelloLength+2])
def test_alignClientHelloPadding_length_of_508_bytes(self):
clientHello = ClientHello()
clientHello.create((3,0), bytearray(32), bytearray(0), [])
clientHello.extensions = []
ext = SNIExtension()
ext.create(hostNames=[
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccdddd'),
])
clientHello.extensions.append(ext)
clientHelloLength = len(clientHello.write())
self.assertEqual(508, clientHelloLength - 4)
HandshakeHelpers.alignClientHelloPadding(clientHello)
        # clientHello length should equal 512, ignoring the 4B handshake
        # protocol header
data = clientHello.write()
self.assertEqual(512, len(data) - 4)
# padding extension should have zero byte size
self.assertEqual(bytearray(b'\x00\x15\x00\x00'), data[clientHelloLength:])
def test_alignClientHelloPadding_length_of_511_bytes(self):
clientHello = ClientHello()
clientHello.create((3,0), bytearray(32), bytearray(0), [])
clientHello.extensions = []
ext = SNIExtension()
ext.create(hostNames=[
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddd'),
])
clientHello.extensions.append(ext)
clientHelloLength = len(clientHello.write())
self.assertEqual(511, clientHelloLength - 4)
HandshakeHelpers.alignClientHelloPadding(clientHello)
        # clientHello length should equal 515, ignoring the 4B handshake
        # protocol header: a 511-byte hello cannot be padded to exactly 512,
        # since even an empty padding extension adds 4 bytes (2B type +
        # 2B length)
data = clientHello.write()
self.assertEqual(515, len(data) - 4)
# padding extension should have zero byte size
self.assertEqual(bytearray(b'\x00\x15\x00\x00'), data[clientHelloLength:])
def test_alignClientHelloPadding_length_of_512_bytes(self):
clientHello = ClientHello()
clientHello.create((3,0), bytearray(32), bytearray(0), [])
clientHello.extensions = []
ext = SNIExtension()
ext.create(hostNames=[
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccddddddddddeeeeeeeeee'),
bytearray(b'aaaaaaaaaabbbbbbbbbbccccccccccdddddddd'),
])
clientHello.extensions.append(ext)
clientHelloLength = len(clientHello.write())
self.assertEqual(512, clientHelloLength - 4)
HandshakeHelpers.alignClientHelloPadding(clientHello)
# clientHello should not be changed due to sufficient length (>=512)
self.assertEqual(clientHelloLength, len(clientHello.write()))
def test_alignClientHelloPadding_extension_list_initialization(self):
clientHello = ClientHello()
clientHello.create((3,0), bytearray(32), bytearray(0), range(0, 129))
clientHelloLength = len(clientHello.write())
self.assertTrue(512 > clientHelloLength - 4 > 255)
HandshakeHelpers.alignClientHelloPadding(clientHello)
# verify that the extension list has been added to clientHello
self.assertTrue(type(clientHello.extensions) is list)
        # clientHello length should equal 512, ignoring the 4B handshake
        # protocol header
data = clientHello.write()
self.assertEqual(512, len(data) - 4)
# padding extension should have been added after 2 extra bytes
# added due to an extension list
self.assertEqual(bytearray(b'\x00\x15'), data[clientHelloLength+2:clientHelloLength+4])
def test_update_binders_wrong_last_ext(self):
"""
PSK binders mandate that the PSK extension be the very last extension
in client hello (as it's necessary to truncate the body of the hello
up to the PSK extension and calculate hash over it)
check if the updater will abort if the passed in message has
PSK extension that is not last
"""
clientHello = ClientHello()
clientHello.create((3, 3), bytearray(32), bytearray(0), [0])
identities = [PskIdentity().create(bytearray(b'test'), 0)]
binders = [bytearray(32)]
psk_ext = PreSharedKeyExtension().create(identities, binders)
sni_ext = SNIExtension().create(b'example.com')
clientHello.extensions = [psk_ext, sni_ext]
hh = HandshakeHashes()
pskConfigs = [(b'test', b'\x00\x12\x13')]
with self.assertRaises(ValueError) as e:
HandshakeHelpers.update_binders(clientHello, hh, pskConfigs)
self.assertIn('Last extension', str(e.exception))
def test_update_binders_with_wrong_config(self):
"""
Updater requires all binders to be have associated configurations
otherwise it wouldb't be able to calculate a new binder value
in this case, the identity in ClientHello is "test" while in
configurations it's "example"
"""
clientHello = ClientHello()
clientHello.create((3, 3), bytearray(32), bytearray(0), [0])
identities = [PskIdentity().create(bytearray(b'test'), 0)]
binders = [bytearray(32)]
psk_ext = PreSharedKeyExtension().create(identities, binders)
clientHello.extensions = [psk_ext]
hh = HandshakeHashes()
pskConfigs = [(b'example', b'\x00\x12\x13')]
with self.assertRaises(ValueError) as e:
HandshakeHelpers.update_binders(clientHello, hh, pskConfigs)
self.assertIn('psk_configs', str(e.exception))
def test_update_binders_default_prf(self):
"""
Verify that configurations that don't specify the associated hash
explicitly still work correctly (as the TLS 1.3 standard mandates
that SHA-256 is used by default)
"""
clientHello = ClientHello()
clientHello.create((3, 3), bytearray(32), bytearray(0), [0])
identities = [PskIdentity().create(bytearray(b'test'), 0)]
binders = [bytearray(32)]
psk_ext = PreSharedKeyExtension().create(identities, binders)
clientHello.extensions = [psk_ext]
hh = HandshakeHashes()
pskConfigs = [(b'test', b'\x00\x12\x13')]
HandshakeHelpers.update_binders(clientHello, hh, pskConfigs)
self.assertIsInstance(clientHello.extensions[-1],
PreSharedKeyExtension)
ch_ext = clientHello.extensions[-1]
self.assertEqual(ch_ext.identities, identities)
self.assertEqual(ch_ext.binders,
[bytearray(b'wOl\xbe\x9b\xca\xa4\xf3tS\x08M\ta\xa2t'
b'\xa5lYF\xb7\x01F{M\xab\x85R\xa3'
b'\xf3\x11^')])
def test_update_binders_sha256_prf(self):
"""Check if we can calculate a binder that uses SHA-256 PRF."""
clientHello = ClientHello()
clientHello.create((3, 3), bytearray(32), bytearray(0), [0])
identities = [PskIdentity().create(bytearray(b'test'), 0)]
binders = [bytearray(32)]
psk_ext = PreSharedKeyExtension().create(identities, binders)
clientHello.extensions = [psk_ext]
hh = HandshakeHashes()
pskConfigs = [(b'test', b'\x00\x12\x13', 'sha256')]
HandshakeHelpers.update_binders(clientHello, hh, pskConfigs)
self.assertIsInstance(clientHello.extensions[-1],
PreSharedKeyExtension)
ch_ext = clientHello.extensions[-1]
self.assertEqual(ch_ext.identities, identities)
self.assertEqual(ch_ext.binders,
[bytearray(b'wOl\xbe\x9b\xca\xa4\xf3tS\x08M\ta\xa2t'
b'\xa5lYF\xb7\x01F{M\xab\x85R\xa3'
b'\xf3\x11^')])
def test_update_binders_sha384_prf(self):
"""Check if we can calculate a binder that uses SHA-384 PRF."""
clientHello = ClientHello()
clientHello.create((3, 3), bytearray(32), bytearray(0), [0])
identities = [PskIdentity().create(bytearray(b'test'), 0)]
binders = [bytearray(48)]
psk_ext = PreSharedKeyExtension().create(identities, binders)
clientHello.extensions = [psk_ext]
hh = HandshakeHashes()
pskConfigs = [(b'test', b'\x00\x12\x13', 'sha384')]
HandshakeHelpers.update_binders(clientHello, hh, pskConfigs)
self.assertIsInstance(clientHello.extensions[-1],
PreSharedKeyExtension)
ch_ext = clientHello.extensions[-1]
self.assertEqual(ch_ext.identities, identities)
self.assertEqual(ch_ext.binders,
[bytearray(b'\x8d\x92\xd2\xb7+D&\xd7\x0e>x\x1a\xc5i+'
b'M\x0e\xd2\xfe\xd6\x11\x07\n\x0c\xdc\xcf'
b'\xee\xf43\x8e\x9b@z\x00\xbcE\xff\x15%'
b'\xdc\xee\xb4\x1c\x8f\\\x03Z\xc5')])
def test_update_binders_with_ticket(self):
clientHello = ClientHello()
clientHello.create((3, 3), bytearray(32), bytearray(0), [0])
identities = [PskIdentity().create(bytearray(b'\x00ticket\x00ident'),
123)]
binders = [bytearray(48)]
psk_ext = PreSharedKeyExtension().create(identities, binders)
clientHello.extensions = [psk_ext]
ticket = NewSessionTicket().create(3600, # ticket lifetime
123, # age_add
bytearray(b'\xc0' * 48), # nonce
bytearray(b'\x00ticket\x00ident'),
[])
hh = HandshakeHashes()
resum_master_secret = bytearray(b'\x01' * 48)
HandshakeHelpers.update_binders(clientHello, hh, [], [ticket],
resum_master_secret)
self.assertIsInstance(clientHello.extensions[-1],
PreSharedKeyExtension)
ch_ext = clientHello.extensions[-1]
self.assertEqual(ch_ext.identities, identities)
self.assertEqual(ch_ext.binders,
[bytearray(b'<\x03\xcd\xd5\xce\xaeo\x8d\xc6\x8c\xe3'
b'\xe3\xbc\xa2h\xdcm0+\xa7\xbe\xf7\x9ca-'
b'\xcc\x0c\xdb\xb2ZtE\x1e:\xe2\xc4\xb8'
b'\x1bd\x10wN\x8a\xb0\x90\x7f\xb1F')])
def test_update_binders_with_missing_secret(self):
clientHello = ClientHello()
psk = PreSharedKeyExtension()
clientHello.extensions = [psk]
hh = HandshakeHashes()
with self.assertRaises(ValueError):
HandshakeHelpers.update_binders(clientHello, hh, [], [None])
def test_verify_binder_with_wrong_extension(self):
clientHello = ClientHello()
clientHello.create((3, 3), bytearray(32), bytearray(0), [0])
identities = [PskIdentity().create(bytearray(b'test'), 0)]
binders = [bytearray(32)]
psk_ext = PreSharedKeyExtension().create(identities, binders)
sni_ext = SNIExtension().create(b'example.com')
clientHello.extensions = [psk_ext, sni_ext]
hh = HandshakeHashes()
secret = b'\x00\x12\x13'
with self.assertRaises(TLSIllegalParameterException) as e:
HandshakeHelpers.verify_binder(clientHello, hh, 0, secret,
'sha256')
self.assertIn('Last extension', str(e.exception))
def test_verify_binder(self):
clientHello = ClientHello()
clientHello.create((3, 3), bytearray(32), bytearray(0), [0])
identities = [PskIdentity().create(bytearray(b'test'), 0)]
binders = [bytearray(b'\x8d\x92\xd2\xb7+D&\xd7\x0e>x\x1a\xc5i+'
b'M\x0e\xd2\xfe\xd6\x11\x07\n\x0c\xdc\xcf'
b'\xee\xf43\x8e\x9b@z\x00\xbcE\xff\x15%'
b'\xdc\xee\xb4\x1c\x8f\\\x03Z\xc5')]
psk_ext = PreSharedKeyExtension().create(identities, binders)
clientHello.extensions = [psk_ext]
hh = HandshakeHashes()
secret = b'\x00\x12\x13'
ret = HandshakeHelpers.verify_binder(clientHello, hh, 0, secret,
'sha384')
self.assertIs(ret, True)
def test_verify_binder_with_wrong_binder(self):
clientHello = ClientHello()
clientHello.create((3, 3), bytearray(32), bytearray(0), [0])
identities = [PskIdentity().create(bytearray(b'test'), 0)]
binders = [bytearray(48)]
psk_ext = PreSharedKeyExtension().create(identities, binders)
clientHello.extensions = [psk_ext]
hh = HandshakeHashes()
secret = b'\x00\x12\x13'
with self.assertRaises(TLSIllegalParameterException) as e:
HandshakeHelpers.verify_binder(clientHello, hh, 0, secret,
'sha384')
self.assertIn('not verify', str(e.exception))
if __name__ == '__main__':
unittest.main()
| field | value |
|---|---|
| avg_line_length | 44.043702 |
| max_line_length | 95 |
| alphanum_fraction | 0.642736 |
| qsc_code_num_words_quality_signal | 1,627 |
| qsc_code_num_chars_quality_signal | 17,133 |
| qsc_code_mean_word_length_quality_signal | 6.688384 |
| qsc_code_frac_words_unique_quality_signal | 0.186232 |
| qsc_code_frac_chars_top_2grams_quality_signal | 0.047785 |
| qsc_code_frac_chars_top_3grams_quality_signal | 0.14887 |
| qsc_code_frac_chars_top_4grams_quality_signal | 0.1712 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | 0.777431 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | 0.761165 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | 0.745176 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | 0.7347 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | 0.710439 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | 0.677908 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | 0 |
| qsc_code_frac_chars_digital_quality_signal | 0.037359 |
| qsc_code_frac_chars_whitespace_quality_signal | 0.254771 |
| qsc_code_size_file_byte_quality_signal | 17,133 |
| qsc_code_num_lines_quality_signal | 388 |
| qsc_code_num_chars_line_max_quality_signal | 96 |
| qsc_code_num_chars_line_mean_quality_signal | 44.157216 |
| qsc_code_frac_chars_alphabet_quality_signal | 0.814928 |
| qsc_code_frac_chars_comments_quality_signal | 0.109146 |
| qsc_code_cate_xml_start_quality_signal | 0 |
| qsc_code_frac_lines_dupe_lines_quality_signal | 0.725926 |
| qsc_code_cate_autogen_quality_signal | 0 |
| qsc_code_frac_lines_long_string_quality_signal | 0.014815 |
| qsc_code_frac_chars_string_length_quality_signal | 0.162524 |
| qsc_code_frac_chars_long_word_length_quality_signal | 0.137867 |
| qsc_code_frac_lines_string_concat_quality_signal | 0 |
| qsc_code_cate_encoded_data_quality_signal | 0 |
| qsc_code_frac_chars_hex_words_quality_signal | 0 |
| qsc_code_frac_lines_prompt_comments_quality_signal | 0 |
| qsc_code_frac_lines_assert_quality_signal | 0.144444 |
| qsc_codepython_cate_ast_quality_signal | 1 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | 0.059259 |
| qsc_codepython_cate_var_zero_quality_signal | false |
| qsc_codepython_frac_lines_pass_quality_signal | 0 |
| qsc_codepython_frac_lines_import_quality_signal | 0.02963 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | 0 |
| qsc_codepython_score_lines_no_logic_quality_signal | 0.092593 |
| qsc_codepython_frac_lines_print_quality_signal | 0 |
| qsc_code_num_words | 0 |
| qsc_code_num_chars | 0 |
| qsc_code_mean_word_length | 0 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | 0 |
| qsc_code_frac_chars_top_3grams | 0 |
| qsc_code_frac_chars_top_4grams | 1 |
| qsc_code_frac_chars_dupe_5grams | 0 |
| qsc_code_frac_chars_dupe_6grams | 1 |
| qsc_code_frac_chars_dupe_7grams | 1 |
| qsc_code_frac_chars_dupe_8grams | 1 |
| qsc_code_frac_chars_dupe_9grams | 1 |
| qsc_code_frac_chars_dupe_10grams | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 |
| qsc_code_frac_chars_digital | 0 |
| qsc_code_frac_chars_whitespace | 0 |
| qsc_code_size_file_byte | 0 |
| qsc_code_num_lines | 0 |
| qsc_code_num_chars_line_max | 0 |
| qsc_code_num_chars_line_mean | 0 |
| qsc_code_frac_chars_alphabet | 0 |
| qsc_code_frac_chars_comments | 0 |
| qsc_code_cate_xml_start | 0 |
| qsc_code_frac_lines_dupe_lines | 1 |
| qsc_code_cate_autogen | 0 |
| qsc_code_frac_lines_long_string | 0 |
| qsc_code_frac_chars_string_length | 0 |
| qsc_code_frac_chars_long_word_length | 0 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | 0 |
| qsc_code_frac_chars_hex_words | 0 |
| qsc_code_frac_lines_prompt_comments | 0 |
| qsc_code_frac_lines_assert | 0 |
| qsc_codepython_cate_ast | 0 |
| qsc_codepython_frac_lines_func_ratio | 0 |
| qsc_codepython_cate_var_zero | 0 |
| qsc_codepython_frac_lines_pass | 0 |
| qsc_codepython_frac_lines_import | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 |
| qsc_codepython_score_lines_no_logic | 0 |
| qsc_codepython_frac_lines_print | 0 |
| effective | 0 |
| hits | 7 |
| field | value |
|---|---|
| hexsha | e03dce32b1765c11eb1c9931b09be6113436d6fc |
| size | 100 |
| ext | py |
| lang | Python |
| max_stars_repo_path | confu/schema/__init__.py |
| max_stars_repo_name | roysc/confu |
| max_stars_repo_head_hexsha | 3d0f5e7036f0c8f5502dd455f32e08202ce72a75 |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | confu/schema/__init__.py |
| max_issues_repo_name | roysc/confu |
| max_issues_repo_head_hexsha | 3d0f5e7036f0c8f5502dd455f32e08202ce72a75 |
| max_issues_repo_licenses | ["Apache-2.0"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | confu/schema/__init__.py |
| max_forks_repo_name | roysc/confu |
| max_forks_repo_head_hexsha | 3d0f5e7036f0c8f5502dd455f32e08202ce72a75 |
| max_forks_repo_licenses | ["Apache-2.0"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

content:
from confu.schema.core import * # noqa
from confu.schema.inet import Email, Url, IpAddress # noqa
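# This __init__ module only re-exports the public schema API; the "# noqa"
# markers suppress unused-import warnings for the re-exported names.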
| field | value |
|---|---|
| avg_line_length | 33.333333 |
| max_line_length | 59 |
| alphanum_fraction | 0.75 |
| qsc_code_num_words_quality_signal | 15 |
| qsc_code_num_chars_quality_signal | 100 |
| qsc_code_mean_word_length_quality_signal | 5 |
| qsc_code_frac_words_unique_quality_signal | 0.666667 |
| qsc_code_frac_chars_top_2grams_quality_signal | 0.24 |
| qsc_code_frac_chars_top_3grams_quality_signal | 0.4 |
| qsc_code_frac_chars_top_4grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | 0 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | 0 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | 0 |
| qsc_code_frac_chars_digital_quality_signal | 0 |
| qsc_code_frac_chars_whitespace_quality_signal | 0.16 |
| qsc_code_size_file_byte_quality_signal | 100 |
| qsc_code_num_lines_quality_signal | 2 |
| qsc_code_num_chars_line_max_quality_signal | 60 |
| qsc_code_num_chars_line_mean_quality_signal | 50 |
| qsc_code_frac_chars_alphabet_quality_signal | 0.892857 |
| qsc_code_frac_chars_comments_quality_signal | 0.09 |
| qsc_code_cate_xml_start_quality_signal | 0 |
| qsc_code_frac_lines_dupe_lines_quality_signal | 0 |
| qsc_code_cate_autogen_quality_signal | 0 |
| qsc_code_frac_lines_long_string_quality_signal | 0 |
| qsc_code_frac_chars_string_length_quality_signal | 0 |
| qsc_code_frac_chars_long_word_length_quality_signal | 0 |
| qsc_code_frac_lines_string_concat_quality_signal | 0 |
| qsc_code_cate_encoded_data_quality_signal | 0 |
| qsc_code_frac_chars_hex_words_quality_signal | 0 |
| qsc_code_frac_lines_prompt_comments_quality_signal | 0 |
| qsc_code_frac_lines_assert_quality_signal | 0 |
| qsc_codepython_cate_ast_quality_signal | 1 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | 0 |
| qsc_codepython_cate_var_zero_quality_signal | true |
| qsc_codepython_frac_lines_pass_quality_signal | 0 |
| qsc_codepython_frac_lines_import_quality_signal | 1 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | 0 |
| qsc_codepython_score_lines_no_logic_quality_signal | 1 |
| qsc_codepython_frac_lines_print_quality_signal | 0 |
| qsc_code_num_words | 1 |
| qsc_code_num_chars | 0 |
| qsc_code_mean_word_length | 0 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | 1 |
| qsc_code_frac_chars_top_3grams | 1 |
| qsc_code_frac_chars_top_4grams | 0 |
| qsc_code_frac_chars_dupe_5grams | 0 |
| qsc_code_frac_chars_dupe_6grams | 0 |
| qsc_code_frac_chars_dupe_7grams | 0 |
| qsc_code_frac_chars_dupe_8grams | 0 |
| qsc_code_frac_chars_dupe_9grams | 0 |
| qsc_code_frac_chars_dupe_10grams | 0 |
| qsc_code_frac_chars_replacement_symbols | 0 |
| qsc_code_frac_chars_digital | 0 |
| qsc_code_frac_chars_whitespace | 0 |
| qsc_code_size_file_byte | 0 |
| qsc_code_num_lines | 1 |
| qsc_code_num_chars_line_max | 0 |
| qsc_code_num_chars_line_mean | 1 |
| qsc_code_frac_chars_alphabet | 0 |
| qsc_code_frac_chars_comments | 1 |
| qsc_code_cate_xml_start | 0 |
| qsc_code_frac_lines_dupe_lines | 0 |
| qsc_code_cate_autogen | 0 |
| qsc_code_frac_lines_long_string | 0 |
| qsc_code_frac_chars_string_length | 0 |
| qsc_code_frac_chars_long_word_length | 0 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | 0 |
| qsc_code_frac_chars_hex_words | 0 |
| qsc_code_frac_lines_prompt_comments | 0 |
| qsc_code_frac_lines_assert | 0 |
| qsc_codepython_cate_ast | 0 |
| qsc_codepython_frac_lines_func_ratio | 0 |
| qsc_codepython_cate_var_zero | 1 |
| qsc_codepython_frac_lines_pass | 0 |
| qsc_codepython_frac_lines_import | 1 |
| qsc_codepython_frac_lines_simplefunc | 0 |
| qsc_codepython_score_lines_no_logic | 1 |
| qsc_codepython_frac_lines_print | 0 |
| effective | 0 |
| hits | 7 |
| field | value |
|---|---|
| hexsha | e040973b7db6a6c8980bc5446441dff17669cf14 |
| size | 14,501 |
| ext | py |
| lang | Python |
| max_stars_repo_path | tests/test_views.py |
| max_stars_repo_name | ivoire/KissCache |
| max_stars_repo_head_hexsha | ccdf6454bccc29dbd0cd30b00c95a37fa3680b2c |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | tests/test_views.py |
| max_issues_repo_name | ivoire/KissCache |
| max_issues_repo_head_hexsha | ccdf6454bccc29dbd0cd30b00c95a37fa3680b2c |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | tests/test_views.py |
| max_forks_repo_name | ivoire/KissCache |
| max_forks_repo_head_hexsha | ccdf6454bccc29dbd0cd30b00c95a37fa3680b2c |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | 1 |
| max_forks_repo_forks_event_min_datetime | 2020-09-29T21:05:43.000Z |
| max_forks_repo_forks_event_max_datetime | 2020-09-29T21:05:43.000Z |

content:
# -*- coding: utf-8 -*-
# vim: set ts=4
#
# Copyright 2019 Linaro Limited
#
# Author: Rémi Duraffort <remi.duraffort@linaro.org>
#
# SPDX-License-Identifier: MIT
import json
from django.http import FileResponse, JsonResponse
from django.http.response import StreamingHttpResponse
from django.urls import reverse
from django.utils import timezone
from kiss_cache.__about__ import __version__
from kiss_cache.models import Resource, Statistic
def test_index(client):
ret = client.get(reverse("home"))
assert len(ret.templates) == 2
assert ret.templates[0].name == "kiss_cache/index.html"
assert ret.templates[1].name == "kiss_cache/base.html"
assert ret.context["api_url"] == "http://testserver/api/v1/fetch/"
assert ret.context["version"] == __version__
def test_help(client):
ret = client.get(reverse("help"))
assert len(ret.templates) == 2
assert ret.templates[0].name == "kiss_cache/help.html"
assert ret.templates[1].name == "kiss_cache/base.html"
assert ret.context["ALLOWED_NETWORKS"] == []
assert ret.context["user_ip"] == "127.0.0.1"
assert ret.context["user_ip_allowed"] == True
assert ret.context["api_url"] == "http://testserver/api/v1/fetch/"
def test_statistics(client, db, settings):
# Empty page
ret = client.get(reverse("statistics"))
assert len(ret.templates) == 2
assert ret.templates[0].name == "kiss_cache/statistics.html"
assert ret.templates[1].name == "kiss_cache/base.html"
assert ret.context["total_size"] == 0
assert ret.context["quota"] == settings.RESOURCE_QUOTA
assert ret.context["progress"] == 0
assert ret.context["progress_status"] == "success"
assert ret.context["scheduled_count"] == 0
assert ret.context["downloading_count"] == 0
assert ret.context["successes_count"] == 0
assert ret.context["failures_count"] == 0
assert ret.context["statistics_download"] == 0
assert ret.context["statistics_upload"] == 0
assert ret.context["statistics_requests"] == 0
# Create some resources
MEGA = 1024 * 1024
Resource.objects.create(url="http://example.com/1", state=Resource.STATE_SCHEDULED)
Resource.objects.create(
url="http://example.com/2",
state=Resource.STATE_DOWNLOADING,
content_length=111 * MEGA,
)
Resource.objects.create(
url="http://example.com/3",
state=Resource.STATE_FINISHED,
status_code=200,
content_length=222 * MEGA,
)
Resource.objects.create(
url="http://example.com/4",
state=Resource.STATE_FINISHED,
status_code=504,
content_length=333 * MEGA,
)
Statistic.objects.filter(stat=Statistic.STAT_DOWNLOAD).update(value=666 * MEGA)
Statistic.objects.filter(stat=Statistic.STAT_UPLOAD).update(value=2 * 666 * MEGA)
ret = client.get(reverse("statistics"))
assert len(ret.templates) == 2
assert ret.templates[0].name == "kiss_cache/statistics.html"
assert ret.templates[1].name == "kiss_cache/base.html"
assert ret.context["total_size"] == 666 * MEGA
assert ret.context["quota"] == settings.RESOURCE_QUOTA
assert ret.context["progress"] == 33
assert ret.context["progress_status"] == "success"
assert ret.context["scheduled_count"] == 1
assert ret.context["downloading_count"] == 1
assert ret.context["successes_count"] == 1
assert ret.context["failures_count"] == 1
assert ret.context["statistics_download"] == 666 * MEGA
assert ret.context["statistics_upload"] == 2 * 666 * MEGA
assert ret.context["statistics_requests"] == 0
def test_resources(client, db):
# Empty page
ret = client.get(reverse("resources.successes"))
assert len(ret.templates) == 2
assert ret.templates[0].name == "kiss_cache/resources.html"
assert ret.templates[1].name == "kiss_cache/base.html"
assert len(ret.context["resources"].object_list) == 0
assert ret.context["resources"].number == 1
assert ret.context["state"] == "successes"
assert ret.context["url_name"] == "resources.successes"
assert ret.context["scheduled_count"] == 0
assert ret.context["downloading_count"] == 0
assert ret.context["successes_count"] == 0
assert ret.context["failures_count"] == 0
# Create some resources
MEGA = 1024 * 1024
Resource.objects.create(url="http://example.com/1", state=Resource.STATE_SCHEDULED)
Resource.objects.create(
url="http://example.com/2",
state=Resource.STATE_DOWNLOADING,
content_length=111 * MEGA,
)
Resource.objects.create(
url="http://example.com/3",
state=Resource.STATE_FINISHED,
status_code=200,
content_length=222 * MEGA,
)
Resource.objects.create(
url="http://example.com/4",
state=Resource.STATE_FINISHED,
status_code=504,
content_length=333 * MEGA,
)
Statistic.objects.filter(stat=Statistic.STAT_DOWNLOAD).update(value=666 * MEGA)
Statistic.objects.filter(stat=Statistic.STAT_UPLOAD).update(value=2 * 666 * MEGA)
    # Test each page
ret = client.get(reverse("resources.scheduled"))
assert len(ret.templates) == 2
assert ret.templates[0].name == "kiss_cache/resources.html"
assert ret.templates[1].name == "kiss_cache/base.html"
assert len(ret.context["resources"].object_list) == 1
assert ret.context["resources"].object_list[0].url == "http://example.com/1"
assert ret.context["resources"].number == 1
assert ret.context["state"] == "scheduled"
assert ret.context["url_name"] == "resources.scheduled"
assert ret.context["scheduled_count"] == 1
assert ret.context["downloading_count"] == 1
assert ret.context["successes_count"] == 1
assert ret.context["failures_count"] == 1
ret = client.get(reverse("resources.downloading"))
assert len(ret.templates) == 2
assert ret.templates[0].name == "kiss_cache/resources.html"
assert ret.templates[1].name == "kiss_cache/base.html"
assert len(ret.context["resources"].object_list) == 1
assert ret.context["resources"].object_list[0].url == "http://example.com/2"
assert ret.context["resources"].number == 1
assert ret.context["state"] == "downloading"
assert ret.context["url_name"] == "resources.downloading"
assert ret.context["scheduled_count"] == 1
assert ret.context["downloading_count"] == 1
assert ret.context["successes_count"] == 1
assert ret.context["failures_count"] == 1
ret = client.get(reverse("resources.successes"))
assert len(ret.templates) == 2
assert ret.templates[0].name == "kiss_cache/resources.html"
assert ret.templates[1].name == "kiss_cache/base.html"
assert len(ret.context["resources"].object_list) == 1
assert ret.context["resources"].object_list[0].url == "http://example.com/3"
assert ret.context["resources"].number == 1
assert ret.context["state"] == "successes"
assert ret.context["url_name"] == "resources.successes"
assert ret.context["scheduled_count"] == 1
assert ret.context["downloading_count"] == 1
assert ret.context["successes_count"] == 1
assert ret.context["failures_count"] == 1
ret = client.get(reverse("resources.failures"))
assert len(ret.templates) == 2
assert ret.templates[0].name == "kiss_cache/resources.html"
assert ret.templates[1].name == "kiss_cache/base.html"
assert len(ret.context["resources"].object_list) == 1
assert ret.context["resources"].object_list[0].url == "http://example.com/4"
assert ret.context["resources"].number == 1
assert ret.context["state"] == "failures"
assert ret.context["url_name"] == "resources.failures"
assert ret.context["scheduled_count"] == 1
assert ret.context["downloading_count"] == 1
assert ret.context["successes_count"] == 1
assert ret.context["failures_count"] == 1
# Test errors
ret = client.get(reverse("resources.failures") + "?order=-usag")
assert ret.status_code == 400
ret = client.get(reverse("resources.failures") + "10/")
assert ret.status_code == 404
def test_api_health(client, mocker, settings, tmpdir):
settings.SHUTDOWN_PATH = str(tmpdir / "shutdown")
ret = client.get(reverse("api.health"))
(tmpdir / "shutdown").write_text("", encoding="utf-8")
ret = client.get(reverse("api.health"))
assert ret.status_code == 503
def test_api_fetch(client, db, mocker, settings, tmpdir):
URL = "https://example.com"
def mocked_fetch(url):
assert url == URL
Resource.objects.filter(url=URL).update(
state=Resource.STATE_FINISHED,
status_code=200,
content_length=12,
content_type="text/html; charset=UTF-8",
)
path = Resource.objects.get(url=URL).path
assert (
path == "10/0680ad546ce6a577f42f52df33b4cfdca756859e664b8d7de329b150d09ce9"
)
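    # The cache path is a 64-hex-digit digest of the URL split as
    # "<first 2 chars>/<remaining 62 chars>", a two-level fan-out layout
    # (the exact hash function isn't visible here; the length matches
    # SHA-256).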
(tmpdir / "10").mkdir()
(
tmpdir / "10/0680ad546ce6a577f42f52df33b4cfdca756859e664b8d7de329b150d09ce9"
).write_text("Hello world!", encoding="utf-8")
settings.DOWNLOAD_PATH = str(tmpdir)
fetch = mocker.patch("kiss_cache.tasks.fetch.delay", mocked_fetch)
ret = client.get(f"{reverse('api.fetch')}?url={URL}&ttl=42d")
assert isinstance(ret, FileResponse)
assert ret._closable_objects[0].name == str(
tmpdir / "10/0680ad546ce6a577f42f52df33b4cfdca756859e664b8d7de329b150d09ce9"
)
assert ret.status_code == 200
assert ret._headers["content-type"] == ("Content-Type", "text/html; charset=UTF-8")
assert ret._headers["content-length"] == ("Content-Length", "12")
assert next(ret.streaming_content) == b"Hello world!"
# Download a second time
ret = client.get(f"{reverse('api.fetch')}?url={URL}&ttl=42d")
assert isinstance(ret, FileResponse)
assert ret._closable_objects[0].name == str(
tmpdir / "10/0680ad546ce6a577f42f52df33b4cfdca756859e664b8d7de329b150d09ce9"
)
assert ret.status_code == 200
assert ret._headers["content-type"] == ("Content-Type", "text/html; charset=UTF-8")
assert ret._headers["content-length"] == ("Content-Length", "12")
assert next(ret.streaming_content) == b"Hello world!"
# Download a third time and set a shorter ttl
now = timezone.now()
mocker.patch("django.utils.timezone.now", lambda: now)
ret = client.get(f"{reverse('api.fetch')}?url={URL}&ttl=4d")
assert isinstance(ret, FileResponse)
assert ret._closable_objects[0].name == str(
tmpdir / "10/0680ad546ce6a577f42f52df33b4cfdca756859e664b8d7de329b150d09ce9"
)
assert ret.status_code == 200
assert ret._headers["content-type"] == ("Content-Type", "text/html; charset=UTF-8")
assert ret._headers["content-length"] == ("Content-Length", "12")
assert next(ret.streaming_content) == b"Hello world!"
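    # ttl was lowered to "4d", i.e. 4 * 24 * 3600 = 345,600 seconds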
assert Resource.objects.get(url=URL).ttl == 345_600
    # Download a fourth time: set the Content-Disposition
ret = client.get(f"{reverse('api.fetch')}kernel?url={URL}")
assert isinstance(ret, FileResponse)
assert ret._closable_objects[0].name == str(
tmpdir / "10/0680ad546ce6a577f42f52df33b4cfdca756859e664b8d7de329b150d09ce9"
)
assert ret.status_code == 200
assert ret._headers["content-type"] == ("Content-Type", "text/html; charset=UTF-8")
assert ret._headers["content-length"] == ("Content-Length", "12")
assert ret._headers["content-disposition"] == (
"Content-Disposition",
"attachment; filename=kernel",
)
assert next(ret.streaming_content) == b"Hello world!"
# Download a fifth time with status_code = 404
Resource.objects.filter(url=URL).update(status_code=404)
ret = client.get(f"{reverse('api.fetch')}?url={URL}")
assert ret.status_code == 404
def test_api_fetch_streaming(client, db, mocker, settings, tmpdir):
URL = "https://example.com"
def mocked_fetch(url):
assert url == URL
Resource.objects.filter(url=URL).update(
state=Resource.STATE_DOWNLOADING,
status_code=200,
content_length=12,
content_type="text/html; charset=UTF-8",
)
path = Resource.objects.get(url=URL).path
assert (
path == "10/0680ad546ce6a577f42f52df33b4cfdca756859e664b8d7de329b150d09ce9"
)
(tmpdir / "10").mkdir()
(
tmpdir / "10/0680ad546ce6a577f42f52df33b4cfdca756859e664b8d7de329b150d09ce9"
).write_text("Hello world!", encoding="utf-8")
settings.DOWNLOAD_PATH = str(tmpdir)
fetch = mocker.patch("kiss_cache.tasks.fetch.delay", mocked_fetch)
ret = client.get(f"{reverse('api.fetch')}ramdisk.tgz?url={URL}&ttl=42d")
assert isinstance(ret, StreamingHttpResponse)
assert ret.status_code == 200
assert ret._headers["content-type"] == ("Content-Type", "text/html; charset=UTF-8")
assert ret._headers["content-length"] == ("Content-Length", "12")
assert ret._headers["content-disposition"] == (
"Content-Disposition",
"attachment; filename=ramdisk.tgz",
)
assert next(ret.streaming_content) == b"Hello world!"
def test_api_fetch_errors(client, db, mocker):
URL = "https://example.com"
# missing url
ret = client.get(reverse("api.fetch"))
assert ret.status_code == 400
# Invalid ttl format
ret = client.get(f"{reverse('api.fetch')}?url={URL}&ttl=1k")
assert ret.status_code == 400
    # Over quota: expect 507 Insufficient Storage
mocker.patch("kiss_cache.models.Resource.is_over_quota", lambda: True)
ret = client.get(f"{reverse('api.fetch')}?url={URL}")
assert ret.status_code == 507
def test_api_status(client, db):
ret = client.get(reverse("api.status"))
assert ret.status_code == 200
assert isinstance(ret, JsonResponse)
data = json.loads(ret.content)
assert data["disk_usage"] == 0
assert data["disk_usage_percent"] == 0
assert data["disk_quota"] == 2_147_483_648
assert data["instance"] == "http://testserver/"
assert data["resources_scheduled"] == 0
assert data["resources_downloading"] == 0
assert data["resources_successes"] == 0
assert data["resources_failures"] == 0
assert data["resources_usage"] == 0
assert data["statistics_successes"] == 0
assert data["statistics_failures"] == 0
assert data["statistics_download"] == 0
assert data["statistics_upload"] == 0
assert data["statistics_requests"] == 0
assert data["version"] == __version__
assert "timestamp" in data
| field | value |
|---|---|
| avg_line_length | 39.837912 |
| max_line_length | 88 |
| alphanum_fraction | 0.673057 |
| qsc_code_num_words_quality_signal | 1,795 |
| qsc_code_num_chars_quality_signal | 14,501 |
| qsc_code_mean_word_length_quality_signal | 5.317549 |
| qsc_code_frac_words_unique_quality_signal | 0.108078 |
| qsc_code_frac_chars_top_2grams_quality_signal | 0.107491 |
| qsc_code_frac_chars_top_3grams_quality_signal | 0.11231 |
| qsc_code_frac_chars_top_4grams_quality_signal | 0.048088 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | 0.821792 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | 0.782504 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | 0.738397 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | 0.732216 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | 0.723311 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | 0.722473 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | 0 |
| qsc_code_frac_chars_digital_quality_signal | 0.051545 |
| qsc_code_frac_chars_whitespace_quality_signal | 0.178539 |
| qsc_code_size_file_byte_quality_signal | 14,501 |
| qsc_code_num_lines_quality_signal | 363 |
| qsc_code_num_chars_line_max_quality_signal | 89 |
| qsc_code_num_chars_line_mean_quality_signal | 39.947658 |
| qsc_code_frac_chars_alphabet_quality_signal | 0.749748 |
| qsc_code_frac_chars_comments_quality_signal | 0.030688 |
| qsc_code_cate_xml_start_quality_signal | 0 |
| qsc_code_frac_lines_dupe_lines_quality_signal | 0.656566 |
| qsc_code_cate_autogen_quality_signal | 0 |
| qsc_code_frac_lines_long_string_quality_signal | 0 |
| qsc_code_frac_chars_string_length_quality_signal | 0.278538 |
| qsc_code_frac_chars_long_word_length_quality_signal | 0.086433 |
| qsc_code_frac_lines_string_concat_quality_signal | 0 |
| qsc_code_cate_encoded_data_quality_signal | 0 |
| qsc_code_frac_chars_hex_words_quality_signal | 0 |
| qsc_code_frac_lines_prompt_comments_quality_signal | 0 |
| qsc_code_frac_lines_assert_quality_signal | 0.538721 |
| qsc_codepython_cate_ast_quality_signal | 1 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | 0.037037 |
| qsc_codepython_cate_var_zero_quality_signal | false |
| qsc_codepython_frac_lines_pass_quality_signal | 0 |
| qsc_codepython_frac_lines_import_quality_signal | 0.023569 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | 0 |
| qsc_codepython_score_lines_no_logic_quality_signal | 0.060606 |
| qsc_codepython_frac_lines_print_quality_signal | 0 |
| qsc_code_num_words | 0 |
| qsc_code_num_chars | 0 |
| qsc_code_mean_word_length | 0 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | 0 |
| qsc_code_frac_chars_top_3grams | 0 |
| qsc_code_frac_chars_top_4grams | 0 |
| qsc_code_frac_chars_dupe_5grams | 1 |
| qsc_code_frac_chars_dupe_6grams | 1 |
| qsc_code_frac_chars_dupe_7grams | 1 |
| qsc_code_frac_chars_dupe_8grams | 1 |
| qsc_code_frac_chars_dupe_9grams | 1 |
| qsc_code_frac_chars_dupe_10grams | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 |
| qsc_code_frac_chars_digital | 0 |
| qsc_code_frac_chars_whitespace | 0 |
| qsc_code_size_file_byte | 0 |
| qsc_code_num_lines | 0 |
| qsc_code_num_chars_line_max | 0 |
| qsc_code_num_chars_line_mean | 0 |
| qsc_code_frac_chars_alphabet | 0 |
| qsc_code_frac_chars_comments | 0 |
| qsc_code_cate_xml_start | 0 |
| qsc_code_frac_lines_dupe_lines | 0 |
| qsc_code_cate_autogen | 0 |
| qsc_code_frac_lines_long_string | 0 |
| qsc_code_frac_chars_string_length | 0 |
| qsc_code_frac_chars_long_word_length | 0 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | 0 |
| qsc_code_frac_chars_hex_words | 0 |
| qsc_code_frac_lines_prompt_comments | 0 |
| qsc_code_frac_lines_assert | 1 |
| qsc_codepython_cate_ast | 0 |
| qsc_codepython_frac_lines_func_ratio | 0 |
| qsc_codepython_cate_var_zero | 0 |
| qsc_codepython_frac_lines_pass | 0 |
| qsc_codepython_frac_lines_import | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 |
| qsc_codepython_score_lines_no_logic | 0 |
| qsc_codepython_frac_lines_print | 0 |
| effective | 0 |
| hits | 7 |
| field | value |
|---|---|
| hexsha | e0451c091d9b3c761dc314533fc9aac3d7cab63c |
| size | 20,008 |
| ext | py |
| lang | Python |
| max_stars_repo_path | ironic/tests/unit/drivers/modules/irmc/test_power.py |
| max_stars_repo_name | NaohiroTamura/ironic |
| max_stars_repo_head_hexsha | 1fcb6c52a22c9c025dbf27931720ce2eda08704f |
| max_stars_repo_licenses | ["Apache-2.0"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | ironic/tests/unit/drivers/modules/irmc/test_power.py |
| max_issues_repo_name | NaohiroTamura/ironic |
| max_issues_repo_head_hexsha | 1fcb6c52a22c9c025dbf27931720ce2eda08704f |
| max_issues_repo_licenses | ["Apache-2.0"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | ironic/tests/unit/drivers/modules/irmc/test_power.py |
| max_forks_repo_name | NaohiroTamura/ironic |
| max_forks_repo_head_hexsha | 1fcb6c52a22c9c025dbf27931720ce2eda08704f |
| max_forks_repo_licenses | ["Apache-2.0"] |
| max_forks_count | 1 |
| max_forks_repo_forks_event_min_datetime | 2022-03-25T14:26:10.000Z |
| max_forks_repo_forks_event_max_datetime | 2022-03-25T14:26:10.000Z |

content:
# Copyright 2015 FUJITSU LIMITED
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Test class for iRMC Power Driver
"""
import mock
from oslo_utils import uuidutils
from ironic.common import exception
from ironic.common import states
from ironic.conductor import task_manager
from ironic.drivers.modules.irmc import boot as irmc_boot
from ironic.drivers.modules.irmc import common as irmc_common
from ironic.drivers.modules.irmc import power as irmc_power
from ironic.tests.unit.conductor import mgr_utils
from ironic.tests.unit.db import base as db_base
from ironic.tests.unit.db import utils as db_utils
from ironic.tests.unit.objects import utils as obj_utils
INFO_DICT = db_utils.get_test_irmc_info()
class IRMCPowerInternalMethodsTestCase(db_base.DbTestCase):
def setUp(self):
super(IRMCPowerInternalMethodsTestCase, self).setUp()
mgr_utils.mock_the_extension_manager(driver='fake_irmc')
driver_info = INFO_DICT
self.node = db_utils.create_test_node(
driver='fake_irmc',
driver_info=driver_info,
instance_uuid=uuidutils.generate_uuid())
def test__is_expected_power_state(self):
target_state = states.SOFT_POWER_OFF
boot_status_value = irmc_power.BOOT_STATUS_VALUE['unknown']
self.assertTrue(irmc_power._is_expected_power_state(
target_state, boot_status_value))
target_state = states.SOFT_POWER_OFF
boot_status_value = irmc_power.BOOT_STATUS_VALUE['off']
self.assertTrue(irmc_power._is_expected_power_state(
target_state, boot_status_value))
target_state = states.SOFT_REBOOT
boot_status_value = irmc_power.BOOT_STATUS_VALUE['os-running']
self.assertTrue(irmc_power._is_expected_power_state(
target_state, boot_status_value))
target_state = states.SOFT_POWER_OFF
boot_status_value = irmc_power.BOOT_STATUS_VALUE['os-running']
self.assertFalse(irmc_power._is_expected_power_state(
target_state, boot_status_value))
@mock.patch('eventlet.greenthread.sleep', lambda n: None)
@mock.patch('ironic.drivers.modules.irmc.power.snmp.SNMPClient',
spec_set=True, autospec=True)
def test__wait_power_state_soft_power_off(self, snmpclient_mock):
target_state = states.SOFT_POWER_OFF
self.config(snmp_polling_interval=1, group='irmc')
self.config(soft_power_off_timeout=3, group='conductor')
snmpclient_mock.return_value = mock.Mock(
**{'get.side_effect': [8, 8, 2]})
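        # The mocked SNMP client returns boot status 8 twice, then 2; from
        # the assertions that follow, 8 evidently maps to a running state
        # and 2 to an off/unknown state in BOOT_STATUS_VALUE, so the third
        # poll satisfies SOFT_POWER_OFF within the 3s soft_power_off_timeout.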
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
irmc_power._wait_power_state(task, target_state)
task.node.refresh()
self.assertIsNone(task.node.last_error)
self.assertEqual(states.POWER_OFF, task.node.power_state)
self.assertEqual(states.NOSTATE, task.node.target_power_state)
@mock.patch('eventlet.greenthread.sleep', lambda n: None)
@mock.patch('ironic.drivers.modules.irmc.power.snmp.SNMPClient',
spec_set=True, autospec=True)
def test__wait_power_state_soft_reboot(self, snmpclient_mock):
target_state = states.SOFT_REBOOT
self.config(snmp_polling_interval=1, group='irmc')
self.config(soft_power_off_timeout=3, group='conductor')
snmpclient_mock.return_value = mock.Mock(
**{'get.side_effect': [10, 6, 8]})
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
irmc_power._wait_power_state(task, target_state)
task.node.refresh()
self.assertIsNone(task.node.last_error)
self.assertEqual(states.POWER_ON, task.node.power_state)
self.assertEqual(states.NOSTATE, task.node.target_power_state)
@mock.patch('eventlet.greenthread.sleep', lambda n: None)
@mock.patch('ironic.drivers.modules.irmc.power.snmp.SNMPClient',
spec_set=True, autospec=True)
def test__wait_power_state_timeout(self, snmpclient_mock):
target_state = states.SOFT_POWER_OFF
self.config(snmp_polling_interval=1, group='irmc')
self.config(soft_power_off_timeout=2, group='conductor')
snmpclient_mock.return_value = mock.Mock(
**{'get.side_effect': [8, 8, 8]})
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertRaises(exception.IRMCOperationError,
irmc_power._wait_power_state,
task,
target_state,
timeout=None)
task.node.refresh()
self.assertIsNotNone(task.node.last_error)
self.assertEqual(states.ERROR, task.node.power_state)
self.assertEqual(states.NOSTATE, task.node.target_power_state)
@mock.patch.object(irmc_power, '_wait_power_state', spec_set=True,
autospec=True)
@mock.patch.object(irmc_common, 'get_irmc_client', spec_set=True,
autospec=True)
@mock.patch.object(irmc_boot, 'attach_boot_iso_if_needed')
def test__set_power_state_power_on_ok(
self,
attach_boot_iso_if_needed_mock,
get_irmc_client_mock,
_wait_power_state_mock):
irmc_client = get_irmc_client_mock.return_value
target_state = states.POWER_ON
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
irmc_power._set_power_state(task, target_state)
attach_boot_iso_if_needed_mock.assert_called_once_with(task)
irmc_client.assert_called_once_with(irmc_power.scci.POWER_ON)
self.assertFalse(_wait_power_state_mock.called)
@mock.patch.object(irmc_power, '_wait_power_state', spec_set=True,
autospec=True)
@mock.patch.object(irmc_common, 'get_irmc_client', spec_set=True,
autospec=True)
def test__set_power_state_power_off_ok(self,
get_irmc_client_mock,
_wait_power_state_mock):
irmc_client = get_irmc_client_mock.return_value
target_state = states.POWER_OFF
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
irmc_power._set_power_state(task, target_state)
irmc_client.assert_called_once_with(irmc_power.scci.POWER_OFF)
self.assertFalse(_wait_power_state_mock.called)
@mock.patch.object(irmc_power, '_wait_power_state', spec_set=True,
autospec=True)
@mock.patch.object(irmc_common, 'get_irmc_client', spec_set=True,
autospec=True)
@mock.patch.object(irmc_boot, 'attach_boot_iso_if_needed')
def test__set_power_state_reboot_ok(
self,
attach_boot_iso_if_needed_mock,
get_irmc_client_mock,
_wait_power_state_mock):
irmc_client = get_irmc_client_mock.return_value
target_state = states.REBOOT
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
irmc_power._set_power_state(task, target_state)
attach_boot_iso_if_needed_mock.assert_called_once_with(task)
irmc_client.assert_called_once_with(irmc_power.scci.POWER_RESET)
self.assertFalse(_wait_power_state_mock.called)
@mock.patch.object(irmc_power, '_wait_power_state', spec_set=True,
autospec=True)
@mock.patch.object(irmc_common, 'get_irmc_client', spec_set=True,
autospec=True)
@mock.patch.object(irmc_boot, 'attach_boot_iso_if_needed')
def test__set_power_state_soft_reboot_ok(
self,
attach_boot_iso_if_needed_mock,
get_irmc_client_mock,
_wait_power_state_mock):
irmc_client = get_irmc_client_mock.return_value
target_state = states.SOFT_REBOOT
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
irmc_power._set_power_state(task, target_state)
attach_boot_iso_if_needed_mock.assert_called_once_with(task)
irmc_client.assert_called_once_with(irmc_power.scci.POWER_SOFT_CYCLE)
_wait_power_state_mock.assert_has_calls(
[mock.call(task, states.SOFT_POWER_OFF, timeout=None),
mock.call(task, states.SOFT_REBOOT, timeout=None)])
@mock.patch.object(irmc_power, '_wait_power_state', spec_set=True,
autospec=True)
@mock.patch.object(irmc_common, 'get_irmc_client', spec_set=True,
autospec=True)
@mock.patch.object(irmc_boot, 'attach_boot_iso_if_needed')
def test__set_power_state_soft_power_off_ok(self,
attach_boot_iso_if_needed_mock,
get_irmc_client_mock,
_wait_power_state_mock):
irmc_client = get_irmc_client_mock.return_value
target_state = states.SOFT_POWER_OFF
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
irmc_power._set_power_state(task, target_state)
self.assertFalse(attach_boot_iso_if_needed_mock.called)
irmc_client.assert_called_once_with(irmc_power.scci.POWER_SOFT_OFF)
_wait_power_state_mock.assert_called_once_with(task, target_state,
timeout=None)
@mock.patch.object(irmc_power, '_wait_power_state', spec_set=True,
autospec=True)
@mock.patch.object(irmc_boot, 'attach_boot_iso_if_needed')
def test__set_power_state_invalid_target_state(
self,
attach_boot_iso_if_needed_mock,
_wait_power_state_mock):
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertRaises(exception.InvalidParameterValue,
irmc_power._set_power_state,
task,
states.ERROR)
self.assertFalse(attach_boot_iso_if_needed_mock.called)
self.assertFalse(_wait_power_state_mock.called)
@mock.patch.object(irmc_power, '_wait_power_state', spec_set=True,
autospec=True)
@mock.patch.object(irmc_common, 'get_irmc_client', spec_set=True,
autospec=True)
@mock.patch.object(irmc_boot, 'attach_boot_iso_if_needed')
def test__set_power_state_scci_exception(self,
attach_boot_iso_if_needed_mock,
get_irmc_client_mock,
_wait_power_state_mock):
irmc_client = get_irmc_client_mock.return_value
irmc_client.side_effect = Exception()
irmc_power.scci.SCCIClientError = Exception
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertRaises(exception.IRMCOperationError,
irmc_power._set_power_state,
task,
states.POWER_ON)
attach_boot_iso_if_needed_mock.assert_called_once_with(
task)
self.assertFalse(_wait_power_state_mock.called)
@mock.patch.object(irmc_power, '_wait_power_state', spec_set=True,
autospec=True)
@mock.patch.object(irmc_common, 'get_irmc_client', spec_set=True,
autospec=True)
@mock.patch.object(irmc_boot, 'attach_boot_iso_if_needed')
def test__set_power_state_snmp_exception(self,
attach_boot_iso_if_needed_mock,
get_irmc_client_mock,
_wait_power_state_mock):
target_state = states.SOFT_REBOOT
_wait_power_state_mock.side_effect = exception.SNMPFailure(
"fake exception")
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertRaises(exception.IRMCOperationError,
irmc_power._set_power_state,
task,
target_state)
attach_boot_iso_if_needed_mock.assert_called_once_with(
task)
get_irmc_client_mock.return_value.assert_called_once_with(
irmc_power.STATES_MAP[target_state])
_wait_power_state_mock.assert_called_once_with(
task, states.SOFT_POWER_OFF, timeout=None)
class IRMCPowerTestCase(db_base.DbTestCase):
def setUp(self):
super(IRMCPowerTestCase, self).setUp()
driver_info = INFO_DICT
mgr_utils.mock_the_extension_manager(driver="fake_irmc")
self.node = obj_utils.create_test_node(self.context,
driver='fake_irmc',
driver_info=driver_info)
def test_get_properties(self):
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
properties = task.driver.get_properties()
for prop in irmc_common.COMMON_PROPERTIES:
self.assertIn(prop, properties)
@mock.patch.object(irmc_common, 'parse_driver_info', spec_set=True,
autospec=True)
def test_validate(self, mock_drvinfo):
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
task.driver.power.validate(task)
mock_drvinfo.assert_called_once_with(task.node)
@mock.patch.object(irmc_common, 'parse_driver_info', spec_set=True,
autospec=True)
def test_validate_fail(self, mock_drvinfo):
side_effect = exception.InvalidParameterValue("Invalid Input")
mock_drvinfo.side_effect = side_effect
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertRaises(exception.InvalidParameterValue,
task.driver.power.validate,
task)
@mock.patch('ironic.drivers.modules.irmc.power.ipmitool.IPMIPower',
spec_set=True, autospec=True)
def test_get_power_state(self, mock_IPMIPower):
ipmi_power = mock_IPMIPower.return_value
ipmi_power.get_power_state.return_value = states.POWER_ON
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertEqual(states.POWER_ON,
task.driver.power.get_power_state(task))
ipmi_power.get_power_state.assert_called_once_with(task)
@mock.patch.object(irmc_power, '_set_power_state', spec_set=True,
autospec=True)
def test_set_power_state(self, mock_set_power):
mock_set_power.return_value = states.POWER_ON
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
task.driver.power.set_power_state(task, states.POWER_ON)
mock_set_power.assert_called_once_with(task, states.POWER_ON,
timeout=None)
@mock.patch.object(irmc_power, '_set_power_state', spec_set=True,
autospec=True)
def test_set_power_state_timeout(self, mock_set_power):
mock_set_power.return_value = states.POWER_ON
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
task.driver.power.set_power_state(task, states.POWER_ON,
timeout=2)
mock_set_power.assert_called_once_with(task, states.POWER_ON,
timeout=2)
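    # The four reboot tests below verify that reboot() issues REBOOT when
    # the node is currently powered on and plain POWER_ON when it is off,
    # forwarding any caller-supplied timeout to _set_power_state.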
@mock.patch.object(irmc_power, '_set_power_state', spec_set=True,
autospec=True)
@mock.patch.object(irmc_power.IRMCPower, 'get_power_state', spec_set=True,
autospec=True)
def test_reboot_reboot(self, mock_get_power, mock_set_power):
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
mock_get_power.return_value = states.POWER_ON
task.driver.power.reboot(task)
mock_get_power.assert_called_once_with(
task.driver.power, task)
mock_set_power.assert_called_once_with(task, states.REBOOT,
timeout=None)
@mock.patch.object(irmc_power, '_set_power_state', spec_set=True,
autospec=True)
@mock.patch.object(irmc_power.IRMCPower, 'get_power_state', spec_set=True,
autospec=True)
def test_reboot_reboot_timeout(self, mock_get_power, mock_set_power):
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
mock_get_power.return_value = states.POWER_ON
task.driver.power.reboot(task, timeout=2)
mock_get_power.assert_called_once_with(
task.driver.power, task)
mock_set_power.assert_called_once_with(task, states.REBOOT,
timeout=2)
@mock.patch.object(irmc_power, '_set_power_state', spec_set=True,
autospec=True)
@mock.patch.object(irmc_power.IRMCPower, 'get_power_state', spec_set=True,
autospec=True)
def test_reboot_power_on(self, mock_get_power, mock_set_power):
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
mock_get_power.return_value = states.POWER_OFF
task.driver.power.reboot(task)
mock_get_power.assert_called_once_with(
task.driver.power, task)
mock_set_power.assert_called_once_with(task, states.POWER_ON,
timeout=None)
@mock.patch.object(irmc_power, '_set_power_state', spec_set=True,
autospec=True)
@mock.patch.object(irmc_power.IRMCPower, 'get_power_state', spec_set=True,
autospec=True)
def test_reboot_power_on_timeout(self, mock_get_power, mock_set_power):
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
mock_get_power.return_value = states.POWER_OFF
task.driver.power.reboot(task, timeout=2)
mock_get_power.assert_called_once_with(
task.driver.power, task)
mock_set_power.assert_called_once_with(task, states.POWER_ON,
timeout=2)
| field | value |
|---|---|
| avg_line_length | 48.681265 |
| max_line_length | 79 |
| alphanum_fraction | 0.628749 |
| qsc_code_num_words_quality_signal | 2,397 |
| qsc_code_num_chars_quality_signal | 20,008 |
| qsc_code_mean_word_length_quality_signal | 4.869837 |
| qsc_code_frac_words_unique_quality_signal | 0.080935 |
| qsc_code_frac_chars_top_2grams_quality_signal | 0.065108 |
| qsc_code_frac_chars_top_3grams_quality_signal | 0.043691 |
| qsc_code_frac_chars_top_4grams_quality_signal | 0.055341 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | 0.855992 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | 0.835347 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | 0.80442 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | 0.769297 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | 0.765185 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | 0.742397 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | 0 |
| qsc_code_frac_chars_digital_quality_signal | 0.002122 |
| qsc_code_frac_chars_whitespace_quality_signal | 0.293433 |
| qsc_code_size_file_byte_quality_signal | 20,008 |
| qsc_code_num_lines_quality_signal | 410 |
| qsc_code_num_chars_line_max_quality_signal | 80 |
| qsc_code_num_chars_line_mean_quality_signal | 48.8 |
| qsc_code_frac_chars_alphabet_quality_signal | 0.823584 |
| qsc_code_frac_chars_comments_quality_signal | 0.029338 |
| qsc_code_cate_xml_start_quality_signal | 0 |
| qsc_code_frac_lines_dupe_lines_quality_signal | 0.720798 |
| qsc_code_cate_autogen_quality_signal | 0 |
| qsc_code_frac_lines_long_string_quality_signal | 0 |
| qsc_code_frac_chars_string_length_quality_signal | 0.054636 |
| qsc_code_frac_chars_long_word_length_quality_signal | 0.023298 |
| qsc_code_frac_lines_string_concat_quality_signal | 0 |
| qsc_code_cate_encoded_data_quality_signal | 0 |
| qsc_code_frac_chars_hex_words_quality_signal | 0 |
| qsc_code_frac_lines_prompt_comments_quality_signal | 0 |
| qsc_code_frac_lines_assert_quality_signal | 0.150997 |
| qsc_codepython_cate_ast_quality_signal | 1 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | 0.068376 |
| qsc_codepython_cate_var_zero_quality_signal | false |
| qsc_codepython_frac_lines_pass_quality_signal | 0 |
| qsc_codepython_frac_lines_import_quality_signal | 0.034188 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | 0 |
| qsc_codepython_score_lines_no_logic_quality_signal | 0.108262 |
| qsc_codepython_frac_lines_print_quality_signal | 0 |
| qsc_code_num_words | 0 |
| qsc_code_num_chars | 0 |
| qsc_code_mean_word_length | 0 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | 0 |
| qsc_code_frac_chars_top_3grams | 0 |
| qsc_code_frac_chars_top_4grams | 0 |
| qsc_code_frac_chars_dupe_5grams | 1 |
| qsc_code_frac_chars_dupe_6grams | 1 |
| qsc_code_frac_chars_dupe_7grams | 1 |
| qsc_code_frac_chars_dupe_8grams | 1 |
| qsc_code_frac_chars_dupe_9grams | 1 |
| qsc_code_frac_chars_dupe_10grams | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 |
| qsc_code_frac_chars_digital | 0 |
| qsc_code_frac_chars_whitespace | 0 |
| qsc_code_size_file_byte | 0 |
| qsc_code_num_lines | 0 |
| qsc_code_num_chars_line_max | 0 |
| qsc_code_num_chars_line_mean | 0 |
| qsc_code_frac_chars_alphabet | 0 |
| qsc_code_frac_chars_comments | 0 |
| qsc_code_cate_xml_start | 0 |
| qsc_code_frac_lines_dupe_lines | 1 |
| qsc_code_cate_autogen | 0 |
| qsc_code_frac_lines_long_string | 0 |
| qsc_code_frac_chars_string_length | 0 |
| qsc_code_frac_chars_long_word_length | 0 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | 0 |
| qsc_code_frac_chars_hex_words | 0 |
| qsc_code_frac_lines_prompt_comments | 0 |
| qsc_code_frac_lines_assert | 0 |
| qsc_codepython_cate_ast | 0 |
| qsc_codepython_frac_lines_func_ratio | 0 |
| qsc_codepython_cate_var_zero | 0 |
| qsc_codepython_frac_lines_pass | 0 |
| qsc_codepython_frac_lines_import | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 |
| qsc_codepython_score_lines_no_logic | 0 |
| qsc_codepython_frac_lines_print | 0 |
| effective | 0 |
| hits | 7 |
| field | value |
|---|---|
| hexsha | 0ee793bedbb6d5752c0c932911f05fb8dd8a570b |
| size | 2,831 |
| ext | py |
| lang | Python |
| max_stars_repo_path | tests/server/test_middleware.py |
| max_stars_repo_name | dapper91/pjrpc |
| max_stars_repo_head_hexsha | cbf5c9e072935f9e59d306244c67499a4ad3e0f5 |
| max_stars_repo_licenses | ["Unlicense"] |
| max_stars_count | 10 |
| max_stars_repo_stars_event_min_datetime | 2020-03-15T06:41:58.000Z |
| max_stars_repo_stars_event_max_datetime | 2022-03-17T08:55:53.000Z |
| max_issues_repo_path | tests/server/test_middleware.py |
| max_issues_repo_name | dapper91/pjrpc |
| max_issues_repo_head_hexsha | cbf5c9e072935f9e59d306244c67499a4ad3e0f5 |
| max_issues_repo_licenses | ["Unlicense"] |
| max_issues_count | 41 |
| max_issues_repo_issues_event_min_datetime | 2019-11-16T09:57:54.000Z |
| max_issues_repo_issues_event_max_datetime | 2022-03-31T17:34:13.000Z |
| max_forks_repo_path | tests/server/test_middleware.py |
| max_forks_repo_name | dapper91/pjrpc |
| max_forks_repo_head_hexsha | cbf5c9e072935f9e59d306244c67499a4ad3e0f5 |
| max_forks_repo_licenses | ["Unlicense"] |
| max_forks_count | 1 |
| max_forks_repo_forks_event_min_datetime | 2022-03-17T08:21:28.000Z |
| max_forks_repo_forks_event_max_datetime | 2022-03-17T08:21:28.000Z |

content:
import json
import pjrpc
from pjrpc.server import dispatcher as pjrpc_dispatcher
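# Middlewares wrap the RPC method handler in registration order: each one
# receives (request, context, handler) and calls handler(request, context)
# to continue the chain, so test_middleware1 runs before test_middleware2.
# The call-order assertions below depend on that ordering.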
def test_middleware(mocker):
test_result = 'the result'
test_request = pjrpc.common.Request('test_method', params=dict(param='param'), id=1)
test_response = pjrpc.common.Response(id=1, result=test_result)
test_context = object()
middleware_call_order = []
def test_method(context, param):
assert context is test_context
assert param == 'param'
return test_result
def test_middleware1(request, context, handler):
middleware_call_order.append(test_middleware1)
assert request == test_request
assert context is test_context
return handler(request, context)
def test_middleware2(request, context, handler):
middleware_call_order.append(test_middleware2)
assert request == test_request
assert context is test_context
return handler(request, context)
dispatcher = pjrpc_dispatcher.Dispatcher(middlewares=(test_middleware1, test_middleware2))
dispatcher.add(test_method, 'test_method', 'context')
request_text = json.dumps(test_request.to_json())
response_text = dispatcher.dispatch(request_text, test_context)
actual_response = pjrpc.common.Response.from_json(json.loads(response_text))
assert actual_response == test_response
assert middleware_call_order == [test_middleware1, test_middleware2]
async def test_async_middleware(mocker):
test_result = 'the result'
test_request = pjrpc.common.Request('test_method', params=dict(param='param'), id=1)
test_response = pjrpc.common.Response(id=1, result=test_result)
test_context = object()
middleware_call_order = []
async def test_method(context, param):
assert context is test_context
assert param == 'param'
return test_result
async def test_middleware1(request, context, handler):
middleware_call_order.append(test_middleware1)
assert request == test_request
assert context is test_context
return await handler(request, context)
async def test_middleware2(request, context, handler):
middleware_call_order.append(test_middleware2)
assert request == test_request
assert context is test_context
return await handler(request, context)
dispatcher = pjrpc_dispatcher.AsyncDispatcher(middlewares=(test_middleware1, test_middleware2))
dispatcher.add(test_method, 'test_method', 'context')
request_text = json.dumps(test_request.to_json())
response_text = await dispatcher.dispatch(request_text, test_context)
actual_response = pjrpc.common.Response.from_json(json.loads(response_text))
assert actual_response == test_response
assert middleware_call_order == [test_middleware1, test_middleware2]
| field | value |
|---|---|
| avg_line_length | 35.835443 |
| max_line_length | 99 |
| alphanum_fraction | 0.735429 |
| qsc_code_num_words_quality_signal | 338 |
| qsc_code_num_chars_quality_signal | 2,831 |
| qsc_code_mean_word_length_quality_signal | 5.893491 |
| qsc_code_frac_words_unique_quality_signal | 0.130178 |
| qsc_code_frac_chars_top_2grams_quality_signal | 0.055221 |
| qsc_code_frac_chars_top_3grams_quality_signal | 0.076305 |
| qsc_code_frac_chars_top_4grams_quality_signal | 0.057229 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | 0.930723 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | 0.930723 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | 0.905622 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | 0.905622 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | 0.905622 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | 0.905622 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | 0 |
| qsc_code_frac_chars_digital_quality_signal | 0.008647 |
| qsc_code_frac_chars_whitespace_quality_signal | 0.182974 |
| qsc_code_size_file_byte_quality_signal | 2,831 |
| qsc_code_num_lines_quality_signal | 78 |
| qsc_code_num_chars_line_max_quality_signal | 100 |
| qsc_code_num_chars_line_mean_quality_signal | 36.294872 |
| qsc_code_frac_chars_alphabet_quality_signal | 0.852572 |
| qsc_code_frac_chars_comments_quality_signal | 0 |
| qsc_code_cate_xml_start_quality_signal | 0 |
| qsc_code_frac_lines_dupe_lines_quality_signal | 0.736842 |
| qsc_code_cate_autogen_quality_signal | 0 |
| qsc_code_frac_lines_long_string_quality_signal | 0 |
| qsc_code_frac_chars_string_length_quality_signal | 0.034617 |
| qsc_code_frac_chars_long_word_length_quality_signal | 0 |
| qsc_code_frac_lines_string_concat_quality_signal | 0 |
| qsc_code_cate_encoded_data_quality_signal | 0 |
| qsc_code_frac_chars_hex_words_quality_signal | 0 |
| qsc_code_frac_lines_prompt_comments_quality_signal | 0 |
| qsc_code_frac_lines_assert_quality_signal | 0.280702 |
| qsc_codepython_cate_ast_quality_signal | 1 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | 0.070175 |
| qsc_codepython_cate_var_zero_quality_signal | false |
| qsc_codepython_frac_lines_pass_quality_signal | 0 |
| qsc_codepython_frac_lines_import_quality_signal | 0.052632 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | 0 |
| qsc_codepython_score_lines_no_logic_quality_signal | 0.22807 |
| qsc_codepython_frac_lines_print_quality_signal | 0 |
| qsc_code_num_words | 0 |
| qsc_code_num_chars | 0 |
| qsc_code_mean_word_length | 0 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | 0 |
| qsc_code_frac_chars_top_3grams | 0 |
| qsc_code_frac_chars_top_4grams | 0 |
| qsc_code_frac_chars_dupe_5grams | 1 |
| qsc_code_frac_chars_dupe_6grams | 1 |
| qsc_code_frac_chars_dupe_7grams | 1 |
| qsc_code_frac_chars_dupe_8grams | 1 |
| qsc_code_frac_chars_dupe_9grams | 1 |
| qsc_code_frac_chars_dupe_10grams | 1 |
| qsc_code_frac_chars_replacement_symbols | 0 |
| qsc_code_frac_chars_digital | 0 |
| qsc_code_frac_chars_whitespace | 0 |
| qsc_code_size_file_byte | 0 |
| qsc_code_num_lines | 0 |
| qsc_code_num_chars_line_max | 0 |
| qsc_code_num_chars_line_mean | 0 |
| qsc_code_frac_chars_alphabet | 0 |
| qsc_code_frac_chars_comments | 0 |
| qsc_code_cate_xml_start | 0 |
| qsc_code_frac_lines_dupe_lines | 1 |
| qsc_code_cate_autogen | 0 |
| qsc_code_frac_lines_long_string | 0 |
| qsc_code_frac_chars_string_length | 0 |
| qsc_code_frac_chars_long_word_length | 0 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | 0 |
| qsc_code_frac_chars_hex_words | 0 |
| qsc_code_frac_lines_prompt_comments | 0 |
| qsc_code_frac_lines_assert | 0 |
| qsc_codepython_cate_ast | 0 |
| qsc_codepython_frac_lines_func_ratio | 0 |
| qsc_codepython_cate_var_zero | 0 |
| qsc_codepython_frac_lines_pass | 0 |
| qsc_codepython_frac_lines_import | 0 |
| qsc_codepython_frac_lines_simplefunc | 0 |
| qsc_codepython_score_lines_no_logic | 0 |
| qsc_codepython_frac_lines_print | 0 |
| effective | 0 |
| hits | 7 |
| field | value |
|---|---|
| hexsha | 161711053cd336ce073b4994c3a6a64754a0b514 |
| size | 11,744 |
| ext | py |
| lang | Python |
| max_stars_repo_path | tp_epi/tp_epi/report/payment_received_against_orders/payment_received_against_orders.py |
| max_stars_repo_name | mntechnique/tp_epi |
| max_stars_repo_head_hexsha | 078a2590b50c759555880f143841ef74071b83bc |
| max_stars_repo_licenses | ["MIT"] |
| max_stars_count | null |
| max_stars_repo_stars_event_min_datetime | null |
| max_stars_repo_stars_event_max_datetime | null |
| max_issues_repo_path | tp_epi/tp_epi/report/payment_received_against_orders/payment_received_against_orders.py |
| max_issues_repo_name | mntechnique/tp_epi |
| max_issues_repo_head_hexsha | 078a2590b50c759555880f143841ef74071b83bc |
| max_issues_repo_licenses | ["MIT"] |
| max_issues_count | null |
| max_issues_repo_issues_event_min_datetime | null |
| max_issues_repo_issues_event_max_datetime | null |
| max_forks_repo_path | tp_epi/tp_epi/report/payment_received_against_orders/payment_received_against_orders.py |
| max_forks_repo_name | mntechnique/tp_epi |
| max_forks_repo_head_hexsha | 078a2590b50c759555880f143841ef74071b83bc |
| max_forks_repo_licenses | ["MIT"] |
| max_forks_count | null |
| max_forks_repo_forks_event_min_datetime | null |
| max_forks_repo_forks_event_max_datetime | null |

content:
# Copyright (c) 2013, MN Technique and contributors
# For license information, please see license.txt
from __future__ import unicode_literals
import frappe
from frappe import _
def execute(filters=None):
columns, data = [], []
columns = get_columns()
data = get_data()
return columns, data
# def get_columns():
# return [
# _("Amazon Order ID") + "::150",
# _("Date") + ":Date:80",
# _("Customer Name") + "::150",
# _("SKU") + "::75",
# _("Title") + "::200",
# _("Qty") + "::30",
# _("MRP") + ":Currency:75",
# _("BizOne Principal") + ":Currency:75",
# _("Amazon Principal") + ":Currency:75",
# _("Shipping") + ":Currency:75",
# _("Commission") + ":Currency:75",
# _("Commission Tax") + ":Currency:75",
# _("Fixed Closing Fee") + ":Currency:75",
# _("Fixed Closing Fee Tax") + ":Currency:75",
# _("Shipping Commission") + ":Currency:75",
# _("Shipping Commission Tax") + ":Currency:75",
# _("Received") + ":Currency:75",
# _("% Realization") + ":Float:75",
# _("Easy Shipping Charges") + ":Currency:85",
# _("Easy Shipping Charges Tax") + ":Currency:85",
# _("Easy Shipping Charges Total") + ":Currency:85",
# _("Remark") + ":HTML:200"
# ]
# def get_data():
# data = []
# payments = frappe.get_all("EPI Amazon Order Payment", fields=['*'], order_by="posting_date DESC")
# print payments
# for payment in payments:
# #print payment.amazon_order_id
# #For fetching title.
# sales_order_id = frappe.db.get_value("Sales Order", {"epi_amazon_order_id": payment.amazon_order_id}, "name")
# customer = frappe.db.get_value("Sales Order", {"epi_amazon_order_id": payment.amazon_order_id}, "customer")
# easy_ship_charges = frappe.db.get_value("Sales Taxes and Charges", {"parent": payment.name, "description": "Amazon Easy Ship Charges"}, "tax_amount")
# easy_ship_charges_tax = frappe.db.get_value("Sales Taxes and Charges", {"parent": payment.name, "description": "Amazon Easy Ship Charges Tax"}, "tax_amount")
# #Calculate Total Easy Ship
# easy_ship_charges_total = easy_ship_charges + easy_ship_charges_tax
# row = []
# row.append(payment.amazon_order_id)
# row.append(payment.posting_date)
# row.append(customer)
# row.append("")
# row.append("")
# row.append("")
# row.append("")
# row.append("")
# row.append("")
# row.append("")
# row.append("")
# row.append("")
# row.append("")
# row.append("")
# row.append("")
# row.append("")
# row.append("")
# row.append("")
# row.append(easy_ship_charges)
# row.append(easy_ship_charges_tax)
# row.append(easy_ship_charges_total)
# row.append("")
# data.append(row)
# payment_details = frappe.get_all("EPI Amazon Order Payment Item", filters={'parent': payment.name}, fields=['*'])
# unique_sku_ids = get_distinct_sku_ids(payment_details)
# for unique_sku_id in unique_sku_ids:
# row = []
# title = frappe.db.get_value("Sales Order Item", {"parent": sales_order_id, "epi_ecom_sku_id": unique_sku_id}, "Description")
# principal = sum([float(x.amount) for x in payment_details if x.sku_id == unique_sku_id and x.description.lower() == "principal"])
# qty = sum([float(x.qty) for x in payment_details if x.sku_id == unique_sku_id and x.description.lower() == "principal"])
# selling_price = principal / qty
# mrp_bizone = frappe.db.get_value("Sales Order Item", {"parent": sales_order_id, "epi_ecom_sku_id": unique_sku_id}, "rate")
# principal_bizone = (mrp_bizone * qty)
# shipping = sum([float(x.amount) for x in payment_details if x.sku_id == unique_sku_id and x.description.lower() == "shipping"])
# commission = sum([float(x.amount) for x in payment_details if x.sku_id == unique_sku_id and x.description.lower() == "commission"])
# commission_tax = sum([float(x.amount) for x in payment_details if x.sku_id == unique_sku_id and x.description.lower() == "commission tax"])
# fixed_closing_fee = sum([float(x.amount) for x in payment_details if x.sku_id == unique_sku_id and x.description.lower() == "fixed closing fee"])
# fixed_closing_fee_tax = sum([float(x.amount) for x in payment_details if x.sku_id == unique_sku_id and x.description.lower() == "fixed closing fee tax"])
# shipping_commission = sum([float(x.amount) for x in payment_details if x.sku_id == unique_sku_id and x.description.lower() == "shipping commission"])
# shipping_commission_tax = sum([float(x.amount) for x in payment_details if x.sku_id == unique_sku_id and x.description.lower() == "shipping commission tax"])
# received = principal + shipping + commission + commission_tax + fixed_closing_fee + fixed_closing_fee_tax + shipping_commission + shipping_commission_tax
# if received > 0:
# #realization = (((received - shipping) / qty) / selling_price) * 100
# realization = (((received - shipping) / qty) / mrp_bizone) * 100
# else:
# realization = 0.0
# row.append("")
# row.append("")
# row.append("")
# row.append(unique_sku_id)
# row.append(title)
# row.append(qty)
# row.append(mrp_bizone)
# row.append(principal_bizone)
# row.append(principal)
# row.append(shipping)
# row.append(commission)
# row.append(commission_tax)
# row.append(fixed_closing_fee)
# row.append(fixed_closing_fee_tax)
# row.append(shipping_commission)
# row.append(shipping_commission_tax)
# row.append(received)
# row.append(realization)
# row.append("")
# row.append("")
# row.append("")
# row.append("")
# data.append(row)
# return data
def get_distinct_sku_ids(payment_items):
    """Return the set of distinct SKU ids across the given payment items."""
    return {item.sku_id for item in payment_items}
def get_columns():
return [
_("Amazon Order ID") + "::150",
_("Date") + ":Date:80",
_("Customer Name") + "::150",
_("SKU") + "::75",
_("Title") + "::200",
_("Qty") + "::30",
_("MRP") + ":Currency:75",
_("BizOne Principal") + ":Currency:75",
_("Amazon Principal") + ":Currency:75",
_("Shipping") + ":Currency:75",
_("Commission") + ":Currency:75",
_("Commission Tax") + ":Currency:75",
_("Fixed Closing Fee") + ":Currency:75",
_("Fixed Closing Fee Tax") + ":Currency:75",
_("Shipping Commission") + ":Currency:75",
_("Shipping Commission Tax") + ":Currency:75",
_("Easy Shipping Per Title") + ":Currency:85",
_("Received") + ":Currency:75",
_("% Realization") + ":Float:75",
_("BizOne MRP Total") + ":Currency:85",
_("Easy Shipping Total") + ":Currency:85",
_("Ratio") + ":Float:85",
_("Remark") + ":HTML:200"
]
def get_data():
data = []
payments = frappe.get_all("EPI Amazon Order Payment",filters={"order_fulfillment_type": "MFN"}, fields=['*'], order_by="posting_date DESC")
#For each payment, get OrderID, Total of BizOneMRP by title,
for payment in payments:
sales_order_id = frappe.db.get_value("Sales Order", {"epi_amazon_order_id": payment.amazon_order_id}, "name")
customer = frappe.db.get_value("Sales Order", {"epi_amazon_order_id": payment.amazon_order_id}, "customer")
        #Get Easy Ship total; frappe.db.get_value returns None when nothing matches, so default to 0
        easy_ship_charges = frappe.db.get_value("Sales Taxes and Charges", {"parent": payment.name, "description": "Amazon Easy Ship Charges"}, "tax_amount") or 0
        easy_ship_charges_tax = frappe.db.get_value("Sales Taxes and Charges", {"parent": payment.name, "description": "Amazon Easy Ship Charges Tax"}, "tax_amount") or 0
        easy_ship_charges_total = easy_ship_charges + easy_ship_charges_tax
        print(sales_order_id)
        print("------")
        print(easy_ship_charges_total)
        print("------")
#Get MRP total for all titles.
payment_items = frappe.get_all("EPI Amazon Order Payment Item", filters={"parent": payment.name}, fields=['*'])
unique_sku_ids = get_distinct_sku_ids(payment_items)
mrp_bizone_order_total = frappe.db.get_value("Sales Order", {"epi_amazon_order_id": payment.amazon_order_id}, "total")
# total_mrp_bizone_for_order_titles = 0.0
# for unique_sku_id in unique_sku_ids:
# qty = [x.qty for x in payment_items if x.sku_id == unique_sku_id][0]
# mrp_bizone = frappe.db.get_value("Sales Order Item", {"parent": sales_order_id, "epi_ecom_sku_id": unique_sku_id}, "rate")
# total_mrp_bizone_for_order_titles += (qty * mrp_bizone)
        #Ratio of easy ship charges to the BizOne MRP order total (guard against a zero total)
        ratio_mrp_bizone_to_easy_ship = (abs(easy_ship_charges_total) / mrp_bizone_order_total) if mrp_bizone_order_total else 0.0
row = []
row.append(payment.amazon_order_id)
row.append(customer)
row.append("")
row.append("")
row.append("")
row.append("")
row.append("")
row.append("")
row.append("")
row.append("")
row.append("")
row.append("")
row.append("")
row.append("")
row.append("")
row.append("")
row.append("")
row.append("")
row.append("")
row.append(mrp_bizone_order_total)
row.append(easy_ship_charges_total)
row.append(ratio_mrp_bizone_to_easy_ship)
row.append("")
data.append(row)
for unique_sku_id in unique_sku_ids:
row = []
title = frappe.db.get_value("Sales Order Item", {"parent": sales_order_id, "epi_ecom_sku_id": unique_sku_id}, "Description")
principal = sum([float(x.amount) for x in payment_items if x.sku_id == unique_sku_id and x.description.lower() == "principal"])
qty = sum([float(x.qty) for x in payment_items if x.sku_id == unique_sku_id and x.description.lower() == "principal"])
            selling_price = principal / qty  # note: not used below; realization is computed against mrp_bizone
mrp_bizone = frappe.db.get_value("Sales Order Item", {"parent": sales_order_id, "epi_ecom_sku_id": unique_sku_id}, "rate")
principal_bizone = (mrp_bizone * qty)
shipping = sum([float(x.amount) for x in payment_items if x.sku_id == unique_sku_id and x.description.lower() == "shipping"])
commission = sum([float(x.amount) for x in payment_items if x.sku_id == unique_sku_id and x.description.lower() == "commission"])
commission_tax = sum([float(x.amount) for x in payment_items if x.sku_id == unique_sku_id and x.description.lower() == "commission tax"])
fixed_closing_fee = sum([float(x.amount) for x in payment_items if x.sku_id == unique_sku_id and x.description.lower() == "fixed closing fee"])
fixed_closing_fee_tax = sum([float(x.amount) for x in payment_items if x.sku_id == unique_sku_id and x.description.lower() == "fixed closing fee tax"])
shipping_commission = sum([float(x.amount) for x in payment_items if x.sku_id == unique_sku_id and x.description.lower() == "shipping commission"])
shipping_commission_tax = sum([float(x.amount) for x in payment_items if x.sku_id == unique_sku_id and x.description.lower() == "shipping commission tax"])
easy_ship_charges_sku = principal_bizone * ratio_mrp_bizone_to_easy_ship
received = principal + shipping + commission + commission_tax + fixed_closing_fee + fixed_closing_fee_tax + shipping_commission + shipping_commission_tax - easy_ship_charges_sku
if received > 0:
realization = ((received / qty) / mrp_bizone) * 100
#realization = principal_bizone * ratio_mrp_bizone_to_easy_ship
#realization = (((received - shipping) / qty) / mrp_bizone) * 100
else:
realization = 0.0
row.append("")
row.append("")
row.append("")
row.append(unique_sku_id)
row.append(title)
row.append(qty)
row.append(mrp_bizone)
row.append(principal_bizone)
row.append(principal)
row.append(shipping)
row.append(commission)
row.append(commission_tax)
row.append(fixed_closing_fee)
row.append(fixed_closing_fee_tax)
row.append(shipping_commission)
row.append(shipping_commission_tax)
row.append(easy_ship_charges_sku)
row.append(received)
row.append(realization)
row.append("")
row.append("")
row.append("")
row.append("")
data.append(row)
return data
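# Worked example of the realization formula above, with illustrative numbers only:
# received = 180.0, qty = 2, mrp_bizone = 100.0
#   realization = ((180.0 / 2) / 100.0) * 100 = 90.0
# i.e. each unit realised 90% of its BizOne MRP after Amazon fees and easy ship charges.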
| 37.520767
| 180
| 0.683157
| 1,625
| 11,744
| 4.661538
| 0.069538
| 0.106931
| 0.069703
| 0.104554
| 0.875776
| 0.842904
| 0.805413
| 0.80066
| 0.776634
| 0.766997
| 0
| 0.012423
| 0.156931
| 11,744
| 312
| 181
| 37.641026
| 0.752651
| 0.486546
| 0
| 0.24031
| 0
| 0
| 0.186208
| 0.003728
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.023256
| null | null | 0.031008
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
163962618ac744a7b37fd0b204d9b511e9066745
| 29,376
|
py
|
Python
|
E2E/espnet2/asr/encoder/avconformer_encoder.py
|
mispchallenge/MISP2021-AVSR
|
e26ab87bd3134d08d9571632bbffb0b5e731a830
|
[
"Apache-2.0"
] | null | null | null |
E2E/espnet2/asr/encoder/avconformer_encoder.py
|
mispchallenge/MISP2021-AVSR
|
e26ab87bd3134d08d9571632bbffb0b5e731a830
|
[
"Apache-2.0"
] | null | null | null |
E2E/espnet2/asr/encoder/avconformer_encoder.py
|
mispchallenge/MISP2021-AVSR
|
e26ab87bd3134d08d9571632bbffb0b5e731a830
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Tomoki Hayashi
# Apache 2.0 (http://www.apache.org/licenses/LICENSE-2.0)
"""Conformer encoder definition."""
from typing import Optional
from typing import Tuple
import logging
import torch
from typeguard import check_argument_types
from espnet.nets.pytorch_backend.conformer.convolution import ConvolutionModule
from espnet.nets.pytorch_backend.conformer.encoder_layer import EncoderLayer
from espnet.nets.pytorch_backend.nets_utils import get_activation
from espnet.nets.pytorch_backend.nets_utils import make_pad_mask
from espnet.nets.pytorch_backend.transformer.attention import (
MultiHeadedAttention, # noqa: H301
RelPositionMultiHeadedAttention, # noqa: H301
LegacyRelPositionMultiHeadedAttention, # noqa: H301
)
from espnet.nets.pytorch_backend.transformer.embedding import (
PositionalEncoding, # noqa: H301
ScaledPositionalEncoding, # noqa: H301
RelPositionalEncoding, # noqa: H301
LegacyRelPositionalEncoding, # noqa: H301
)
from espnet.nets.pytorch_backend.transformer.layer_norm import LayerNorm
from espnet.nets.pytorch_backend.transformer.multi_layer_conv import Conv1dLinear
from espnet.nets.pytorch_backend.transformer.multi_layer_conv import MultiLayeredConv1d
from espnet.nets.pytorch_backend.transformer.positionwise_feed_forward import (
PositionwiseFeedForward, # noqa: H301
)
from espnet.nets.pytorch_backend.transformer.repeat import repeat
from espnet.nets.pytorch_backend.transformer.subsampling import check_short_utt
from espnet.nets.pytorch_backend.transformer.subsampling import Conv2dSubsampling
from espnet.nets.pytorch_backend.transformer.subsampling import Conv2dSubsampling2
from espnet.nets.pytorch_backend.transformer.subsampling import Conv2dSubsampling6
from espnet.nets.pytorch_backend.transformer.subsampling import Conv2dSubsampling8
from espnet.nets.pytorch_backend.transformer.subsampling import TooShortUttError
from espnet2.asr.encoder.abs_encoder import AbsEncoder
from espnet2.asr.encoder.conformer_encoder import ConformerEncoder
from espnet.nets.pytorch_backend.transformer.subsampling import Conv2dSubsampling
from espnet.nets.pytorch_backend.nets_utils import make_pad_mask
from .network_audio_visual_fusion import AudioVisualFuse
# from network_audio_visual_fusion import AudioVisualFuse
class AVConformerEncoder(AbsEncoder):
def __init__(
self,
conformer_conf:dict,
feat_dim:int,
alayer_num1:int,
alayer_num2:int,
alayer_num3:int,
vlayer_num1:int,
vlayer_num2:int,
vlayer_num3:int,
avlayer_num:int
):
super().__init__()
conformer_conf["input_layer"] = None
conformer_conf["input_size"] = conformer_conf["output_size"]
self.conformer_conf =conformer_conf
self.input_layer = Conv2dSubsampling(
feat_dim,
conformer_conf["output_size"],
conformer_conf["dropout_rate"],
None)
        self.alayer1 = ConformerEncoder(num_blocks=alayer_num1, **conformer_conf)  # includes embedding layer
self.alayer2 = ConformerEncoder(num_blocks=alayer_num2,**conformer_conf)
self.alayer3 = ConformerEncoder(num_blocks=alayer_num3,**conformer_conf)
        self.vlayer1 = ConformerEncoder(num_blocks=vlayer_num1, **conformer_conf)  # includes embedding layer
self.vlayer2 = ConformerEncoder(num_blocks=vlayer_num2,**conformer_conf)
self.vlayer3 = ConformerEncoder(num_blocks=vlayer_num3,**conformer_conf)
self.fusion = torch.nn.Sequential(
torch.nn.Linear(conformer_conf["output_size"]*2, conformer_conf["output_size"]),
torch.nn.LayerNorm(conformer_conf["output_size"]),
torch.nn.ReLU(),
torch.nn.Dropout(conformer_conf["dropout_rate"]),
)
self.avlayer = ConformerEncoder(num_blocks=avlayer_num,**conformer_conf)
def output_size(self) -> int:
return self.conformer_conf["output_size"]
def forward(self,feats,feats_lengths,video,video_lengths
) -> Tuple[torch.Tensor, torch.Tensor, Optional[torch.Tensor]]:
"""
Args:
feats (torch.Tensor): Input tensor (#batch, L, input_size).
feats_lengths (torch.Tensor): Input length (#batch).
video (torch.Tensor): Input tensor (#batch, L, input_size). #video has subsampling
video_lengths (torch.Tensor): Input length (#batch)
"""
        # audio feature subsampling (video is already subsampled upstream)
masks = (~make_pad_mask(feats_lengths)[:, None, :]).to(feats.device)
org_feats,org_masks = self.input_layer(feats,masks)
org_feats_lengths = org_masks.squeeze(1).sum(1)
masks = (~make_pad_mask(video_lengths)[:, None, :]).to(video.device)
# log_file = open("/yrfs2/cv1/hangchen2/espnet/misp2021/asr1/exp/avsr_train_avsr_conformer_raw_zh_char_sp/test","a")
# print("#"*40,"input layer","#"*40,file=log_file)
# print(org_feats.shape,org_feats_lengths,file=log_file)
# print(org_video.shape,org_video_lengths,file=log_file)
#layer 1
outfeats1,outfeats_lengths1,_ = self.alayer1(org_feats,org_feats_lengths)
outvideo1,outvideo_lengths1,_ = self.vlayer1(video,org_feats_lengths)
#fusion 1+layer 2
# import pdb;pdb.set_trace()
x_concat = torch.cat((outfeats1, outvideo1), dim=-1)
amid_feat1= self.fusion(x_concat)
outfeats2,outfeats_lengths2,_ = self.alayer2(amid_feat1,outfeats_lengths1)
outvideo2,outvideo_lengths2,_ = self.vlayer2(outvideo1,outvideo_lengths1)
# print("#"*40,"layertwo","#"*40,file=log_file)
# print(outfeats2.shape,outfeats_lengths2,file=log_file)
# print(outvideo2.shape,outvideo_lengths2,file=log_file)
#skip connection + layer 3
outfeats3,outfeats_lengths3,_ = self.alayer3(org_feats+outfeats2,outfeats_lengths2)
outvideo3,outvideo_lengths3,_ = self.vlayer3(video+outvideo2,outvideo_lengths2)
# print("#"*40,"layerthree","#"*40,file=log_file)
# print(outfeats3.shape,outfeats_lengths3,file=log_file)
# print(outvideo3.shape,outvideo_lengths3,file=log_file)
#fusion 2 + layer 4
x_concat = torch.cat((outfeats3, outvideo3), dim=-1)
amid_feat2= self.fusion(x_concat)
hidden_feat,hidden_feat_lengths,_ = self.avlayer(amid_feat2,outfeats_lengths3)
return hidden_feat,hidden_feat_lengths,_
class AVConformerEncoder2(AbsEncoder):
def __init__(
self,
conformer_conf:dict,
feat_dim:int,
alayer_num1:int,
alayer_num2:int,
alayer_num3:int,
vlayer_num1:int,
):
super().__init__()
conformer_conf["input_layer"] = None
conformer_conf["input_size"] = conformer_conf["output_size"]
self.conformer_conf =conformer_conf
self.input_layer = Conv2dSubsampling(
feat_dim,
conformer_conf["output_size"],
conformer_conf["dropout_rate"],
None)
        self.alayer1 = ConformerEncoder(num_blocks=alayer_num1, **conformer_conf)  # includes embedding layer
self.alayer2 = ConformerEncoder(num_blocks=alayer_num2,**conformer_conf)
self.alayer3 = ConformerEncoder(num_blocks=alayer_num3,**conformer_conf)
        self.vlayer1 = ConformerEncoder(num_blocks=vlayer_num1, **conformer_conf)  # includes embedding layer
self.fusion = torch.nn.Sequential(
torch.nn.Linear(conformer_conf["output_size"]*2, conformer_conf["output_size"]),
torch.nn.LayerNorm(conformer_conf["output_size"]),
torch.nn.ReLU(),
torch.nn.Dropout(conformer_conf["dropout_rate"]),
)
def output_size(self) -> int:
return self.conformer_conf["output_size"]
def forward(self,feats,feats_lengths,video,video_lengths
) -> Tuple[torch.Tensor, torch.Tensor, Optional[torch.Tensor]]:
"""
Args:
feats (torch.Tensor): Input tensor (#batch, L, input_size).
feats_lengths (torch.Tensor): Input length (#batch).
video (torch.Tensor): Input tensor (#batch, L, input_size). #video has subsampling
video_lengths (torch.Tensor): Input length (#batch)
"""
        # audio downsampling; video is already subsampled in the frontend
masks = (~make_pad_mask(feats_lengths)[:, None, :]).to(feats.device)
org_feats,org_masks = self.input_layer(feats,masks)
org_feats_lengths = org_masks.squeeze(1).sum(1)
#fix length
if not org_feats_lengths.equal(video_lengths):
org_feats_lengths = org_feats_lengths.min(video_lengths)
video_lengths = org_feats_lengths.clone()
feats = feats[:,:max(org_feats_lengths)]
video = video[:,:max(video_lengths)]
#fusion 1 + layer 1
x_concat = torch.cat((org_feats, video), dim=-1)
amid_feat1= self.fusion(x_concat)
outfeats1,outfeats_lengths1,_ = self.alayer1(amid_feat1,org_feats_lengths)
outvideo1,outvideo_lengths1,_ = self.vlayer1(video,org_feats_lengths)
#fusion 2+layer 2
x_concat = torch.cat((outfeats1, outvideo1), dim=-1)
amid_feat1= self.fusion(x_concat)
outfeats2,outfeats_lengths2,_ = self.alayer2(amid_feat1,outfeats_lengths1)
#skip connection + layer av
hidden_feat,hidden_feat_lengths,_ = self.alayer3(org_feats+outfeats2,outfeats_lengths2)
return hidden_feat,hidden_feat_lengths,_
class AVConformerEncoder3(AbsEncoder):
def __init__(
self,
conformer_conf:dict,
feat_dim:int,
alayer_num1:int,
alayer_num2:int,
vlayer_num1:int,
):
super().__init__()
conformer_conf["input_layer"] = None
conformer_conf["input_size"] = conformer_conf["output_size"]
self.conformer_conf =conformer_conf
self.input_layer = Conv2dSubsampling(
feat_dim,
conformer_conf["output_size"],
conformer_conf["dropout_rate"],
None)
self.alayer1 = ConformerEncoder(num_blocks=alayer_num1,**conformer_conf)
self.alayer2 = ConformerEncoder(num_blocks=alayer_num2,**conformer_conf)
self.vlayer1 = ConformerEncoder(num_blocks=vlayer_num1,**conformer_conf)
self.fusion = torch.nn.Sequential(
torch.nn.Linear(conformer_conf["output_size"]*2, conformer_conf["output_size"]),
torch.nn.LayerNorm(conformer_conf["output_size"]),
torch.nn.ReLU(),
torch.nn.Dropout(conformer_conf["dropout_rate"]),
)
def output_size(self) -> int:
return self.conformer_conf["output_size"]
def forward(self,feats,feats_lengths,video,video_lengths
) -> Tuple[torch.Tensor, torch.Tensor, Optional[torch.Tensor]]:
"""
Args:
feats (torch.Tensor): Input tensor (#batch, L, input_size).
feats_lengths (torch.Tensor): Input length (#batch).
video (torch.Tensor): Input tensor (#batch, L, input_size). #video has subsampling
video_lengths (torch.Tensor): Input length (#batch)
"""
        # audio feature subsampling (video is already subsampled upstream)
masks = (~make_pad_mask(feats_lengths)[:, None, :]).to(feats.device)
org_feats,org_masks = self.input_layer(feats,masks)
org_feats_lengths = org_masks.squeeze(1).sum(1)
masks = (~make_pad_mask(video_lengths)[:, None, :]).to(video.device)
#fusion 1+layer 1
x_concat = torch.cat((org_feats, video), dim=-1)
amid_feat1= self.fusion(x_concat)
outfeats1,outfeats_lengths1,_ = self.alayer1(org_feats,org_feats_lengths)
outvideo1,outvideo_lengths1,_ = self.vlayer1(video,org_feats_lengths)
#fusion 2+layer 2
x_concat = torch.cat((outfeats1, outvideo1), dim=-1)
amid_feat2= self.fusion(x_concat)
hidden_feat,hidden_feat_lengths,_ = self.alayer2(amid_feat2,outfeats_lengths1)
return hidden_feat,hidden_feat_lengths,_
class AVConformerEncoder4(AbsEncoder): # [b,T,512]->[b,T,256]
def __init__(
self,
conformer_conf:dict,
alayer_num1:int,
):
super().__init__()
conformer_conf["input_layer"] = None
conformer_conf["input_size"] = conformer_conf["output_size"]
self.conformer_conf =conformer_conf
self.fusion = DimConvert(in_channels=512*2,out_channels=256)
        self.alayer1 = ConformerEncoder(num_blocks=alayer_num1, **conformer_conf)  # includes embedding layer
self.subsampling = Conv2dSubsampling(
512,
512,
conformer_conf["dropout_rate"],
None)
def output_size(self) -> int:
return self.conformer_conf["output_size"]
def forward(self,feats,feats_lengths,video,video_lengths
) -> Tuple[torch.Tensor, torch.Tensor, Optional[torch.Tensor]]:
"""
Args:
feats (torch.Tensor): Input tensor (#batch, L, input_size).
feats_lengths (torch.Tensor): Input length (#batch).
video (torch.Tensor): Input tensor (#batch, L, input_size). #video has subsampling
video_lengths (torch.Tensor): Input length (#batch)
"""
masks = (~make_pad_mask(feats_lengths)[:, None, :]).to(feats_lengths.device)
# log_file = open("/yrfs2/cv1/hangchen2/espnet/misp2021/asr1/expfarlipfar/comformer_avsr_far_av/test","a")
# print("feattype:{feats.device} {feats_lengths.device}",file=log_file)
# print("videotype:{video.device} {video_lengths.device}",file=log_file)
# print("masktype:{mask.device}",file=log_file)
feats, masks = self.subsampling(feats, masks)
feats_lengths = masks.squeeze(1).sum(1)
feats_lengths = feats_lengths.min(video_lengths)
video_lengths = feats_lengths.clone()
feats = feats[:,:max(feats_lengths)]
video = video[:,:max(feats_lengths)]
#fusion 1+layer 1
x_concat = torch.cat((feats, video), dim=-1) #B,T,1024
amid_feat1= self.fusion(x_concat) #B,T,256
hidden_feat,hidden_feat_lengths,_ = self.alayer1(amid_feat1,feats_lengths)
return hidden_feat,hidden_feat_lengths,_
class DimConvert(torch.nn.Module): #(B,T,D)->(B,T,D)
def __init__(
self,
in_channels:int,
out_channels:int,
):
super().__init__()
settings = {
"in_channels":in_channels,
"out_channels":out_channels,
"kernel_size":1,
"stride":1,
"bias":False,
"padding":0,
}
self.convert = torch.nn.Sequential(
torch.nn.Conv1d(**settings),
torch.nn.BatchNorm1d(out_channels),
torch.nn.PReLU(out_channels),
torch.nn.Dropout(0.1)
)
def forward(self,tensor):
return self.convert(tensor.transpose(1,2)).transpose(1,2)
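# Minimal shape check for DimConvert, in the same commented style as the smoke
# tests elsewhere in this file (illustrative only, not from the original source):
# convert = DimConvert(in_channels=512, out_channels=256)
# x = torch.rand(4, 90, 512)               # (B, T, in_channels)
# assert convert(x).shape == (4, 90, 256)  # (B, T, out_channels)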
"""
AVConformerEncoder5 is similar to AVConformerEncoder2 and is used for wav preencode 25ps +video 25 ps ,which have the same fps;for feat preencode 100ps + video 25ps you can use AVConformerEncoder6
"""
class AVConformerEncoder5(AbsEncoder):
def __init__(
self,
conformer_conf:dict,
alayer_num1:int=3,
alayer_num2:int=3,
alayer_num3:int=3,
vlayer_num1:int=3,
):
super().__init__()
conformer_conf["input_layer"] = None
conformer_conf["input_size"] = conformer_conf["output_size"]
self.conformer_conf =conformer_conf
self.alayer1 = ConformerEncoder(num_blocks=alayer_num1,**conformer_conf)
self.alayer2 = ConformerEncoder(num_blocks=alayer_num2,**conformer_conf)
self.alayer3 = ConformerEncoder(num_blocks=alayer_num3,**conformer_conf)
video_conformer_conf = conformer_conf.copy()
video_conformer_conf["input_size"] = 512
video_conformer_conf["input_layer"] = "linear"
self.vlayer1 = ConformerEncoder(num_blocks=vlayer_num1,**video_conformer_conf)
self.fusion1 = DimConvert(in_channels=512*2,out_channels=256)
self.fusion2 = DimConvert(in_channels=256*2,out_channels=256)
self.audioturner = DimConvert(in_channels=256*2,out_channels=256)
def output_size(self) -> int:
return self.conformer_conf["output_size"]
def forward(self,feats,feats_lengths,video,video_lengths
) -> Tuple[torch.Tensor, torch.Tensor, Optional[torch.Tensor]]:
"""
Args:
feats (torch.Tensor): Input tensor (#batch, T, 256).
feats_lengths (torch.Tensor): Input length (#batch).
video (torch.Tensor): Input tensor (#batch, T, 256). #video and audio both 25 ps
video_lengths (torch.Tensor): Input length (#batch)
"""
        # audio and video have nearly the same length; clamp both to the shorter one
feats_lengths = feats_lengths.min(video_lengths)
video_lengths = feats_lengths.clone()
feats = feats[:,:max(feats_lengths)]
video = video[:,:max(feats_lengths)]
# log_file = open("/yrfs2/cv1/hangchen2/espnet/misp2021/asr1/expwavlip/test1","a")
# print("------in encoder -----")
# print(f"feats.shape:{feats.shape},feats_lengths:{feats_lengths}",file=log_file)
# print(f"video.shape:{video.shape},video_lengths:{video_lengths}",file=log_file)
#fusion 1 + layer 1
x_concat = torch.cat((feats, video), dim=-1)
amid_feat = self.fusion1(x_concat)
# print(f"famid_feat.shape:{amid_feat.shape},outfeats_lengths:{feats_lengths}",file=log_file)
outfeats1,outfeats_lengths1,_ = self.alayer1(amid_feat,feats_lengths)
outvideo1,outvideo_lengths1,_ = self.vlayer1(video,video_lengths)
#fusion 2+layer 2
x_concat1 = torch.cat((outfeats1, outvideo1), dim=-1)
amid_feat1 = self.fusion2(x_concat1)
outfeats2,outfeats_lengths2,_ = self.alayer2(amid_feat1,outfeats_lengths1)
#skip connection + layer av
res = self.audioturner(feats)
hidden_feat,hidden_feat_lengths,_ = self.alayer3(outfeats2+res,outfeats_lengths2)
return hidden_feat,hidden_feat_lengths,_
"""
AVConformerEncoder6 is similar to AVConformerEncoder2, and is used for feat preencode 100ps video 25ps ,it will downsampling feat preencode first
"""
class AVConformerEncoder6(AbsEncoder):
def __init__(
self,
conformer_conf:dict,
alayer_num1:int=3,
alayer_num2:int=3,
alayer_num3:int=3,
vlayer_num1:int=3,
):
super().__init__()
conformer_conf["input_layer"] = None
conformer_conf["input_size"] = conformer_conf["output_size"] #512
self.conformer_conf =conformer_conf
self.alayer1 = ConformerEncoder(num_blocks=alayer_num1,**conformer_conf)
self.alayer2 = ConformerEncoder(num_blocks=alayer_num2,**conformer_conf)
self.alayer3 = ConformerEncoder(num_blocks=alayer_num3,**conformer_conf)
video_conformer_conf = conformer_conf.copy()
video_conformer_conf["input_size"] = 512
video_conformer_conf["input_layer"] = "linear"
self.vlayer1 = ConformerEncoder(num_blocks=vlayer_num1,**video_conformer_conf)
self.fusion1 = DimConvert(in_channels=512*2,out_channels=256)
self.fusion2 = DimConvert(in_channels=256*2,out_channels=256)
self.audioturner = DimConvert(in_channels=256*2,out_channels=256)
self.subsampling = Conv2dSubsampling(
512,
512,
conformer_conf["dropout_rate"],
None)
def output_size(self) -> int:
return self.conformer_conf["output_size"]
def forward(self,feats,feats_lengths,video,video_lengths
) -> Tuple[torch.Tensor, torch.Tensor, Optional[torch.Tensor]]:
"""
Args:
feats (torch.Tensor): Input tensor (#batch, T, 256).
feats_lengths (torch.Tensor): Input length (#batch).
video (torch.Tensor): Input tensor (#batch, T, 256). #video and audio both 25 ps
video_lengths (torch.Tensor): Input length (#batch)
"""
#downsampling audio 100fps -> 25fps
# log_file = open("/yrfs2/cv1/hangchen2/espnet/misp2021/asr1/exp_gssfar_lipfar/featfinal_avsr_far_av/test","a")
# print(f"feats:{feats.device},feats_lengths:{feats_lengths.device}",file=log_file)
# print(f"video.shape:{video.device},video_lengths:{video_lengths.device}",file=log_file)
masks = (~make_pad_mask(feats_lengths)[:, None, :]).to(video_lengths.device)
feats, masks = self.subsampling(feats, masks)
feats_lengths = masks.squeeze(1).sum(1)
        # audio and video have nearly the same length; clamp both to the shorter one
feats_lengths = feats_lengths.min(video_lengths)
video_lengths = feats_lengths.clone()
feats = feats[:,:max(feats_lengths)]
video = video[:,:max(feats_lengths)]
#fusion 1 + layer 1
x_concat = torch.cat((feats, video), dim=-1)
amid_feat = self.fusion1(x_concat)
# print(f"famid_feat.shape:{amid_feat.shape},outfeats_lengths:{feats_lengths}",file=log_file)
outfeats1,outfeats_lengths1,_ = self.alayer1(amid_feat,feats_lengths)
outvideo1,outvideo_lengths1,_ = self.vlayer1(video,video_lengths)
#fusion 2+layer 2
x_concat1 = torch.cat((outfeats1, outvideo1), dim=-1)
amid_feat1 = self.fusion2(x_concat1)
outfeats2,outfeats_lengths2,_ = self.alayer2(amid_feat1,outfeats_lengths1)
#skip connection + layer av
res = self.audioturner(feats)
hidden_feat,hidden_feat_lengths,_ = self.alayer3(outfeats2+res,outfeats_lengths2)
return hidden_feat,hidden_feat_lengths,_
"""
AVConformerEncoder7 is similar to AVConformerEncoder4, and is used for wav preencode 25ps +video 25 ps ,which don't need downsampling
"""
class AVConformerEncoder7(AbsEncoder): # [b,T,512]->[b,T,256]
def __init__(
self,
conformer_conf:dict,
alayer_num1:int,
):
super().__init__()
conformer_conf["input_layer"] = None
conformer_conf["input_size"] = conformer_conf["output_size"]
self.conformer_conf =conformer_conf
self.fusion = DimConvert(in_channels=512*2,out_channels=256)
        self.alayer1 = ConformerEncoder(num_blocks=alayer_num1, **conformer_conf)  # includes embedding layer
def output_size(self) -> int:
return self.conformer_conf["output_size"]
def forward(self,feats,feats_lengths,video,video_lengths
) -> Tuple[torch.Tensor, torch.Tensor, Optional[torch.Tensor]]:
"""
Args:
feats (torch.Tensor): Input tensor (#batch, L, input_size).
feats_lengths (torch.Tensor): Input length (#batch).
video (torch.Tensor): Input tensor (#batch, L, input_size).
video_lengths (torch.Tensor): Input length (#batch)
"""
feats_lengths = feats_lengths.min(video_lengths)
video_lengths = feats_lengths.clone()
feats = feats[:,:max(feats_lengths)]
video = video[:,:max(feats_lengths)]
#fusion 1+layer 1
x_concat = torch.cat((feats, video), dim=-1) #B,T,1024
amid_feat1= self.fusion(x_concat) #B,T,256
hidden_feat,hidden_feat_lengths,_ = self.alayer1(amid_feat1,feats_lengths)
return hidden_feat,hidden_feat_lengths,_
# conformer_conf = {"output_size": 256 , # dimension of attention
# "attention_heads": 4,
# "linear_units": 2048 , # the number of units of position-wise feed forward
# "dropout_rate": 0.1,
# "positional_dropout_rate": 0.1,
# "attention_dropout_rate": 0.0,
# "input_layer": "conv2d" ,# encoder architecture type
# "normalize_before": True,
# "pos_enc_layer_type": "rel_pos",
# "selfattention_layer_type": "rel_selfattn",
# "activation_type": "swish",
# "macaron_style": True,
# "use_cnn_module": True,
# "cnn_module_kernel": 15}
# encoder = AVConformerEncoder5(conformer_conf)
# feats = torch.rand(16,90,512)
# video = torch.rand(16,90,512)
# feats_l = torch.randint(40,91,(16,))
# feats_l[0] = 90
# video_l = torch.randint(90,91,(16,))
# video_l[0] = 90
# hidden_feat,hidden_feat_lengths,_ = encoder(feats,feats_l,video,video_l)
# print(hidden_feat.shape,hidden_feat_lengths.shape)
class TCNFusionEncoder(AbsEncoder): # [b,T,512]->[b,T,256*3]
def __init__(
self,
single_input_dim=512,
fuse_type="tcn",
hidden_channels=[256 *3, 256 * 3, 256 * 3],
kernels_size= [3, 5, 7],
dropout=0.2,
act_type="prelu",
downsample_type="norm"
):
super().__init__()
fuse_setting = {
'in_channels': [single_input_dim, single_input_dim],
"hidden_channels":hidden_channels,
"kernels_size":kernels_size,
"dropout":dropout,
"act_type":act_type,
"downsample_type":downsample_type,
}
self.subsampling = Conv2dSubsampling(
single_input_dim,
single_input_dim,
dropout,
None)
self.fusion = AudioVisualFuse(fuse_type=fuse_type, fuse_setting=fuse_setting)
self.dimturner = DimConvert(in_channels=256*3,out_channels=256)
def output_size(self) -> int:
return 256
def forward(self,feats,feats_lengths,video,video_lengths
) -> Tuple[torch.Tensor, torch.Tensor, Optional[torch.Tensor]]:
"""
Args:
feats (torch.Tensor): Input tensor (#batch, L, input_size).
feats_lengths (torch.Tensor): Input length (#batch).
video (torch.Tensor): Input tensor (#batch, L, input_size). #video has subsampling
video_lengths (torch.Tensor): Input length (#batch)
"""
#100 fps ->25fps downsampling and alignment
masks = (~make_pad_mask(feats_lengths)[:, None, :]).to(feats_lengths.device)
# log_file = open("/yrfs2/cv1/hangchen2/espnet/misp2021/asr1/expfarlipfar/tcn_avsr_far_av/test","a")
# print("feattype:{feats.device} {feats_lengths.device}",file=log_file)
# print("videotype:{video.device} {video_lengths.device}",file=log_file)
# print("masktype:{mask.device}",file=log_file)
feats, masks = self.subsampling(feats, masks)
feats_lengths = masks.squeeze(1).sum(1)
feats_lengths = feats_lengths.min(video_lengths)
video_lengths = feats_lengths.clone()
feats = feats[:,:max(feats_lengths)]
video = video[:,:max(feats_lengths)]
#fusion TCN
feats = feats.transpose(1,2)#[B,T,D]->[B,D,T]
video = video.transpose(1,2)
hidden_feat, hidden_feat_lengths = self.fusion([feats], [video], feats_lengths) #[B,D,T]->[B,D,T]
hidden_feat = self.dimturner(hidden_feat.transpose(1,2))
return hidden_feat,hidden_feat_lengths,None
# print("hhh")
# fusionnet = TCNFusionEncoder(**dict( single_input_dim=512,
# fuse_type="tcn",
# hidden_channels=[256 *3, 256 * 3, 256 * 3],
# kernels_size= [3, 5, 7],
# dropout=0.2,
# act_type="prelu",
# downsample_type="norm"))
# feats = torch.rand(16,90,512)
# video = torch.rand(16,90,512)
# feats_l = torch.randint(40,91,(16,))
# feats_l[0] = 90
# video_l = torch.randint(90,91,(16,))
# video_l[0] = 90
# print("hhh")
# hidden_feat,hidden_feat_lengths = fusionnet(feats,feats_l,video,video_l)
# print(hidden_feat.shape,hidden_feat_lengths)
class VConformerEncoder(AbsEncoder): # [b,T,512]->[b,T,256]
def __init__(
self,
conformer_conf:dict,
vlayer_num1:int,
):
super().__init__()
conformer_conf["input_layer"] = None
conformer_conf["input_size"] = conformer_conf["output_size"]
self.conformer_conf =conformer_conf
self.vlayer1 = ConformerEncoder(num_blocks=vlayer_num1,**conformer_conf)
def output_size(self) -> int:
return self.conformer_conf["output_size"]
def forward(self,video,video_lengths
) -> Tuple[torch.Tensor, torch.Tensor, Optional[torch.Tensor]]:
"""
Args:
feats (torch.Tensor): Input tensor (#batch, L, input_size).
feats_lengths (torch.Tensor): Input length (#batch).
video (torch.Tensor): Input tensor (#batch, L, input_size). #video has subsampling
video_lengths (torch.Tensor): Input length (#batch)
"""
hidden_feat,hidden_feat_lengths,_ = self.vlayer1(video,video_lengths)
return hidden_feat,hidden_feat_lengths,_
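# Commented smoke test for the video-only encoder, mirroring the examples above
# (the configuration values are illustrative, not from the original file):
# conformer_conf = {"output_size": 256, "attention_heads": 4, "linear_units": 2048,
#                   "dropout_rate": 0.1, "positional_dropout_rate": 0.1,
#                   "attention_dropout_rate": 0.0, "normalize_before": True,
#                   "pos_enc_layer_type": "rel_pos", "selfattention_layer_type": "rel_selfattn",
#                   "activation_type": "swish", "macaron_style": True,
#                   "use_cnn_module": True, "cnn_module_kernel": 15}
# encoder = VConformerEncoder(conformer_conf, vlayer_num1=3)
# video = torch.rand(16, 90, 256)
# video_l = torch.full((16,), 90, dtype=torch.long)
# hidden_feat, hidden_feat_lengths, _ = encoder(video, video_l)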
| 43.136564
| 197
| 0.651008
| 3,440
| 29,376
| 5.292442
| 0.091279
| 0.07926
| 0.031638
| 0.036636
| 0.805614
| 0.791223
| 0.764583
| 0.740965
| 0.704987
| 0.699879
| 0
| 0.030537
| 0.236383
| 29,376
| 680
| 198
| 43.2
| 0.781072
| 0.240809
| 0
| 0.73253
| 0
| 0
| 0.035762
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.06988
| false
| 0
| 0.06506
| 0.024096
| 0.204819
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
1649d7e48d6b2c14d0af725884e5c7c19ead0769
| 13,615
|
py
|
Python
|
djenealog/migrations/0001_initial.py
|
nim65s/djenealog
|
3666cd65d22e118294f477b41f8e56b04b56a52d
|
[
"BSD-2-Clause"
] | null | null | null |
djenealog/migrations/0001_initial.py
|
nim65s/djenealog
|
3666cd65d22e118294f477b41f8e56b04b56a52d
|
[
"BSD-2-Clause"
] | 2
|
2018-04-25T11:54:34.000Z
|
2020-03-09T07:11:35.000Z
|
djenealog/migrations/0001_initial.py
|
nim65s/djenealog
|
3666cd65d22e118294f477b41f8e56b04b56a52d
|
[
"BSD-2-Clause"
] | null | null | null |
# Generated by Django 2.2.4 on 2019-09-29 16:56
import autoslug.fields
import django.contrib.gis.db.models.fields
from django.db import migrations, models
import django.db.models.deletion
import ndh.models
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [
migrations.CreateModel(
name="Couple",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("debut", models.DateField(blank=True, null=True)),
("fin", models.DateField(blank=True, null=True)),
("commentaires", models.TextField(blank=True, null=True)),
],
bases=(models.Model, ndh.models.Links),
),
migrations.CreateModel(
name="Individu",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("nom", models.CharField(blank=True, max_length=50)),
(
"prenom",
models.CharField(blank=True, max_length=50, verbose_name="Prénom"),
),
(
"usage",
models.CharField(
blank=True, max_length=50, verbose_name="Prénom d’usage"
),
),
(
"epouse",
models.CharField(
blank=True,
max_length=50,
verbose_name="Nom d’épouse ou d’usage",
),
),
("masculin", models.NullBooleanField()),
("wikidata", models.PositiveIntegerField(blank=True, null=True)),
("commentaires", models.TextField(blank=True, null=True)),
(
"parents",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.PROTECT,
related_name="enfants",
to="djenealog.Couple",
),
),
],
bases=(models.Model, ndh.models.Links),
),
migrations.CreateModel(
name="Lieu",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
("name", models.CharField(max_length=200, unique=True)),
(
"slug",
autoslug.fields.AutoSlugField(
editable=False, populate_from="name", unique=True
),
),
("wikidata", models.PositiveIntegerField(blank=True, null=True)),
(
"point",
django.contrib.gis.db.models.fields.PointField(
blank=True, geography=True, null=True, srid=4326
),
),
],
options={
"verbose_name_plural": "Lieux",
},
bases=(ndh.models.Links, models.Model),
),
migrations.CreateModel(
name="Pacs",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"y",
models.PositiveSmallIntegerField(
blank=True, null=True, verbose_name="année"
),
),
(
"m",
models.PositiveSmallIntegerField(
blank=True, null=True, verbose_name="mois"
),
),
(
"d",
models.PositiveSmallIntegerField(
blank=True, null=True, verbose_name="jour"
),
),
("commentaires", models.TextField(blank=True, null=True)),
(
"inst",
models.OneToOneField(
on_delete=django.db.models.deletion.PROTECT,
to="djenealog.Couple",
),
),
(
"lieu",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.PROTECT,
to="djenealog.Lieu",
),
),
],
options={
"verbose_name_plural": "pacs",
},
),
migrations.CreateModel(
name="Naissance",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"y",
models.PositiveSmallIntegerField(
blank=True, null=True, verbose_name="année"
),
),
(
"m",
models.PositiveSmallIntegerField(
blank=True, null=True, verbose_name="mois"
),
),
(
"d",
models.PositiveSmallIntegerField(
blank=True, null=True, verbose_name="jour"
),
),
("commentaires", models.TextField(blank=True, null=True)),
(
"inst",
models.OneToOneField(
on_delete=django.db.models.deletion.PROTECT,
to="djenealog.Individu",
),
),
(
"lieu",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.PROTECT,
to="djenealog.Lieu",
),
),
],
options={
"abstract": False,
},
),
migrations.CreateModel(
name="Mariage",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"y",
models.PositiveSmallIntegerField(
blank=True, null=True, verbose_name="année"
),
),
(
"m",
models.PositiveSmallIntegerField(
blank=True, null=True, verbose_name="mois"
),
),
(
"d",
models.PositiveSmallIntegerField(
blank=True, null=True, verbose_name="jour"
),
),
("commentaires", models.TextField(blank=True, null=True)),
(
"inst",
models.OneToOneField(
on_delete=django.db.models.deletion.PROTECT,
to="djenealog.Couple",
),
),
(
"lieu",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.PROTECT,
to="djenealog.Lieu",
),
),
],
options={
"abstract": False,
},
),
migrations.CreateModel(
name="Divorce",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"y",
models.PositiveSmallIntegerField(
blank=True, null=True, verbose_name="année"
),
),
(
"m",
models.PositiveSmallIntegerField(
blank=True, null=True, verbose_name="mois"
),
),
(
"d",
models.PositiveSmallIntegerField(
blank=True, null=True, verbose_name="jour"
),
),
("commentaires", models.TextField(blank=True, null=True)),
(
"inst",
models.OneToOneField(
on_delete=django.db.models.deletion.PROTECT,
to="djenealog.Couple",
),
),
(
"lieu",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.PROTECT,
to="djenealog.Lieu",
),
),
],
options={
"abstract": False,
},
),
migrations.CreateModel(
name="Deces",
fields=[
(
"id",
models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name="ID",
),
),
(
"y",
models.PositiveSmallIntegerField(
blank=True, null=True, verbose_name="année"
),
),
(
"m",
models.PositiveSmallIntegerField(
blank=True, null=True, verbose_name="mois"
),
),
(
"d",
models.PositiveSmallIntegerField(
blank=True, null=True, verbose_name="jour"
),
),
("commentaires", models.TextField(blank=True, null=True)),
(
"inst",
models.OneToOneField(
on_delete=django.db.models.deletion.PROTECT,
to="djenealog.Individu",
),
),
(
"lieu",
models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.PROTECT,
to="djenealog.Lieu",
),
),
],
options={
"verbose_name": "décès",
"verbose_name_plural": "décès",
},
),
migrations.AddField(
model_name="couple",
name="femme",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.PROTECT,
related_name="femme",
to="djenealog.Individu",
),
),
migrations.AddField(
model_name="couple",
name="mari",
field=models.ForeignKey(
blank=True,
null=True,
on_delete=django.db.models.deletion.PROTECT,
related_name="mari",
to="djenealog.Individu",
),
),
]
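    # Note the ordering above: Couple and Individu reference each other, so the
    # femme and mari foreign keys are attached with AddField at the end of the
    # operations list to break the circular dependency between the two models.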
| 34.208543
| 88
| 0.336981
| 778
| 13,615
| 5.803342
| 0.140103
| 0.077741
| 0.093023
| 0.128018
| 0.832337
| 0.832337
| 0.792248
| 0.76567
| 0.76567
| 0.731118
| 0
| 0.00517
| 0.573779
| 13,615
| 397
| 89
| 34.29471
| 0.772876
| 0.003305
| 0
| 0.764103
| 1
| 0
| 0.057703
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.012821
| 0
| 0.023077
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
16a17c9e305d492b5216eeddefb43f1ba5bfee79
| 14,430
|
py
|
Python
|
tests/local/warehouse/metrics/test_all_table_metrics.py
|
Tonkonozhenko/soda-sql
|
a671cc3f20b814a074f064c8a24b4a10447f40f4
|
[
"Apache-2.0"
] | null | null | null |
tests/local/warehouse/metrics/test_all_table_metrics.py
|
Tonkonozhenko/soda-sql
|
a671cc3f20b814a074f064c8a24b4a10447f40f4
|
[
"Apache-2.0"
] | null | null | null |
tests/local/warehouse/metrics/test_all_table_metrics.py
|
Tonkonozhenko/soda-sql
|
a671cc3f20b814a074f064c8a24b4a10447f40f4
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2020 Soda
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from sodasql.scan.metric import Metric
from sodasql.scan.scan_yml_parser import KEY_METRIC_GROUPS, KEY_METRICS, KEY_COLUMNS, COLUMN_KEY_TESTS
from tests.common.sql_test_case import SqlTestCase
from decimal import Decimal
class TestMetricGroups(SqlTestCase):
def test_metric_group_duplicates(self):
self.sql_recreate_table(
[f"score {self.dialect.data_type_varchar_255}"],
["('1')",
"('2')",
"('2')",
"('3')",
"('3')",
"('3')",
"('3')",
"('3')",
"('4')",
"('4')",
"('5')",
"(null)"])
scan_result = self.scan({
KEY_METRIC_GROUPS: [
Metric.METRIC_GROUP_DUPLICATES
]
})
self.assertEqual(scan_result.get(Metric.DISTINCT), 5)
self.assertEqual(scan_result.get(Metric.DUPLICATE_COUNT), 3)
self.assertEqual(scan_result.get(Metric.UNIQUENESS), 40)
self.assertEqual(scan_result.get(Metric.UNIQUE_COUNT), 2)
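        # How these numbers follow from the 12 inserted rows (11 non-null values):
        # 5 distinct values ('1'..'5'); 3 of them occur more than once ('2', '3', '4'),
        # giving DUPLICATE_COUNT = 3; 2 occur exactly once ('1', '5'), giving
        # UNIQUE_COUNT = 2; and (distinct - 1) / (values - 1) * 100 = 4 / 10 * 100 = 40
        # is consistent with the UNIQUENESS assertion above.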
def test_metric_group_length(self):
self.sql_recreate_table(
[f"score {self.dialect.data_type_varchar_255}"],
["('1')",
"('2')",
"('2')",
"('3')",
"('3')",
"('3')",
"('3')",
"('3')",
"('4')",
"('4')",
"('5')",
"(null)"])
scan_result = self.scan({
KEY_METRIC_GROUPS: [
Metric.METRIC_GROUP_LENGTH
]
})
self.assertEqual(scan_result.get(Metric.AVG_LENGTH), 1)
self.assertEqual(scan_result.get(Metric.MAX_LENGTH), 1)
self.assertEqual(scan_result.get(Metric.MIN_LENGTH), 1)
def test_metric_group_missing(self):
self.sql_recreate_table(
[f"score {self.dialect.data_type_varchar_255}"],
["('1')",
"('2')",
"('2')",
"('3')",
"('3')",
"('3')",
"('3')",
"('3')",
"('4')",
"('4')",
"('5')",
"(null)"])
scan_result = self.scan({
KEY_METRIC_GROUPS: [
Metric.METRIC_GROUP_MISSING
]
})
self.assertEqual(scan_result.get(Metric.MISSING_COUNT), 1)
        self.assertEqual(scan_result.get(Metric.MISSING_PERCENTAGE), 8.333333333333334)  # 1/12 of rows, as a percentage
self.assertEqual(scan_result.get(Metric.VALUES_COUNT), 11)
        self.assertEqual(scan_result.get(Metric.VALUES_PERCENTAGE), 91.66666666666667)  # 11/12 of rows, as a percentage
def test_metric_group_profiling(self):
self.sql_recreate_table(
[f"score {self.dialect.data_type_varchar_255}"],
["('1')",
"('2')",
"('2')",
"('3')",
"('3')",
"('3')",
"('3')",
"('3')",
"('4')",
"('4')",
"('5')",
"(null)"])
scan_result = self.scan({
KEY_METRIC_GROUPS: [
Metric.METRIC_GROUP_PROFILING
]
})
self.assertCountEqual(scan_result.get(Metric.FREQUENT_VALUES),
[{'frequency': 5, 'value': '3'},
{'frequency': 2, 'value': '2'},
{'frequency': 2, 'value': '4'},
{'frequency': 1, 'value': '5'},
{'frequency': 1, 'value': '1'}]
)
self.assertEqual(scan_result.get(Metric.MAXS), ['5', '4', '3', '2', '1'])
self.assertEqual(scan_result.get(Metric.MINS), ['1', '2', '3', '4', '5'])
def test_metric_group_statistics(self):
self.sql_recreate_table(
[f"score {self.dialect.data_type_integer}"],
["(1)",
"(2)",
"(2)",
"(3)",
"(3)",
"(3)",
"(3)",
"(3)",
"(4)",
"(4)",
"(5)",
"(null)"])
scan_result = self.scan({
KEY_METRIC_GROUPS: [
Metric.METRIC_GROUP_STATISTICS
]
})
self.assertEqual(scan_result.get(Metric.AVG), 3)
self.assertEqual(scan_result.get(Metric.SUM), 33)
self.assertEqual(scan_result.get(Metric.MAX), 5)
self.assertEqual(scan_result.get(Metric.MIN), 1)
self.assertAlmostEqual(scan_result.get(Metric.STDDEV), Decimal(1.09544), 4)
self.assertAlmostEqual(scan_result.get(Metric.VARIANCE), Decimal(1.2000), 4)
def test_metric_group_validity(self):
self.sql_recreate_table(
[f"score {self.dialect.data_type_varchar_255}"],
["('1')",
"('2')",
"('2')",
"('3')",
"('3')",
"('3')",
"('3')",
"('3')",
"('4')",
"('4')",
"('5')",
"(null)"])
scan_result = self.scan({
KEY_METRIC_GROUPS: [
Metric.METRIC_GROUP_VALIDITY
]
})
self.assertEqual(scan_result.get(Metric.INVALID_COUNT), 0)
self.assertEqual(scan_result.get(Metric.INVALID_PERCENTAGE), 0.0)
self.assertEqual(scan_result.get(Metric.VALID_COUNT), 11)
self.assertEqual(scan_result.get(Metric.VALID_PERCENTAGE), 91.66666666666667)
def test_metric_group_all_text(self):
self.sql_recreate_table(
[f"score {self.dialect.data_type_varchar_255}"],
["(1)",
"(2)",
"(2)",
"(3)",
"(3)",
"(3)",
"(3)",
"(3)",
"(4)",
"(4)",
"(5)",
"(null)"])
scan_result = self.scan({
KEY_METRIC_GROUPS: [
Metric.METRIC_GROUP_ALL
]
})
self.assertEqual(scan_result.get(Metric.ROW_COUNT), 12)
self.assertDictEqual(scan_result.get(Metric.SCHEMA)[0], {'dataType': 'character varying',
'name': 'score',
'nullable': True,
'semanticType': 'text',
'type': 'character varying'})
self.assertEqual(scan_result.get(Metric.AVG_LENGTH), 1)
self.assertEqual(scan_result.get(Metric.DISTINCT), 5)
self.assertEqual(scan_result.get(Metric.DUPLICATE_COUNT), 3)
self.assertCountEqual(scan_result.get(Metric.FREQUENT_VALUES), [{'frequency': 5, 'value': '3'},
{'frequency': 2, 'value': '2'},
{'frequency': 2, 'value': '4'},
{'frequency': 1, 'value': '5'},
{'frequency': 1, 'value': '1'}])
self.assertEqual(scan_result.get(Metric.INVALID_COUNT), 0)
self.assertEqual(scan_result.get(Metric.INVALID_PERCENTAGE), 0)
self.assertEqual(scan_result.get(Metric.MAXS), ['5', '4', '3', '2', '1'])
self.assertEqual(scan_result.get(Metric.MAX_LENGTH), 1)
self.assertEqual(scan_result.get(Metric.MINS), ['1', '2', '3', '4', '5'])
self.assertEqual(scan_result.get(Metric.MIN_LENGTH), 1)
self.assertEqual(scan_result.get(Metric.MISSING_COUNT), 1)
self.assertEqual(scan_result.get(Metric.MISSING_PERCENTAGE), 8.333333333333334)
self.assertEqual(scan_result.get(Metric.UNIQUENESS), 40)
self.assertEqual(scan_result.get(Metric.UNIQUE_COUNT), 2)
self.assertEqual(scan_result.get(Metric.VALID_COUNT), 11)
self.assertEqual(scan_result.get(Metric.VALID_PERCENTAGE), 91.66666666666667)
self.assertEqual(scan_result.get(Metric.VALUES_COUNT), 11)
self.assertEqual(scan_result.get(Metric.VALUES_PERCENTAGE), 91.66666666666667)
self.assertEqual(20, len(scan_result.measurements))
def test_metric_group_all_number(self):
self.sql_recreate_table(
[f"score {self.dialect.data_type_integer}"],
["(1)",
"(2)",
"(2)",
"(3)",
"(3)",
"(3)",
"(3)",
"(3)",
"(4)",
"(4)",
"(5)",
"(null)"])
scan_result = self.scan({
KEY_METRIC_GROUPS: [
Metric.METRIC_GROUP_ALL
]
})
self.assertEqual(scan_result.get(Metric.ROW_COUNT), 12)
self.assertDictEqual(scan_result.get(Metric.SCHEMA)[0], {'dataType': 'integer',
'name': 'score',
'nullable': True,
'semanticType': 'number',
'type': 'integer'})
self.assertEqual(scan_result.get(Metric.AVG), 3.0)
self.assertEqual(scan_result.get(Metric.DISTINCT), 5)
self.assertEqual(scan_result.get(Metric.DUPLICATE_COUNT), 3)
self.assertCountEqual(scan_result.get(Metric.FREQUENT_VALUES), [{'frequency': 5, 'value': 3},
{'frequency': 2, 'value': 2},
{'frequency': 2, 'value': 4},
{'frequency': 1, 'value': 5},
{'frequency': 1, 'value': 1}])
self.assertDictEqual(scan_result.get(Metric.HISTOGRAM), {'boundaries': [1.0,
1.2,
1.4,
1.6,
1.8,
2.0,
2.2,
2.4,
2.6,
2.8,
3.0,
3.2,
3.4,
3.6,
3.8,
4.0,
4.2,
4.4,
4.6,
4.8,
5.0],
'frequencies': [1, 0, 0, 0, 0, 2, 0, 0, 0, 0, 5, 0, 0,
0, 0, 2, 0, 0, 0, 1]})
self.assertEqual(scan_result.get(Metric.INVALID_COUNT), 0)
self.assertEqual(scan_result.get(Metric.INVALID_PERCENTAGE), 0)
self.assertEqual(scan_result.get(Metric.MAX), 5)
self.assertEqual(scan_result.get(Metric.MAXS), [5, 4, 3, 2, 1])
self.assertEqual(scan_result.get(Metric.MIN), 1)
self.assertEqual(scan_result.get(Metric.MINS), [1, 2, 3, 4, 5])
self.assertEqual(scan_result.get(Metric.MISSING_COUNT), 1)
self.assertEqual(scan_result.get(Metric.MISSING_PERCENTAGE), 8.333333333333334)
self.assertAlmostEqual(scan_result.get(Metric.STDDEV), Decimal(1.09544), 4)
self.assertEqual(scan_result.get(Metric.SUM), 33)
self.assertEqual(scan_result.get(Metric.UNIQUENESS), 40)
self.assertEqual(scan_result.get(Metric.UNIQUE_COUNT), 2)
self.assertEqual(scan_result.get(Metric.VALID_COUNT), 11)
self.assertEqual(scan_result.get(Metric.VALID_PERCENTAGE), 91.66666666666667)
self.assertEqual(scan_result.get(Metric.VALUES_COUNT), 11)
self.assertEqual(scan_result.get(Metric.VALUES_PERCENTAGE), 91.66666666666667)
self.assertAlmostEqual(scan_result.get(Metric.VARIANCE), Decimal(1.2000), 4)
self.assertEqual(24, len(scan_result.measurements))
def test_empty_table_metrics(self):
self.sql_recreate_table(
[f"score {self.dialect.data_type_integer}"])
scan_result = self.scan({
KEY_METRICS: [
Metric.MAX,
Metric.ROW_COUNT
],
KEY_COLUMNS: {
'score': {
COLUMN_KEY_TESTS: [
'max == None',
'max < 5'
]
}
}
})
| 43.203593
| 119
| 0.428344
| 1,284
| 14,430
| 4.628505
| 0.128505
| 0.13293
| 0.148746
| 0.217399
| 0.813057
| 0.790678
| 0.774188
| 0.773852
| 0.758371
| 0.753828
| 0
| 0.059372
| 0.442065
| 14,430
| 333
| 120
| 43.333333
| 0.678798
| 0.039085
| 0
| 0.702703
| 0
| 0
| 0.087503
| 0.022525
| 0
| 0
| 0
| 0
| 0.236486
| 1
| 0.030405
| false
| 0
| 0.013514
| 0
| 0.047297
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
16bd4f266d596eeca6b21a0df059bd787db99e25
| 121
|
py
|
Python
|
Assignment 3. Paxos/Simulation/Agents/Learner.py
|
WailAbou/Distributed-Processing
|
46a36f1fd51d6f8b35cc639eb8002d81d7e09f2b
|
[
"MIT"
] | null | null | null |
Assignment 3. Paxos/Simulation/Agents/Learner.py
|
WailAbou/Distributed-Processing
|
46a36f1fd51d6f8b35cc639eb8002d81d7e09f2b
|
[
"MIT"
] | null | null | null |
Assignment 3. Paxos/Simulation/Agents/Learner.py
|
WailAbou/Distributed-Processing
|
46a36f1fd51d6f8b35cc639eb8002d81d7e09f2b
|
[
"MIT"
] | null | null | null |
from Simulation.Agents import Agent
from Simulation.Message import Message, MessageTypes
class Learner(Agent):
pass
| 20.166667
| 52
| 0.809917
| 15
| 121
| 6.533333
| 0.666667
| 0.285714
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.140496
| 121
| 6
| 53
| 20.166667
| 0.942308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0.25
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 1
| 0
|
0
| 7
|
16c8bb191fc67b51424268c00de913f8b67a6173
| 59
|
py
|
Python
|
model/__init__.py
|
Mullans/NeuriteNet
|
884abb634c4ddf9c35b494341b3eee3bdde1e7a3
|
[
"Apache-2.0"
] | null | null | null |
model/__init__.py
|
Mullans/NeuriteNet
|
884abb634c4ddf9c35b494341b3eee3bdde1e7a3
|
[
"Apache-2.0"
] | null | null | null |
model/__init__.py
|
Mullans/NeuriteNet
|
884abb634c4ddf9c35b494341b3eee3bdde1e7a3
|
[
"Apache-2.0"
] | null | null | null |
from .training import train_wt_vs_cko, train_wt_vs_replate
| 29.5
| 58
| 0.881356
| 11
| 59
| 4.181818
| 0.727273
| 0.304348
| 0.391304
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.084746
| 59
| 1
| 59
| 59
| 0.851852
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
bc7ae4754508a46cdbf92e48c9ab0cd8d0725750
| 14,056
|
py
|
Python
|
test/kernels/test_additive_and_product_kernels.py
|
skn123/gpytorch
|
756cf4862aadfc38ebd3f32b55514e8a2eefbd8e
|
[
"MIT"
] | null | null | null |
test/kernels/test_additive_and_product_kernels.py
|
skn123/gpytorch
|
756cf4862aadfc38ebd3f32b55514e8a2eefbd8e
|
[
"MIT"
] | null | null | null |
test/kernels/test_additive_and_product_kernels.py
|
skn123/gpytorch
|
756cf4862aadfc38ebd3f32b55514e8a2eefbd8e
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import math
import unittest
import torch
import gpytorch
from gpytorch.kernels import LinearKernel, MaternKernel, RBFKernel, RFFKernel
class TestModel(gpytorch.models.ExactGP):
def __init__(self, train_x, train_y):
likelihood = gpytorch.likelihoods.GaussianLikelihood()
super().__init__(train_x, train_y, likelihood)
self.mean_module = gpytorch.means.ZeroMean()
self.covar_module = gpytorch.kernels.ScaleKernel(
RFFKernel(active_dims=[0], num_samples=10) + MaternKernel(nu=2.5, active_dims=[1, 2])
)
def forward(self, input):
mean = self.mean_module(input)
covar = self.covar_module(input)
return gpytorch.distributions.MultivariateNormal(mean, covar)
class TestModelNoStructure(gpytorch.models.ExactGP):
def __init__(self, train_x, train_y):
likelihood = gpytorch.likelihoods.GaussianLikelihood()
super().__init__(train_x, train_y, likelihood)
self.mean_module = gpytorch.means.ZeroMean()
self.covar_module = gpytorch.kernels.ScaleKernel(
RBFKernel(active_dims=[0], num_samples=10) + MaternKernel(nu=2.5, active_dims=[1, 2])
)
def forward(self, input):
mean = self.mean_module(input)
covar = self.covar_module(input)
return gpytorch.distributions.MultivariateNormal(mean, covar)
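# The two models above differ only in the kernel applied to the first active
# dimension: TestModel uses an RFFKernel (random Fourier features, where
# num_samples is meaningful), while TestModelNoStructure swaps in an RBFKernel,
# which appears to simply ignore the num_samples keyword argument.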
class TestAdditiveAndProductKernel(unittest.TestCase):
def test_computes_product_of_radial_basis_function(self):
a = torch.tensor([4, 2, 8], dtype=torch.float).view(3, 1)
b = torch.tensor([0, 2], dtype=torch.float).view(2, 1)
lengthscale = 2
kernel_1 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_2 = RBFKernel().initialize(lengthscale=lengthscale)
kernel = kernel_1 * kernel_2
actual = torch.tensor([[16, 4], [4, 0], [64, 36]], dtype=torch.float)
actual = actual.mul_(-0.5).div_(lengthscale ** 2).exp() ** 2
kernel.eval()
res = kernel(a, b).evaluate()
self.assertLess(torch.norm(res - actual), 2e-5)
def test_computes_sum_of_radial_basis_function(self):
a = torch.tensor([4, 2, 8], dtype=torch.float).view(3, 1)
b = torch.tensor([0, 2], dtype=torch.float).view(2, 1)
lengthscale = 2
kernel_1 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_2 = RBFKernel().initialize(lengthscale=lengthscale)
kernel = kernel_1 + kernel_2
actual = torch.tensor([[16, 4], [4, 0], [64, 36]], dtype=torch.float)
actual = actual.mul_(-0.5).div_(lengthscale ** 2).exp() * 2
kernel.eval()
res = kernel(a, b).evaluate()
self.assertLess(torch.norm(res - actual), 2e-5)
def test_computes_sum_of_radial_basis_function_diag(self):
a = torch.tensor([4, 2, 8], dtype=torch.float).view(3, 1)
b = torch.tensor([0, 2, 2], dtype=torch.float).view(3, 1)
actual = torch.tensor([0.2702, 2.000, 0.0222])
lengthscale = 2.0
kernel_1 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_2 = RBFKernel().initialize(lengthscale=lengthscale)
kernel = kernel_1 + kernel_2
kernel.eval()
res = kernel(a, b, diag=True)
self.assertLess(torch.norm(res - actual), 1e-3)
def test_computes_sum_of_three_radial_basis_function_diag(self):
a = torch.tensor([4, 2, 8], dtype=torch.float).view(3, 1)
b = torch.tensor([0, 2, 2], dtype=torch.float).view(3, 1)
actual = torch.tensor([0.4060, 3.000, 0.0333])
lengthscale = 2.0
kernel_1 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_2 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_3 = RBFKernel().initialize(lengthscale=lengthscale)
kernel = kernel_1 + kernel_2 + kernel_3
kernel.eval()
res = kernel(a, b, diag=True)
self.assertLess(torch.norm(res - actual), 1e-3)
    def test_computes_product_of_three_radial_basis_function_diag(self):
a = torch.tensor([4, 2, 8], dtype=torch.float).view(3, 1)
b = torch.tensor([0, 2, 2], dtype=torch.float).view(3, 1)
actual = torch.tensor([2.4788e-03, 1.000, 1.3710e-06])
lengthscale = 2.0
kernel_1 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_2 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_3 = RBFKernel().initialize(lengthscale=lengthscale)
kernel = kernel_1 * kernel_2 * kernel_3
kernel.eval()
res = kernel(a, b, diag=True)
self.assertLess(torch.norm(res - actual), 1e-3)
    def test_computes_product_of_radial_basis_function_diag(self):
a = torch.tensor([4, 2, 8], dtype=torch.float).view(3, 1)
b = torch.tensor([0, 2, 2], dtype=torch.float).view(3, 1)
actual = torch.tensor([1.8316e-02, 1.000, 1.2341e-04])
lengthscale = 2.0
kernel_1 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_2 = RBFKernel().initialize(lengthscale=lengthscale)
kernel = kernel_1 * kernel_2
kernel.eval()
res = kernel(a, b, diag=True)
self.assertLess(torch.norm(res - actual), 1e-3)
def test_computes_product_of_three_radial_basis_function(self):
a = torch.tensor([4, 2, 8], dtype=torch.float).view(3, 1)
b = torch.tensor([0, 2], dtype=torch.float).view(2, 1)
lengthscale = 2
kernel_1 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_2 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_3 = RBFKernel().initialize(lengthscale=lengthscale)
kernel = (kernel_1 * kernel_2) * kernel_3
self.assertEqual(len(kernel.kernels), 3)
for sub_kernel in kernel.kernels:
self.assertIsInstance(sub_kernel, RBFKernel)
actual = torch.tensor([[16, 4], [4, 0], [64, 36]], dtype=torch.float)
actual = actual.mul_(-0.5).div_(lengthscale ** 2).exp() ** 3
kernel.eval()
res = kernel(a, b).evaluate()
self.assertLess(torch.norm(res - actual), 2e-5)
kernel_1 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_2 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_3 = RBFKernel().initialize(lengthscale=lengthscale)
        kernel = kernel_1 * (kernel_2 * kernel_3)
self.assertEqual(len(kernel.kernels), 3)
for sub_kernel in kernel.kernels:
self.assertIsInstance(sub_kernel, RBFKernel)
kernel.eval()
res = kernel(a, b).evaluate()
self.assertLess(torch.norm(res - actual), 2e-5)
def test_computes_product_of_four_radial_basis_function(self):
a = torch.tensor([4, 2, 8], dtype=torch.float).view(3, 1)
b = torch.tensor([0, 2], dtype=torch.float).view(2, 1)
lengthscale = 2
kernel_1 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_2 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_3 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_4 = RBFKernel().initialize(lengthscale=lengthscale)
kernel = kernel_1 * kernel_2 * kernel_3 * kernel_4
self.assertEqual(len(kernel.kernels), 4)
for sub_kernel in kernel.kernels:
self.assertIsInstance(sub_kernel, RBFKernel)
actual = torch.tensor([[16, 4], [4, 0], [64, 36]], dtype=torch.float)
actual = actual.mul_(-0.5).div_(lengthscale ** 2).exp() ** 4
kernel.eval()
res = kernel(a, b).evaluate()
self.assertLess(torch.norm(res - actual), 2e-5)
def test_computes_sum_of_four_radial_basis_function(self):
a = torch.tensor([4, 2, 8], dtype=torch.float).view(3, 1)
b = torch.tensor([0, 2], dtype=torch.float).view(2, 1)
lengthscale = 2
kernel_1 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_2 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_3 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_4 = RBFKernel().initialize(lengthscale=lengthscale)
kernel = (kernel_1 + kernel_2) + (kernel_3 + kernel_4)
self.assertEqual(len(kernel.kernels), 4)
for sub_kernel in kernel.kernels:
self.assertIsInstance(sub_kernel, RBFKernel)
actual = (
torch.tensor([[16, 4], [4, 0], [64, 36]], dtype=torch.float).mul_(-0.5).div_(lengthscale ** 2).exp() * 4
)
kernel.eval()
res = kernel(a, b).evaluate()
self.assertLess(torch.norm(res - actual), 2e-5)
def test_computes_sum_of_three_radial_basis_function(self):
a = torch.tensor([4, 2, 8], dtype=torch.float).view(3, 1)
b = torch.tensor([0, 2], dtype=torch.float).view(2, 1)
lengthscale = 2
kernel_1 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_2 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_3 = RBFKernel().initialize(lengthscale=lengthscale)
kernel = (kernel_1 + kernel_2) + kernel_3
self.assertEqual(len(kernel.kernels), 3)
for sub_kernel in kernel.kernels:
self.assertIsInstance(sub_kernel, RBFKernel)
actual = (
torch.tensor([[16, 4], [4, 0], [64, 36]], dtype=torch.float).mul_(-0.5).div_(lengthscale ** 2).exp() * 3
)
kernel.eval()
res = kernel(a, b).evaluate()
self.assertLess(torch.norm(res - actual), 2e-5)
kernel_1 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_2 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_3 = RBFKernel().initialize(lengthscale=lengthscale)
kernel = kernel_1 + (kernel_2 + kernel_3)
self.assertEqual(len(kernel.kernels), 3)
for sub_kernel in kernel.kernels:
self.assertIsInstance(sub_kernel, RBFKernel)
kernel.eval()
res = kernel(a, b).evaluate()
self.assertLess(torch.norm(res - actual), 2e-5)
def test_computes_sum_radial_basis_function_gradient(self):
softplus = torch.nn.functional.softplus
a = torch.tensor([4, 2, 8], dtype=torch.float).view(3, 1)
b = torch.tensor([0, 2, 2], dtype=torch.float).view(3, 1)
lengthscale = 2
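        # raw_lengthscale is stored pre-softplus, so softplus(param) recovers the lengthscale.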
param = math.log(math.exp(lengthscale) - 1) * torch.ones(3, 3)
param.requires_grad_()
diffs = a.expand(3, 3) - b.expand(3, 3).transpose(0, 1)
actual_output = (-0.5 * (diffs / softplus(param)) ** 2).exp()
actual_output.backward(torch.eye(3))
actual_param_grad = param.grad.sum() * 2
kernel_1 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_2 = RBFKernel().initialize(lengthscale=lengthscale)
kernel = kernel_1 + kernel_2
kernel.eval()
output = kernel(a, b).evaluate()
output.backward(gradient=torch.eye(3))
res = kernel.kernels[0].raw_lengthscale.grad + kernel.kernels[1].raw_lengthscale.grad
self.assertLess(torch.norm(res - actual_param_grad), 2e-5)
def test_computes_sum_three_radial_basis_function_gradient(self):
softplus = torch.nn.functional.softplus
a = torch.tensor([4, 2, 8], dtype=torch.float).view(3, 1)
b = torch.tensor([0, 2, 2], dtype=torch.float).view(3, 1)
lengthscale = 2
param = math.log(math.exp(lengthscale) - 1) * torch.ones(3, 3)
param.requires_grad_()
diffs = a.expand(3, 3) - b.expand(3, 3).transpose(0, 1)
actual_output = (-0.5 * (diffs / softplus(param)) ** 2).exp()
actual_output.backward(torch.eye(3))
actual_param_grad = param.grad.sum() * 3
kernel_1 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_2 = RBFKernel().initialize(lengthscale=lengthscale)
kernel_3 = RBFKernel().initialize(lengthscale=lengthscale)
kernel = kernel_1 + kernel_2 + kernel_3
kernel.eval()
output = kernel(a, b).evaluate()
output.backward(gradient=torch.eye(3))
res = (
kernel.kernels[0].raw_lengthscale.grad
+ kernel.kernels[1].raw_lengthscale.grad
+ kernel.kernels[2].raw_lengthscale.grad
)
self.assertLess(torch.norm(res - actual_param_grad), 2e-5)
def test_is_stationary(self):
kernel_1 = RBFKernel().initialize(lengthscale=1)
kernel_2 = RBFKernel().initialize(lengthscale=2)
kernel_3 = LinearKernel().initialize()
self.assertTrue((kernel_1 + kernel_2).is_stationary)
self.assertTrue((kernel_1 * kernel_2).is_stationary)
self.assertFalse((kernel_1 + kernel_3).is_stationary)
self.assertFalse((kernel_1 * kernel_3).is_stationary)
def test_kernel_output(self):
train_x = torch.randn(1000, 3)
train_y = torch.randn(1000)
model = TestModel(train_x, train_y)
# Make sure that the prior kernel is the correct type
model.train()
output = model(train_x).lazy_covariance_matrix.evaluate_kernel()
self.assertIsInstance(output, gpytorch.lazy.SumLazyTensor)
# Make sure that the prior predictive kernel is the correct type
model.train()
output = model.likelihood(model(train_x)).lazy_covariance_matrix.evaluate_kernel()
self.assertIsInstance(output, gpytorch.lazy.AddedDiagLazyTensor)
def test_kernel_output_no_structure(self):
train_x = torch.randn(1000, 3)
train_y = torch.randn(1000)
model = TestModelNoStructure(train_x, train_y)
# Make sure that the prior kernel is the correct type
model.train()
output = model(train_x).lazy_covariance_matrix.evaluate_kernel()
self.assertIsInstance(output, gpytorch.lazy.ConstantMulLazyTensor)
# Make sure that the prior predictive kernel is the correct type
model.train()
output = model.likelihood(model(train_x)).lazy_covariance_matrix.evaluate_kernel()
self.assertIsInstance(output, gpytorch.lazy.AddedDiagLazyTensor)
if __name__ == "__main__":
unittest.main()
| 40.624277
| 116
| 0.646059
| 1,781
| 14,056
| 4.930938
| 0.085345
| 0.088704
| 0.140059
| 0.182077
| 0.945115
| 0.934411
| 0.933159
| 0.933159
| 0.930654
| 0.922569
| 0
| 0.042633
| 0.224032
| 14,056
| 345
| 117
| 40.742029
| 0.762538
| 0.017857
| 0
| 0.753788
| 0
| 0
| 0.00058
| 0
| 0
| 0
| 0
| 0
| 0.128788
| 1
| 0.07197
| false
| 0
| 0.018939
| 0
| 0.109848
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
bcad046946333b6b62bfaeacc59777938146d54a
| 9,628
|
py
|
Python
|
src/connect_board.py
|
ZehaoJin/AlphaZero_Connect4
|
715a74d6890fb64a77c6b4742a8723925331f357
|
[
"Apache-2.0"
] | 1
|
2020-03-22T16:37:42.000Z
|
2020-03-22T16:37:42.000Z
|
src/connect_board.py
|
ZehaoJin/AlphaZero_Connect4
|
715a74d6890fb64a77c6b4742a8723925331f357
|
[
"Apache-2.0"
] | null | null | null |
src/connect_board.py
|
ZehaoJin/AlphaZero_Connect4
|
715a74d6890fb64a77c6b4742a8723925331f357
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
import numpy as np
class board():
def __init__(self):
self.init_board = np.zeros([3,3]).astype(str)
self.init_board[self.init_board == "0.0"] = " "
self.player = 0
self.current_board = self.init_board
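    # Slots are numbered 0-8 in row-major order: slot // 3 gives the row, slot % 3 the column.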
def drop_piece(self, slot):
        row = slot // 3
        column = slot % 3
if self.current_board[row, column] != " ":
return "Invalid move"
else:
if self.player == 0:
self.current_board[row, column] = "O"
self.player = 1
elif self.player == 1:
self.current_board[row, column] = "X"
self.player = 0
def check_winner(self):
if self.player == 1:
for row in range(3):
for col in range(3):
if self.current_board[row, col] != " ":
# rows
try:
if self.current_board[row, col] == "O" and self.current_board[row + 1, col] == "O" and \
self.current_board[row + 2, col] == "O":
#print("row")
return True
except IndexError:
                            pass
# columns
try:
if self.current_board[row, col] == "O" and self.current_board[row, col + 1] == "O" and \
self.current_board[row, col + 2] == "O":
#print("col")
return True
except IndexError:
                            pass
# \ diagonal
try:
if self.current_board[row, col] == "O" and self.current_board[row + 1, col + 1] == "O" and \
self.current_board[row + 2, col + 2] == "O":
#print("\\")
return True
except IndexError:
                            pass
# / diagonal
try:
if self.current_board[row, col] == "O" and self.current_board[row + 1, col - 1] == "O" and \
self.current_board[row + 2, col - 2] == "O" \
and (col-2) >= 0:
#print("/")
return True
except IndexError:
                            pass
if self.player == 0:
for row in range(3):
for col in range(3):
if self.current_board[row, col] != " ":
# rows
try:
if self.current_board[row, col] == "X" and self.current_board[row + 1, col] == "X" and \
self.current_board[row + 2, col] == "X":
return True
except IndexError:
                            pass
# columns
try:
if self.current_board[row, col] == "X" and self.current_board[row, col + 1] == "X" and \
self.current_board[row, col + 2] == "X":
return True
except IndexError:
                            pass
# \ diagonal
try:
if self.current_board[row, col] == "X" and self.current_board[row + 1, col + 1] == "X" and \
self.current_board[row + 2, col + 2] == "X":
return True
except IndexError:
                            pass
# / diagonal
try:
if self.current_board[row, col] == "X" and self.current_board[row + 1, col - 1] == "X" and \
self.current_board[row + 2, col - 2] == "X"\
and (col-2) >= 0:
return True
except IndexError:
                            pass
def actions(self): # returns all possible moves
acts = []
for slot in range(9):
            row = slot // 3
            column = slot % 3
if self.current_board[row, column] == " ":
acts.append(slot)
return acts
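    # Illustrative usage (an assumption for documentation, not part of the original
    # module): drop pieces into the lowest-numbered legal slot until a player wins
    # or the board fills up.
    #   b = board()
    #   while not b.check_winner() and b.actions():
    #       b.drop_piece(b.actions()[0])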
'''
def __init__(self):
self.init_board = np.zeros([6,7]).astype(str)
self.init_board[self.init_board == "0.0"] = " "
self.player = 0
self.current_board = self.init_board
def drop_piece(self, column):
if self.current_board[0, column] != " ":
return "Invalid move"
else:
row = 0; pos = " "
while (pos == " "):
if row == 6:
row += 1
break
pos = self.current_board[row, column]
row += 1
if self.player == 0:
self.current_board[row-2, column] = "O"
self.player = 1
elif self.player == 1:
self.current_board[row-2, column] = "X"
self.player = 0
def check_winner(self):
if self.player == 1:
for row in range(6):
for col in range(7):
if self.current_board[row, col] != " ":
# rows
try:
if self.current_board[row, col] == "O" and self.current_board[row + 1, col] == "O" and \
self.current_board[row + 2, col] == "O" and self.current_board[row + 3, col] == "O":
#print("row")
return True
except IndexError:
next
# columns
try:
if self.current_board[row, col] == "O" and self.current_board[row, col + 1] == "O" and \
self.current_board[row, col + 2] == "O" and self.current_board[row, col + 3] == "O":
#print("col")
return True
except IndexError:
next
# \ diagonal
try:
if self.current_board[row, col] == "O" and self.current_board[row + 1, col + 1] == "O" and \
self.current_board[row + 2, col + 2] == "O" and self.current_board[row + 3, col + 3] == "O":
#print("\\")
return True
except IndexError:
next
# / diagonal
try:
if self.current_board[row, col] == "O" and self.current_board[row + 1, col - 1] == "O" and \
self.current_board[row + 2, col - 2] == "O" and self.current_board[row + 3, col - 3] == "O"\
and (col-3) >= 0:
#print("/")
return True
except IndexError:
next
if self.player == 0:
for row in range(6):
for col in range(7):
if self.current_board[row, col] != " ":
# rows
try:
if self.current_board[row, col] == "X" and self.current_board[row + 1, col] == "X" and \
self.current_board[row + 2, col] == "X" and self.current_board[row + 3, col] == "X":
return True
except IndexError:
next
# columns
try:
if self.current_board[row, col] == "X" and self.current_board[row, col + 1] == "X" and \
self.current_board[row, col + 2] == "X" and self.current_board[row, col + 3] == "X":
return True
except IndexError:
next
# \ diagonal
try:
if self.current_board[row, col] == "X" and self.current_board[row + 1, col + 1] == "X" and \
self.current_board[row + 2, col + 2] == "X" and self.current_board[row + 3, col + 3] == "X":
return True
except IndexError:
next
# / diagonal
try:
if self.current_board[row, col] == "X" and self.current_board[row + 1, col - 1] == "X" and \
self.current_board[row + 2, col - 2] == "X" and self.current_board[row + 3, col - 3] == "X"\
and (col-3) >= 0:
return True
except IndexError:
next
def actions(self): # returns all possible moves
acts = []
for col in range(7):
if self.current_board[0, col] == " ":
acts.append(col)
return acts
'''
| 46.066986
| 124
| 0.365185
| 911
| 9,628
| 3.759605
| 0.070252
| 0.228029
| 0.331679
| 0.371679
| 0.948905
| 0.921752
| 0.921168
| 0.92
| 0.856934
| 0.848467
| 0
| 0.024867
| 0.532198
| 9,628
| 208
| 125
| 46.288462
| 0.735568
| 0.016826
| 0
| 0.55814
| 0
| 0
| 0.010407
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.046512
| false
| 0
| 0.011628
| 0
| 0.186047
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
|
bcee20a134dccbbad6ef8b205ffca332b57bd569
| 12,233
|
py
|
Python
|
hax/_helpers.py
|
brandtbucher/hax
|
16a7555dfcd631a3ca67a7afef769142b1065563
|
[
"MIT"
] | 32
|
2019-10-16T08:05:44.000Z
|
2022-02-23T22:39:55.000Z
|
hax/_helpers.py
|
brandtbucher/hax
|
16a7555dfcd631a3ca67a7afef769142b1065563
|
[
"MIT"
] | 11
|
2019-10-16T16:49:02.000Z
|
2021-01-14T18:01:24.000Z
|
hax/_helpers.py
|
brandtbucher/hax
|
16a7555dfcd631a3ca67a7afef769142b1065563
|
[
"MIT"
] | null | null | null |
from sys import version_info as _version_info
from typing import Hashable as _Hashable
_USAGE_MESSAGE = "HAX inline bytecode functions are not meant to be used directly; you must decorate any functions that use them with @hax."
class HaxUsageError(RuntimeError):
pass
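# Each function below mirrors a CPython bytecode instruction. The bodies are never
# meant to execute: the @hax decorator rewrites calls to these names into the
# corresponding opcodes, so invoking one directly only raises HaxUsageError. The
# version guards track which opcodes exist on each supported Python release.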
def HAX_LABEL(arg: _Hashable) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def LABEL(arg: _Hashable) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def BEFORE_ASYNC_WITH() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if (3, 8) <= _version_info: # pragma: no cover
def BEGIN_FINALLY() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def BINARY_ADD() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def BINARY_AND() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def BINARY_FLOOR_DIVIDE() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def BINARY_LSHIFT() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def BINARY_MATRIX_MULTIPLY() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def BINARY_MODULO() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def BINARY_MULTIPLY() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def BINARY_OR() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def BINARY_POWER() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def BINARY_RSHIFT() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def BINARY_SUBSCR() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def BINARY_SUBTRACT() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def BINARY_TRUE_DIVIDE() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def BINARY_XOR() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if _version_info < (3, 8): # pragma: no cover
def BREAK_LOOP() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def BUILD_CONST_KEY_MAP(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def BUILD_LIST(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if _version_info < (3, 9): # pragma: no cover
def BUILD_LIST_UNPACK(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def BUILD_MAP(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if _version_info < (3, 9): # pragma: no cover
def BUILD_MAP_UNPACK(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def BUILD_MAP_UNPACK_WITH_CALL(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def BUILD_SET(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if _version_info < (3, 9): # pragma: no cover
def BUILD_SET_UNPACK(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def BUILD_SLICE(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def BUILD_STRING(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def BUILD_TUPLE(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if _version_info < (3, 9): # pragma: no cover
def BUILD_TUPLE_UNPACK(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def BUILD_TUPLE_UNPACK_WITH_CALL(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if (3, 8) <= _version_info: # pragma: no cover
def CALL_FINALLY(arg: _Hashable) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def CALL_FUNCTION(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def CALL_FUNCTION_EX(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def CALL_FUNCTION_KW(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if (3, 7) <= _version_info: # pragma: no cover
def CALL_METHOD(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def COMPARE_OP(arg: str) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if (3, 9) <= _version_info: # pragma: no cover
def CONTAINS_OP(invert: bool) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if _version_info < (3, 8): # pragma: no cover
def CONTINUE_LOOP(arg: _Hashable) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def DELETE_ATTR(arg: str) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def DELETE_DEREF(arg: str) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def DELETE_FAST(arg: str) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def DELETE_GLOBAL(arg: str) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def DELETE_NAME(arg: str) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def DELETE_SUBSCR() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if (3, 9) <= _version_info: # pragma: no cover
def DICT_MERGE(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def DICT_UPDATE(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def DUP_TOP() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def DUP_TOP_TWO() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if (3, 8) <= _version_info: # pragma: no cover
def END_ASYNC_FOR() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if _version_info < (3, 9): # pragma: no cover
def END_FINALLY() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def EXTENDED_ARG(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def FORMAT_VALUE(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def FOR_ITER(arg: _Hashable) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def GET_AITER() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def GET_ANEXT() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def GET_AWAITABLE() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def GET_ITER() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def GET_YIELD_FROM_ITER() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def IMPORT_FROM(arg: str) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def IMPORT_NAME(arg: str) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def IMPORT_STAR() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def INPLACE_ADD() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def INPLACE_AND() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def INPLACE_FLOOR_DIVIDE() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def INPLACE_LSHIFT() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def INPLACE_MATRIX_MULTIPLY() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def INPLACE_MODULO() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def INPLACE_MULTIPLY() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def INPLACE_OR() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def INPLACE_POWER() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def INPLACE_RSHIFT() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def INPLACE_SUBTRACT() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def INPLACE_TRUE_DIVIDE() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def INPLACE_XOR() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if (3, 9) <= _version_info: # pragma: no cover
def IS_OP(invert: bool) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def JUMP_ABSOLUTE(arg: _Hashable) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def JUMP_FORWARD(arg: _Hashable) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def JUMP_IF_FALSE_OR_POP(arg: _Hashable) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if (3, 9) <= _version_info: # pragma: no cover
def JUMP_IF_NOT_EXC_MATCH(arg: _Hashable) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def JUMP_IF_TRUE_OR_POP(arg: _Hashable) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def LIST_APPEND(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if (3, 9) <= _version_info: # pragma: no cover
def LIST_EXTEND(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def LIST_TO_TUPLE() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def LOAD_ASSERTION_ERROR() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def LOAD_ATTR(arg: str) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def LOAD_BUILD_CLASS() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def LOAD_CLASSDEREF(arg: str) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def LOAD_CLOSURE(arg: str) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def LOAD_CONST(arg: _Hashable) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def LOAD_DEREF(arg: str) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def LOAD_FAST(arg: str) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def LOAD_GLOBAL(arg: str) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if (3, 7) <= _version_info: # pragma: no cover
def LOAD_METHOD(arg: str) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def LOAD_NAME(arg: str) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def MAKE_FUNCTION(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def MAP_ADD(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def NOP() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def POP_BLOCK() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def POP_EXCEPT() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if (3, 8) <= _version_info: # pragma: no cover
def POP_FINALLY(arg: bool) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def POP_JUMP_IF_FALSE(arg: _Hashable) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def POP_JUMP_IF_TRUE(arg: _Hashable) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def POP_TOP() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def PRINT_EXPR() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def RAISE_VARARGS(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if (3, 9) <= _version_info: # pragma: no cover
def RERAISE() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def RETURN_VALUE() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if (3, 8) <= _version_info: # pragma: no cover
def ROT_FOUR() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def ROT_THREE() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def ROT_TWO() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if (3, 9) <= _version_info: # pragma: no cover
def SET_UPDATE(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def SETUP_ANNOTATIONS() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def SETUP_ASYNC_WITH(arg: _Hashable) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if _version_info < (3, 8): # pragma: no cover
def SETUP_EXCEPT(arg: _Hashable) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def SETUP_FINALLY(arg: _Hashable) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if _version_info < (3, 8): # pragma: no cover
def SETUP_LOOP(arg: _Hashable) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def SETUP_WITH(arg: _Hashable) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def SET_ADD(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def STORE_ATTR(arg: str) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if _version_info < (3, 7): # pragma: no cover
def STORE_ANNOTATION(arg: str) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def STORE_DEREF(arg: str) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def STORE_FAST(arg: str) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def STORE_GLOBAL(arg: str) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def STORE_NAME(arg: str) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def STORE_SUBSCR() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def UNARY_INVERT() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def UNARY_NEGATIVE() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def UNARY_NOT() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def UNARY_POSITIVE() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def UNPACK_EX(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def UNPACK_SEQUENCE(arg: int) -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if _version_info < (3, 9): # pragma: no cover
def WITH_CLEANUP_FINISH() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def WITH_CLEANUP_START() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
if (3, 9) <= _version_info: # pragma: no cover
def WITH_EXCEPT_START() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def YIELD_FROM() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
def YIELD_VALUE() -> None:
raise HaxUsageError(_USAGE_MESSAGE)
| 20.120066
| 140
| 0.709311
| 1,541
| 12,233
| 5.285529
| 0.099935
| 0.204788
| 0.372744
| 0.457459
| 0.908165
| 0.906446
| 0.885451
| 0.647023
| 0.524002
| 0.281277
| 0
| 0.005241
| 0.188915
| 12,233
| 607
| 141
| 20.153213
| 0.815662
| 0.03605
| 0
| 0.530945
| 0
| 0.003257
| 0.010285
| 0
| 0
| 0
| 0
| 0
| 0.003257
| 1
| 0.449511
| false
| 0.003257
| 0.016287
| 0
| 0.469055
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bcfa0d42d4f7b98a5f5115c8c128802cf673e57e
| 16,856
|
py
|
Python
|
test/test_fir.py
|
league/bifrost
|
be11bf96da2a5a09158a7e3a779ae99817074db8
|
[
"BSD-3-Clause"
] | 52
|
2017-01-08T04:49:25.000Z
|
2021-12-28T18:49:12.000Z
|
test/test_fir.py
|
league/bifrost
|
be11bf96da2a5a09158a7e3a779ae99817074db8
|
[
"BSD-3-Clause"
] | 131
|
2016-12-22T19:08:35.000Z
|
2022-03-31T22:55:26.000Z
|
test/test_fir.py
|
epic-astronomy/bifrost
|
9bc7000ce1b42a55b70dff56a2e03253358ba8eb
|
[
"BSD-3-Clause"
] | 19
|
2017-01-26T20:19:29.000Z
|
2022-03-04T15:40:25.000Z
|
# Copyright (c) 2017-2020, The Bifrost Authors. All rights reserved.
# Copyright (c) 2017-2020, The University of New Mexico. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
# * Neither the name of The Bifrost Authors nor the names of its
# contributors may be used to endorse or promote products derived
# from this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS ``AS IS'' AND ANY
# EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
# OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""This set of unit tests check the functionality
on the bifrost FIR filter."""
# Python2 compatibility
from __future__ import division
import ctypes
import unittest
import numpy as np
from scipy.signal import lfilter, lfiltic
from bifrost.fir import Fir
import bifrost as bf
MTOL = 1e-6 # Relative tolerance at the mean magnitude
RTOL = 1e-1  # Plain relative tolerance passed to assert_allclose
def compare(result, gold):
#np.testing.assert_allclose(result, gold, RTOL, ATOL)
# Note: We compare using an absolute tolerance equal to a fraction of the
# mean magnitude. This ignores large relative errors on values with
# magnitudes much smaller than the mean.
absmean = np.abs(gold).mean()
np.testing.assert_allclose(result, gold, rtol=RTOL, atol=MTOL * absmean)
class TestFIR(unittest.TestCase):
def setUp(self):
np.random.seed(1234)
self.shape2D = (10000, 96*2)
self.shape3D = (10000, 48, 4)
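        # A 27-tap symmetric (linear-phase) FIR kernel shared by every test below.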
self.coeffs = np.array(( 0.0035002, -0.0053712, 0.0090177, -0.013789, 0.0196580,
-0.0264910, 0.0340400, -0.0419570, 0.049807, -0.0571210,
0.0634200, -0.0682750, 0.0713370, 0.927620, 0.0713370,
-0.0682750, 0.0634200, -0.0571210, 0.049807, -0.0419570,
0.0340400, -0.0264910, 0.0196580, -0.013789, 0.0090177,
-0.0053712, 0.0035002), dtype=np.float64)
def test_2d_initial(self):
shape = self.shape2D
known_data = np.random.normal(size=shape).astype(np.float32).view(np.complex64)
idata = bf.ndarray(known_data, space='cuda')
odata = bf.empty_like(idata)
coeffs = self.coeffs*1.0
coeffs.shape += (1,)
coeffs = np.repeat(coeffs, idata.shape[1], axis=1)
coeffs.shape = (coeffs.shape[0],idata.shape[1])
coeffs = bf.ndarray(coeffs, space='cuda')
fir = Fir()
fir.init(coeffs, 1)
fir.execute(idata, odata)
odata = odata.copy('system')
for i in range(known_data.shape[1]):
zf = lfiltic(self.coeffs, 1.0, 0.0)
known_result, zf = lfilter(self.coeffs, 1.0, known_data[:,i], zi=zf)
compare(odata[:,i], known_result)
def test_3d_initial(self):
shape = self.shape3D
known_data = np.random.normal(size=shape).astype(np.float32).view(np.complex64)
idata = bf.ndarray(known_data, space='cuda')
odata = bf.empty_like(idata)
coeffs = self.coeffs*1.0
coeffs.shape += (1,)
coeffs = np.repeat(coeffs, idata.shape[1]*idata.shape[2], axis=1)
coeffs.shape = (coeffs.shape[0],idata.shape[1],idata.shape[2])
coeffs = bf.ndarray(coeffs, space='cuda')
fir = Fir()
fir.init(coeffs, 1)
fir.execute(idata, odata)
odata = odata.copy('system')
for i in range(known_data.shape[1]):
for j in range(known_data.shape[2]):
zf = lfiltic(self.coeffs, 1.0, 0.0)
known_result, zf = lfilter(self.coeffs, 1.0, known_data[:,i,j], zi=zf)
compare(odata[:,i,j], known_result)
def test_2d_and_3d(self):
shape = self.shape3D
known_data = np.random.normal(size=shape).astype(np.float32).view(np.complex64)
idata = bf.ndarray(known_data, space='cuda')
odata = bf.empty_like(idata)
coeffs = self.coeffs*1.0
coeffs.shape += (1,)
coeffs = np.repeat(coeffs, idata.shape[1]*idata.shape[2], axis=1)
coeffs = bf.ndarray(coeffs, space='cuda')
fir = Fir()
fir.init(coeffs, 1)
fir.execute(idata, odata)
odata = odata.copy('system')
for i in range(known_data.shape[1]):
for j in range(known_data.shape[2]):
zf = lfiltic(self.coeffs, 1.0, 0.0)
known_result, zf = lfilter(self.coeffs, 1.0, known_data[:,i,j], zi=zf)
compare(odata[:,i,j], known_result)
def test_3d_and_2d(self):
shape = self.shape2D
known_data = np.random.normal(size=shape).astype(np.float32).view(np.complex64)
idata = bf.ndarray(known_data, space='cuda')
odata = bf.empty_like(idata)
coeffs = self.coeffs*1.0
coeffs.shape += (1,)
coeffs = np.repeat(coeffs, idata.shape[1], axis=1)
coeffs.shape = (coeffs.shape[0],idata.shape[1]//2, 2)
coeffs = bf.ndarray(coeffs, space='cuda')
fir = Fir()
fir.init(coeffs, 1)
fir.execute(idata, odata)
odata = odata.copy('system')
for i in range(known_data.shape[1]):
zf = lfiltic(self.coeffs, 1.0, 0.0)
known_result, zf = lfilter(self.coeffs, 1.0, known_data[:,i], zi=zf)
compare(odata[:,i], known_result)
def test_2d_active(self):
shape = self.shape2D
known_data = np.random.normal(size=shape).astype(np.float32).view(np.complex64)
idata = bf.ndarray(known_data, space='cuda')
odata = bf.empty_like(idata)
coeffs = self.coeffs*1.0
coeffs.shape += (1,)
coeffs = np.repeat(coeffs, idata.shape[1], axis=1)
coeffs.shape = (coeffs.shape[0],idata.shape[1])
coeffs = bf.ndarray(coeffs, space='cuda')
fir = Fir()
fir.init(coeffs, 1)
fir.execute(idata, odata)
fir.execute(idata, odata)
odata = odata.copy('system')
for i in range(known_data.shape[1]):
zf = lfiltic(self.coeffs, 1.0, 0.0)
known_result, zf = lfilter(self.coeffs, 1.0, known_data[:,i], zi=zf)
known_result, zf = lfilter(self.coeffs, 1.0, known_data[:,i], zi=zf)
compare(odata[:,i], known_result)
def test_3d_active(self):
shape = self.shape3D
known_data = np.random.normal(size=shape).astype(np.float32).view(np.complex64)
idata = bf.ndarray(known_data, space='cuda')
odata = bf.empty_like(idata)
coeffs = self.coeffs*1.0
coeffs.shape += (1,)
coeffs = np.repeat(coeffs, idata.shape[1]*idata.shape[2], axis=1)
coeffs.shape = (coeffs.shape[0],idata.shape[1],idata.shape[2])
coeffs = bf.ndarray(coeffs, space='cuda')
fir = Fir()
fir.init(coeffs, 1)
fir.execute(idata, odata)
fir.execute(idata, odata)
odata = odata.copy('system')
for i in range(known_data.shape[1]):
for j in range(known_data.shape[2]):
zf = lfiltic(self.coeffs, 1.0, 0.0)
known_result, zf = lfilter(self.coeffs, 1.0, known_data[:,i,j], zi=zf)
known_result, zf = lfilter(self.coeffs, 1.0, known_data[:,i,j], zi=zf)
compare(odata[:,i,j], known_result)
def test_2d_decimate_initial(self):
shape = self.shape2D
known_data = np.random.normal(size=shape).astype(np.float32).view(np.complex64)
idata = bf.ndarray(known_data, space='cuda')
odata = bf.empty((idata.shape[0]//2, idata.shape[1]), dtype=idata.dtype, space='cuda')
coeffs = self.coeffs*1.0
coeffs.shape += (1,)
coeffs = np.repeat(coeffs, idata.shape[1], axis=1)
coeffs.shape = (coeffs.shape[0],idata.shape[1])
coeffs = bf.ndarray(coeffs, space='cuda')
fir = Fir()
fir.init(coeffs, 2)
fir.execute(idata, odata)
odata = odata.copy('system')
for i in range(known_data.shape[1]):
zf = lfiltic(self.coeffs, 1.0, 0.0)
known_result, zf = lfilter(self.coeffs, 1.0, known_data[:,i], zi=zf)
known_result = known_result[0::2]
compare(odata[:,i], known_result)
def test_3d_decimate_initial(self):
shape = self.shape3D
known_data = np.random.normal(size=shape).astype(np.float32).view(np.complex64)
idata = bf.ndarray(known_data, space='cuda')
odata = bf.empty((idata.shape[0]//2, idata.shape[1], idata.shape[2]), dtype=idata.dtype, space='cuda')
coeffs = self.coeffs*1.0
coeffs.shape += (1,)
coeffs = np.repeat(coeffs, idata.shape[1]*idata.shape[2], axis=1)
coeffs.shape = (coeffs.shape[0],idata.shape[1],idata.shape[2])
coeffs = bf.ndarray(coeffs, space='cuda')
fir = Fir()
fir.init(coeffs, 2)
fir.execute(idata, odata)
odata = odata.copy('system')
for i in range(known_data.shape[1]):
for j in range(known_data.shape[2]):
zf = lfiltic(self.coeffs, 1.0, 0.0)
known_result, zf = lfilter(self.coeffs, 1.0, known_data[:,i,j], zi=zf)
known_result = known_result[0::2]
compare(odata[:,i,j], known_result)
def test_2d_decimate_active(self):
shape = self.shape2D
known_data = np.random.normal(size=shape).astype(np.float32).view(np.complex64)
idata = bf.ndarray(known_data, space='cuda')
odata = bf.empty((idata.shape[0]//2, idata.shape[1]), dtype=idata.dtype, space='cuda')
coeffs = self.coeffs*1.0
coeffs.shape += (1,)
coeffs = np.repeat(coeffs, idata.shape[1], axis=1)
coeffs.shape = (coeffs.shape[0],idata.shape[1])
coeffs = bf.ndarray(coeffs, space='cuda')
fir = Fir()
fir.init(coeffs, 2)
fir.execute(idata, odata)
fir.execute(idata, odata)
odata = odata.copy('system')
for i in range(known_data.shape[1]):
zf = lfiltic(self.coeffs, 1.0, 0.0)
known_result, zf = lfilter(self.coeffs, 1.0, known_data[:,i], zi=zf)
known_result, zf = lfilter(self.coeffs, 1.0, known_data[:,i], zi=zf)
known_result = known_result[0::2]
compare(odata[:,i], known_result)
def test_3d_decimate_active(self):
shape = self.shape3D
known_data = np.random.normal(size=shape).astype(np.float32).view(np.complex64)
idata = bf.ndarray(known_data, space='cuda')
odata = bf.empty((idata.shape[0]//2, idata.shape[1], idata.shape[2]), dtype=idata.dtype, space='cuda')
coeffs = self.coeffs*1.0
coeffs.shape += (1,)
coeffs = np.repeat(coeffs, idata.shape[1]*idata.shape[2], axis=1)
coeffs.shape = (coeffs.shape[0],idata.shape[1],idata.shape[2])
coeffs = bf.ndarray(coeffs, space='cuda')
fir = Fir()
fir.init(coeffs, 2)
fir.execute(idata, odata)
fir.execute(idata, odata)
odata = odata.copy('system')
for i in range(known_data.shape[1]):
for j in range(known_data.shape[2]):
zf = lfiltic(self.coeffs, 1.0, 0.0)
known_result, zf = lfilter(self.coeffs, 1.0, known_data[:,i,j], zi=zf)
known_result, zf = lfilter(self.coeffs, 1.0, known_data[:,i,j], zi=zf)
known_result = known_result[0::2]
compare(odata[:,i,j], known_result)
def test_2d_update_coeffs(self):
shape = self.shape2D
known_data = np.random.normal(size=shape).astype(np.float32).view(np.complex64)
idata = bf.ndarray(known_data, space='cuda')
odata = bf.empty_like(idata)
coeffs = self.coeffs*1.0
coeffs.shape += (1,)
coeffs = np.repeat(coeffs, idata.shape[1], axis=1)
coeffs.shape = (coeffs.shape[0],idata.shape[1])
coeffs2 = coeffs*2.0
coeffs = bf.ndarray(coeffs, space='cuda')
coeffs2 = bf.ndarray(coeffs2, space='cuda')
fir = Fir()
fir.init(coeffs, 1)
fir.execute(idata, odata)
fir.set_coeffs(coeffs2)
fir.execute(idata, odata)
odata = odata.copy('system')
for i in range(known_data.shape[1]):
zf = lfiltic(self.coeffs, 1.0, 0.0)
known_result, zf = lfilter(self.coeffs*2.0, 1.0, known_data[:,i], zi=zf)
compare(odata[:,i], known_result)
def test_3d_update_coeffs(self):
shape = self.shape3D
known_data = np.random.normal(size=shape).astype(np.float32).view(np.complex64)
idata = bf.ndarray(known_data, space='cuda')
odata = bf.empty_like(idata)
coeffs = self.coeffs*1.0
coeffs.shape += (1,)
coeffs = np.repeat(coeffs, idata.shape[1]*idata.shape[2], axis=1)
coeffs.shape = (coeffs.shape[0],idata.shape[1],idata.shape[2])
coeffs2 = coeffs*2.0
coeffs = bf.ndarray(coeffs, space='cuda')
coeffs2 = bf.ndarray(coeffs2, space='cuda')
fir = Fir()
fir.init(coeffs, 1)
fir.execute(idata, odata)
fir.set_coeffs(coeffs2)
fir.execute(idata, odata)
odata = odata.copy('system')
for i in range(known_data.shape[1]):
for j in range(known_data.shape[2]):
zf = lfiltic(self.coeffs, 1.0, 0.0)
known_result, zf = lfilter(self.coeffs*2.0, 1.0, known_data[:,i,j], zi=zf)
compare(odata[:,i,j], known_result)
def test_2d_reset_state(self):
shape = self.shape2D
known_data = np.random.normal(size=shape).astype(np.float32).view(np.complex64)
idata = bf.ndarray(known_data, space='cuda')
odata = bf.empty_like(idata)
coeffs = self.coeffs*1.0
coeffs.shape += (1,)
coeffs = np.repeat(coeffs, idata.shape[1], axis=1)
coeffs.shape = (coeffs.shape[0],idata.shape[1])
coeffs = bf.ndarray(coeffs, space='cuda')
fir = Fir()
fir.init(coeffs, 1)
fir.execute(idata, odata)
fir.reset_state()
fir.execute(idata, odata)
odata = odata.copy('system')
for i in range(known_data.shape[1]):
zf = lfiltic(self.coeffs, 1.0, 0.0)
known_result, zf = lfilter(self.coeffs, 1.0, known_data[:,i], zi=zf)
compare(odata[:,i], known_result)
def test_3d_reset_state(self):
shape = self.shape3D
known_data = np.random.normal(size=shape).astype(np.float32).view(np.complex64)
idata = bf.ndarray(known_data, space='cuda')
odata = bf.empty_like(idata)
coeffs = self.coeffs*1.0
coeffs.shape += (1,)
coeffs = np.repeat(coeffs, idata.shape[1]*idata.shape[2], axis=1)
coeffs.shape = (coeffs.shape[0],idata.shape[1],idata.shape[2])
coeffs2 = coeffs*2.0
coeffs = bf.ndarray(coeffs, space='cuda')
coeffs2 = bf.ndarray(coeffs2, space='cuda')
fir = Fir()
fir.init(coeffs, 1)
fir.execute(idata, odata)
fir.reset_state()
fir.execute(idata, odata)
odata = odata.copy('system')
for i in range(known_data.shape[1]):
for j in range(known_data.shape[2]):
zf = lfiltic(self.coeffs, 1.0, 0.0)
known_result, zf = lfilter(self.coeffs, 1.0, known_data[:,i,j], zi=zf)
compare(odata[:,i,j], known_result)
| 43.33162
| 110
| 0.588811
| 2,312
| 16,856
| 4.221453
| 0.109429
| 0.061783
| 0.04959
| 0.054098
| 0.808607
| 0.794467
| 0.794467
| 0.786885
| 0.786885
| 0.786885
| 0
| 0.05107
| 0.276281
| 16,856
| 388
| 111
| 43.443299
| 0.748996
| 0.115093
| 0
| 0.869707
| 0
| 0
| 0.015056
| 0
| 0
| 0
| 0
| 0
| 0.003257
| 1
| 0.052117
| false
| 0
| 0.022801
| 0
| 0.078176
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bcfb821a2d055df521586df14276ce8dc73b2f2c
| 58,493
|
py
|
Python
|
tests/test_data/test_loading.py
|
SebastienLinker/mmaction2
|
07df27e5a3f91eabc90f905c2b6a5e25ed1366d0
|
[
"Apache-2.0"
] | 1
|
2021-01-28T06:01:53.000Z
|
2021-01-28T06:01:53.000Z
|
tests/test_data/test_loading.py
|
suki7/mmaction_fsl_78
|
8e61f2a145a14dea0475867ce7d3a3e9936a7847
|
[
"Apache-2.0"
] | null | null | null |
tests/test_data/test_loading.py
|
suki7/mmaction_fsl_78
|
8e61f2a145a14dea0475867ce7d3a3e9936a7847
|
[
"Apache-2.0"
] | null | null | null |
import copy
import os.path as osp
import mmcv
import numpy as np
import pytest
import torch
from numpy.testing import assert_array_almost_equal, assert_array_equal
# yapf: disable
from mmaction.datasets.pipelines import (AudioDecode, AudioDecodeInit,
AudioFeatureSelector, DecordDecode,
DecordInit, DenseSampleFrames,
FrameSelector,
GenerateLocalizationLabels,
LoadAudioFeature, LoadHVULabel,
LoadLocalizationFeature,
LoadProposals, OpenCVDecode,
OpenCVInit, PyAVDecode, PyAVInit,
RawFrameDecode, SampleAVAFrames,
SampleFrames, SampleProposalFrames,
UntrimmedSampleFrames)
# yapf: enable
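# Lightweight stand-in for an SSN proposal record; instances are only used to
# populate `out_proposals` in TestLoading.setup_class below.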
class ExampleSSNInstance:
def __init__(self,
start_frame,
end_frame,
num_frames,
label=None,
best_iou=None,
overlap_self=None):
self.start_frame = start_frame
self.end_frame = min(end_frame, num_frames)
self.label = label if label is not None else -1
self.coverage = (end_frame - start_frame) / num_frames
self.best_iou = best_iou
self.overlap_self = overlap_self
class TestLoading:
@staticmethod
def check_keys_contain(result_keys, target_keys):
"""Check if all elements in target_keys is in result_keys."""
return set(target_keys).issubset(set(result_keys))
@classmethod
def setup_class(cls):
cls.img_path = osp.join(
osp.dirname(osp.dirname(__file__)), 'data/test.jpg')
cls.video_path = osp.join(
osp.dirname(osp.dirname(__file__)), 'data/test.mp4')
cls.wav_path = osp.join(
osp.dirname(osp.dirname(__file__)), 'data/test.wav')
cls.audio_spec_path = osp.join(
osp.dirname(osp.dirname(__file__)), 'data/test.npy')
cls.img_dir = osp.join(
osp.dirname(osp.dirname(__file__)), 'data/test_imgs')
cls.raw_feature_dir = osp.join(
osp.dirname(osp.dirname(__file__)),
'data/test_activitynet_features')
cls.bsp_feature_dir = osp.join(
osp.dirname(osp.dirname(__file__)), 'data/test_bsp_features')
cls.proposals_dir = osp.join(
osp.dirname(osp.dirname(__file__)), 'data/test_proposals')
cls.total_frames = 5
cls.filename_tmpl = 'img_{:05}.jpg'
cls.flow_filename_tmpl = '{}_{:05d}.jpg'
video_total_frames = len(mmcv.VideoReader(cls.video_path))
cls.audio_total_frames = video_total_frames
cls.video_results = dict(
filename=cls.video_path,
label=1,
total_frames=video_total_frames,
start_index=0)
cls.audio_results = dict(
audios=np.random.randn(1280, ),
audio_path=cls.wav_path,
total_frames=cls.audio_total_frames,
label=1,
start_index=0)
cls.audio_feature_results = dict(
audios=np.random.randn(128, 80),
audio_path=cls.audio_spec_path,
total_frames=cls.audio_total_frames,
label=1,
start_index=0)
cls.frame_results = dict(
frame_dir=cls.img_dir,
total_frames=cls.total_frames,
filename_tmpl=cls.filename_tmpl,
start_index=1,
modality='RGB',
offset=0,
label=1)
cls.flow_frame_results = dict(
frame_dir=cls.img_dir,
total_frames=cls.total_frames,
filename_tmpl=cls.flow_filename_tmpl,
modality='Flow',
offset=0,
label=1)
cls.action_results = dict(
video_name='v_test1',
data_prefix=cls.raw_feature_dir,
temporal_scale=5,
boundary_ratio=0.1,
duration_second=10,
duration_frame=10,
feature_frame=8,
annotations=[{
'segment': [3.0, 5.0],
'label': 'Rock climbing'
}])
cls.proposal_results = dict(
frame_dir=cls.img_dir,
video_id='test_imgs',
total_frames=cls.total_frames,
filename_tmpl=cls.filename_tmpl,
start_index=1,
out_proposals=[[[
'test_imgs',
ExampleSSNInstance(1, 4, 10, 1, 1, 1)
], 0], [['test_imgs',
ExampleSSNInstance(2, 5, 10, 2, 1, 1)], 0]])
cls.ava_results = dict(
fps=30, timestamp=902, timestamp_start=840, shot_info=(0, 27000))
cls.hvu_label_example1 = dict(
categories=['action', 'object', 'scene', 'concept'],
category_nums=[2, 5, 3, 2],
label=dict(action=[0], object=[2, 3], scene=[0, 1]))
cls.hvu_label_example2 = dict(
categories=['action', 'object', 'scene', 'concept'],
category_nums=[2, 5, 3, 2],
label=dict(action=[1], scene=[1, 2], concept=[1]))
def test_load_hvu_label(self):
hvu_label_example1 = copy.deepcopy(self.hvu_label_example1)
hvu_label_example2 = copy.deepcopy(self.hvu_label_example2)
categories = hvu_label_example1['categories']
category_nums = hvu_label_example1['category_nums']
num_tags = sum(category_nums)
num_categories = len(categories)
loader = LoadHVULabel()
result1 = loader(hvu_label_example1)
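        # Tag indices are laid out per category (category_nums=[2, 5, 3, 2]):
        # action -> 0-1, object -> 2-6, scene -> 7-9, concept -> 10-11.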
label1 = torch.zeros(num_tags)
mask1 = torch.zeros(num_tags)
category_mask1 = torch.zeros(num_categories)
label1[[0, 4, 5, 7, 8]] = 1.
mask1[:10] = 1.
category_mask1[:3] = 1.
assert torch.all(torch.eq(label1, result1['label']))
assert torch.all(torch.eq(mask1, result1['mask']))
assert torch.all(torch.eq(category_mask1, result1['category_mask']))
result2 = loader(hvu_label_example2)
label2 = torch.zeros(num_tags)
mask2 = torch.zeros(num_tags)
category_mask2 = torch.zeros(num_categories)
label2[[1, 8, 9, 11]] = 1.
mask2[:2] = 1.
mask2[7:] = 1.
category_mask2[[0, 2, 3]] = 1.
assert torch.all(torch.eq(label2, result2['label']))
assert torch.all(torch.eq(mask2, result2['mask']))
assert torch.all(torch.eq(category_mask2, result2['category_mask']))
def test_sample_frames(self):
target_keys = [
'frame_inds', 'clip_len', 'frame_interval', 'num_clips',
'total_frames'
]
with pytest.warns(UserWarning):
# start_index has been deprecated
config = dict(
clip_len=3, frame_interval=1, num_clips=5, start_index=1)
SampleFrames(**config)
# Sample Frame with no temporal_jitter
# clip_len=3, frame_interval=1, num_clips=5
video_result = copy.deepcopy(self.video_results)
frame_result = copy.deepcopy(self.frame_results)
config = dict(
clip_len=3, frame_interval=1, num_clips=5, temporal_jitter=False)
sample_frames = SampleFrames(**config)
sample_frames_results = sample_frames(video_result)
assert self.check_keys_contain(sample_frames_results.keys(),
target_keys)
assert len(sample_frames_results['frame_inds']) == 15
sample_frames_results = sample_frames(frame_result)
assert len(sample_frames_results['frame_inds']) == 15
assert np.max(sample_frames_results['frame_inds']) <= 5
assert np.min(sample_frames_results['frame_inds']) >= 1
# Sample Frame with no temporal_jitter
# clip_len=5, frame_interval=1, num_clips=5,
# out_of_bound_opt='repeat_last'
video_result = copy.deepcopy(self.video_results)
frame_result = copy.deepcopy(self.frame_results)
config = dict(
clip_len=5,
frame_interval=1,
num_clips=5,
temporal_jitter=False,
out_of_bound_opt='repeat_last')
sample_frames = SampleFrames(**config)
sample_frames_results = sample_frames(video_result)
def check_monotonous(arr):
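            # True when arr is non-decreasing.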
length = arr.shape[0]
for i in range(length - 1):
if arr[i] > arr[i + 1]:
return False
return True
assert self.check_keys_contain(sample_frames_results.keys(),
target_keys)
assert len(sample_frames_results['frame_inds']) == 25
frame_inds = sample_frames_results['frame_inds'].reshape([5, 5])
for i in range(5):
assert check_monotonous(frame_inds[i])
sample_frames_results = sample_frames(frame_result)
assert len(sample_frames_results['frame_inds']) == 25
frame_inds = sample_frames_results['frame_inds'].reshape([5, 5])
for i in range(5):
assert check_monotonous(frame_inds[i])
assert np.max(sample_frames_results['frame_inds']) <= 5
assert np.min(sample_frames_results['frame_inds']) >= 1
# Sample Frame with temporal_jitter
# clip_len=4, frame_interval=2, num_clips=5
video_result = copy.deepcopy(self.video_results)
frame_result = copy.deepcopy(self.frame_results)
config = dict(
clip_len=4, frame_interval=2, num_clips=5, temporal_jitter=True)
sample_frames = SampleFrames(**config)
sample_frames_results = sample_frames(video_result)
assert self.check_keys_contain(sample_frames_results.keys(),
target_keys)
assert len(sample_frames_results['frame_inds']) == 20
sample_frames_results = sample_frames(frame_result)
assert len(sample_frames_results['frame_inds']) == 20
assert np.max(sample_frames_results['frame_inds']) <= 5
assert np.min(sample_frames_results['frame_inds']) >= 1
# Sample Frame with no temporal_jitter in test mode
# clip_len=4, frame_interval=1, num_clips=6
video_result = copy.deepcopy(self.video_results)
frame_result = copy.deepcopy(self.frame_results)
config = dict(
clip_len=4,
frame_interval=1,
num_clips=6,
temporal_jitter=False,
test_mode=True)
sample_frames = SampleFrames(**config)
sample_frames_results = sample_frames(video_result)
assert self.check_keys_contain(sample_frames_results.keys(),
target_keys)
assert len(sample_frames_results['frame_inds']) == 24
sample_frames_results = sample_frames(frame_result)
assert len(sample_frames_results['frame_inds']) == 24
assert np.max(sample_frames_results['frame_inds']) <= 5
assert np.min(sample_frames_results['frame_inds']) >= 1
# Sample Frame with no temporal_jitter in test mode
# clip_len=3, frame_interval=1, num_clips=6
video_result = copy.deepcopy(self.video_results)
frame_result = copy.deepcopy(self.frame_results)
config = dict(
clip_len=3,
frame_interval=1,
num_clips=6,
temporal_jitter=False,
test_mode=True)
sample_frames = SampleFrames(**config)
sample_frames_results = sample_frames(video_result)
assert self.check_keys_contain(sample_frames_results.keys(),
target_keys)
assert len(sample_frames_results['frame_inds']) == 18
sample_frames_results = sample_frames(frame_result)
assert len(sample_frames_results['frame_inds']) == 18
assert np.max(sample_frames_results['frame_inds']) <= 5
assert np.min(sample_frames_results['frame_inds']) >= 1
# Sample Frame with no temporal_jitter to get clip_offsets
# clip_len=1, frame_interval=1, num_clips=8
video_result = copy.deepcopy(self.video_results)
frame_result = copy.deepcopy(self.frame_results)
frame_result['total_frames'] = 6
config = dict(
clip_len=1,
frame_interval=1,
num_clips=8,
temporal_jitter=False,
test_mode=True)
sample_frames = SampleFrames(**config)
sample_frames_results = sample_frames(video_result)
assert self.check_keys_contain(sample_frames_results.keys(),
target_keys)
assert len(sample_frames_results['frame_inds']) == 8
sample_frames_results = sample_frames(frame_result)
assert len(sample_frames_results['frame_inds']) == 8
assert_array_equal(sample_frames_results['frame_inds'],
np.array([1, 2, 2, 3, 4, 5, 5, 6]))
# Sample Frame with no temporal_jitter to get clip_offsets
# clip_len=1, frame_interval=1, num_clips=8
video_result = copy.deepcopy(self.video_results)
frame_result = copy.deepcopy(self.frame_results)
frame_result['total_frames'] = 6
config = dict(
clip_len=1,
frame_interval=1,
num_clips=8,
temporal_jitter=False,
test_mode=True)
sample_frames = SampleFrames(**config)
sample_frames_results = sample_frames(video_result)
assert sample_frames_results['start_index'] == 0
assert self.check_keys_contain(sample_frames_results.keys(),
target_keys)
assert len(sample_frames_results['frame_inds']) == 8
sample_frames_results = sample_frames(frame_result)
assert len(sample_frames_results['frame_inds']) == 8
assert_array_equal(sample_frames_results['frame_inds'],
np.array([1, 2, 2, 3, 4, 5, 5, 6]))
# Sample Frame with no temporal_jitter to get clip_offsets zero
# clip_len=6, frame_interval=1, num_clips=1
video_result = copy.deepcopy(self.video_results)
frame_result = copy.deepcopy(self.frame_results)
frame_result['total_frames'] = 5
config = dict(
clip_len=6,
frame_interval=1,
num_clips=1,
temporal_jitter=False,
test_mode=True)
sample_frames = SampleFrames(**config)
sample_frames_results = sample_frames(video_result)
assert sample_frames_results['start_index'] == 0
assert self.check_keys_contain(sample_frames_results.keys(),
target_keys)
assert len(sample_frames_results['frame_inds']) == 6
sample_frames_results = sample_frames(frame_result)
assert len(sample_frames_results['frame_inds']) == 6
assert_array_equal(sample_frames_results['frame_inds'],
[1, 2, 3, 4, 5, 1])
# Sample Frame with no temporal_jitter to get avg_interval <= 0
# clip_len=12, frame_interval=1, num_clips=20
video_result = copy.deepcopy(self.video_results)
frame_result = copy.deepcopy(self.frame_results)
frame_result['total_frames'] = 30
config = dict(
clip_len=12,
frame_interval=1,
num_clips=20,
temporal_jitter=False,
test_mode=False)
sample_frames = SampleFrames(**config)
sample_frames_results = sample_frames(video_result)
assert sample_frames_results['start_index'] == 0
assert self.check_keys_contain(sample_frames_results.keys(),
target_keys)
assert len(sample_frames_results['frame_inds']) == 240
sample_frames_results = sample_frames(frame_result)
assert len(sample_frames_results['frame_inds']) == 240
assert np.max(sample_frames_results['frame_inds']) <= 30
assert np.min(sample_frames_results['frame_inds']) >= 1
# Sample Frame with no temporal_jitter to get clip_offsets
# clip_len=1, frame_interval=1, num_clips=8
video_result = copy.deepcopy(self.video_results)
frame_result = copy.deepcopy(self.frame_results)
frame_result['total_frames'] = 6
config = dict(
clip_len=1,
frame_interval=1,
num_clips=8,
temporal_jitter=False,
test_mode=False)
sample_frames = SampleFrames(**config)
sample_frames_results = sample_frames(video_result)
assert self.check_keys_contain(sample_frames_results.keys(),
target_keys)
assert sample_frames_results['start_index'] == 0
assert len(sample_frames_results['frame_inds']) == 8
sample_frames_results = sample_frames(frame_result)
assert len(sample_frames_results['frame_inds']) == 8
assert_array_equal(sample_frames_results['frame_inds'],
np.array([1, 2, 3, 3, 4, 5, 5, 6]))
# Sample Frame with no temporal_jitter to get clip_offsets zero
# clip_len=12, frame_interval=1, num_clips=2
video_result = copy.deepcopy(self.video_results)
frame_result = copy.deepcopy(self.frame_results)
frame_result['total_frames'] = 10
config = dict(
clip_len=12,
frame_interval=1,
num_clips=2,
temporal_jitter=False,
test_mode=False)
sample_frames = SampleFrames(**config)
sample_frames_results = sample_frames(video_result)
assert sample_frames_results['start_index'] == 0
assert self.check_keys_contain(sample_frames_results.keys(),
target_keys)
assert len(sample_frames_results['frame_inds']) == 24
sample_frames_results = sample_frames(frame_result)
assert len(sample_frames_results['frame_inds']) == 24
assert np.max(sample_frames_results['frame_inds']) <= 10
assert np.min(sample_frames_results['frame_inds']) >= 1
# Sample Frame using twice sample
# clip_len=12, frame_interval=1, num_clips=2
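        # twice_sample presumably samples every position a second time with a
        # fixed offset, doubling the output: 12 * 2 * 2 = 48 indices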
video_result = copy.deepcopy(self.video_results)
frame_result = copy.deepcopy(self.frame_results)
frame_result['total_frames'] = 40
config = dict(
clip_len=12,
frame_interval=1,
num_clips=2,
temporal_jitter=False,
twice_sample=True,
test_mode=True)
sample_frames = SampleFrames(**config)
sample_frames_results = sample_frames(video_result)
assert sample_frames_results['start_index'] == 0
assert self.check_keys_contain(sample_frames_results.keys(),
target_keys)
assert len(sample_frames_results['frame_inds']) == 48
sample_frames_results = sample_frames(frame_result)
assert len(sample_frames_results['frame_inds']) == 48
assert np.max(sample_frames_results['frame_inds']) <= 40
assert np.min(sample_frames_results['frame_inds']) >= 1
def test_dense_sample_frames(self):
target_keys = [
'frame_inds', 'clip_len', 'frame_interval', 'num_clips',
'total_frames'
]
# Dense sample with no temporal_jitter in test mode
# clip_len=4, frame_interval=1, num_clips=6
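        # in test mode clips are presumably drawn at a default of 10 sample
        # positions, hence 4 * 6 * 10 = 240 indices (vs. 24 in training mode)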
video_result = copy.deepcopy(self.video_results)
frame_result = copy.deepcopy(self.frame_results)
config = dict(
clip_len=4,
frame_interval=1,
num_clips=6,
temporal_jitter=False,
test_mode=True)
dense_sample_frames = DenseSampleFrames(**config)
dense_sample_frames_results = dense_sample_frames(video_result)
assert dense_sample_frames_results['start_index'] == 0
assert self.check_keys_contain(dense_sample_frames_results.keys(),
target_keys)
assert len(dense_sample_frames_results['frame_inds']) == 240
dense_sample_frames_results = dense_sample_frames(frame_result)
assert len(dense_sample_frames_results['frame_inds']) == 240
# Dense sample with no temporal_jitter
# clip_len=4, frame_interval=1, num_clips=6
video_result = copy.deepcopy(self.video_results)
frame_result = copy.deepcopy(self.frame_results)
config = dict(
clip_len=4, frame_interval=1, num_clips=6, temporal_jitter=False)
dense_sample_frames = DenseSampleFrames(**config)
dense_sample_frames_results = dense_sample_frames(video_result)
assert dense_sample_frames_results['start_index'] == 0
assert self.check_keys_contain(dense_sample_frames_results.keys(),
target_keys)
assert len(dense_sample_frames_results['frame_inds']) == 24
dense_sample_frames_results = dense_sample_frames(frame_result)
assert len(dense_sample_frames_results['frame_inds']) == 24
# Dense sample with no temporal_jitter, sample_range=32 in test mode
# clip_len=4, frame_interval=1, num_clips=6
video_result = copy.deepcopy(self.video_results)
frame_result = copy.deepcopy(self.frame_results)
config = dict(
clip_len=4,
frame_interval=1,
num_clips=6,
sample_range=32,
temporal_jitter=False,
test_mode=True)
dense_sample_frames = DenseSampleFrames(**config)
dense_sample_frames_results = dense_sample_frames(video_result)
assert dense_sample_frames_results['start_index'] == 0
assert self.check_keys_contain(dense_sample_frames_results.keys(),
target_keys)
assert len(dense_sample_frames_results['frame_inds']) == 240
dense_sample_frames_results = dense_sample_frames(frame_result)
assert len(dense_sample_frames_results['frame_inds']) == 240
# Dense sample with no temporal_jitter, sample_range=32
# clip_len=4, frame_interval=1, num_clips=6
video_result = copy.deepcopy(self.video_results)
frame_result = copy.deepcopy(self.frame_results)
config = dict(
clip_len=4,
frame_interval=1,
num_clips=6,
sample_range=32,
temporal_jitter=False)
dense_sample_frames = DenseSampleFrames(**config)
dense_sample_frames_results = dense_sample_frames(video_result)
assert dense_sample_frames_results['start_index'] == 0
assert self.check_keys_contain(dense_sample_frames_results.keys(),
target_keys)
assert len(dense_sample_frames_results['frame_inds']) == 24
dense_sample_frames_results = dense_sample_frames(frame_result)
assert len(dense_sample_frames_results['frame_inds']) == 24
# Dense sample with no temporal_jitter, sample_range=1000 to check mod
# clip_len=4, frame_interval=1, num_clips=6
video_result = copy.deepcopy(self.video_results)
frame_result = copy.deepcopy(self.frame_results)
config = dict(
clip_len=4,
frame_interval=1,
num_clips=6,
sample_range=1000,
temporal_jitter=False)
dense_sample_frames = DenseSampleFrames(**config)
dense_sample_frames_results = dense_sample_frames(video_result)
assert dense_sample_frames_results['start_index'] == 0
assert self.check_keys_contain(dense_sample_frames_results.keys(),
target_keys)
assert len(dense_sample_frames_results['frame_inds']) == 24
dense_sample_frames_results = dense_sample_frames(frame_result)
assert len(dense_sample_frames_results['frame_inds']) == 24
# Dense sample with no temporal_jitter in test mode
# sample_range=32, num_sample_positions=5
# clip_len=4, frame_interval=1, num_clips=6
video_result = copy.deepcopy(self.video_results)
frame_result = copy.deepcopy(self.frame_results)
config = dict(
clip_len=4,
frame_interval=1,
num_clips=6,
num_sample_positions=5,
sample_range=32,
temporal_jitter=False,
test_mode=True)
dense_sample_frames = DenseSampleFrames(**config)
dense_sample_frames_results = dense_sample_frames(video_result)
assert dense_sample_frames_results['start_index'] == 0
assert self.check_keys_contain(dense_sample_frames_results.keys(),
target_keys)
assert len(dense_sample_frames_results['frame_inds']) == 120
dense_sample_frames_results = dense_sample_frames(frame_result)
assert len(dense_sample_frames_results['frame_inds']) == 120
def test_untrim_sample_frames(self):
target_keys = [
'frame_inds', 'clip_len', 'frame_interval', 'num_clips',
'total_frames'
]
frame_result = dict(
frame_dir=None,
total_frames=100,
filename_tmpl=None,
modality='RGB',
start_index=0,
label=1)
video_result = copy.deepcopy(self.video_results)
config = dict(clip_len=1, frame_interval=16, start_index=0)
sample_frames = UntrimmedSampleFrames(**config)
sample_frames_results = sample_frames(frame_result)
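        # with frame_interval=16, clip centers fall at 16 * i + 16 // 2,
        # i.e. 8, 24, ..., 88 for this 100-frame input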
assert self.check_keys_contain(sample_frames_results.keys(),
target_keys)
assert len(sample_frames_results['frame_inds']) == 6
assert_array_equal(sample_frames_results['frame_inds'],
np.array([8, 24, 40, 56, 72, 88]))
config = dict(clip_len=1, frame_interval=16, start_index=0)
sample_frames = UntrimmedSampleFrames(**config)
sample_frames_results = sample_frames(video_result)
assert self.check_keys_contain(sample_frames_results.keys(),
target_keys)
frame_inds = np.array(list(range(8, 300, 16)))
assert len(sample_frames_results['frame_inds']) == frame_inds.shape[0]
assert_array_equal(sample_frames_results['frame_inds'], frame_inds)
config = dict(clip_len=1, frame_interval=16)
sample_frames = UntrimmedSampleFrames(**config)
frame_result_ = copy.deepcopy(frame_result)
frame_result_['start_index'] = 1
sample_frames_results = sample_frames(frame_result_)
assert self.check_keys_contain(sample_frames_results.keys(),
target_keys)
assert len(sample_frames_results['frame_inds']) == 6
assert_array_equal(sample_frames_results['frame_inds'],
np.array([8, 24, 40, 56, 72, 88]) + 1)
config = dict(clip_len=3, frame_interval=16, start_index=0)
sample_frames = UntrimmedSampleFrames(**config)
sample_frames_results = sample_frames(frame_result)
assert self.check_keys_contain(sample_frames_results.keys(),
target_keys)
assert len(sample_frames_results['frame_inds']) == 18
assert_array_equal(
sample_frames_results['frame_inds'],
np.array([
7, 8, 9, 23, 24, 25, 39, 40, 41, 55, 56, 57, 71, 72, 73, 87,
88, 89
]))
def test_sample_ava_frames(self):
target_keys = [
'fps', 'timestamp', 'timestamp_start', 'shot_info', 'frame_inds',
'clip_len', 'frame_interval'
]
config = dict(clip_len=32, frame_interval=2)
sample_ava_dataset = SampleAVAFrames(**config)
ava_result = sample_ava_dataset(results=self.ava_results)
assert self.check_keys_contain(ava_result.keys(), target_keys)
assert ava_result['clip_len'] == 32
assert ava_result['frame_interval'] == 2
assert len(ava_result['frame_inds']) == 32
        # test case added for Issue #306
config = dict(clip_len=8, frame_interval=8)
sample_ava_dataset = SampleAVAFrames(**config)
ava_result = sample_ava_dataset(results=self.ava_results)
assert self.check_keys_contain(ava_result.keys(), target_keys)
assert ava_result['clip_len'] == 8
assert ava_result['frame_interval'] == 8
assert len(ava_result['frame_inds']) == 8
def test_sample_proposal_frames(self):
target_keys = [
'frame_inds', 'clip_len', 'frame_interval', 'num_clips',
'total_frames', 'start_index'
]
# test error cases
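        # aug_segments entries must be integers, so a tuple of strings is
        # expected to raise a TypeError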
with pytest.raises(TypeError):
proposal_result = copy.deepcopy(self.proposal_results)
config = dict(
clip_len=1,
frame_interval=1,
body_segments=2,
aug_segments=('error', 'error'),
aug_ratio=0.5,
temporal_jitter=False)
sample_frames = SampleProposalFrames(**config)
sample_frames(proposal_result)
# test normal cases
# Sample Frame with no temporal_jitter
# clip_len=1, frame_interval=1
# body_segments=2, aug_segments=(1, 1)
proposal_result = copy.deepcopy(self.proposal_results)
proposal_result['total_frames'] = 9
config = dict(
clip_len=1,
frame_interval=1,
body_segments=2,
aug_segments=(1, 1),
aug_ratio=0.5,
temporal_jitter=False)
sample_frames = SampleProposalFrames(**config)
sample_frames_results = sample_frames(proposal_result)
assert self.check_keys_contain(sample_frames_results.keys(),
target_keys)
assert len(sample_frames_results['frame_inds']) == 8
# Sample Frame with temporal_jitter
# clip_len=1, frame_interval=1
# body_segments=2, aug_segments=(1, 1)
proposal_result = copy.deepcopy(self.proposal_results)
proposal_result['total_frames'] = 9
config = dict(
clip_len=1,
frame_interval=1,
body_segments=2,
aug_segments=(1, 1),
aug_ratio=0.5,
temporal_jitter=True)
sample_frames = SampleProposalFrames(**config)
sample_frames_results = sample_frames(proposal_result)
assert self.check_keys_contain(sample_frames_results.keys(),
target_keys)
assert len(sample_frames_results['frame_inds']) == 8
# Sample Frame with no temporal_jitter in val mode
# clip_len=1, frame_interval=1
# body_segments=2, aug_segments=(1, 1)
proposal_result = copy.deepcopy(self.proposal_results)
proposal_result['total_frames'] = 9
config = dict(
clip_len=1,
frame_interval=1,
body_segments=2,
aug_segments=(1, 1),
aug_ratio=0.5,
temporal_jitter=False,
mode='val')
sample_frames = SampleProposalFrames(**config)
sample_frames_results = sample_frames(proposal_result)
assert self.check_keys_contain(sample_frames_results.keys(),
target_keys)
assert len(sample_frames_results['frame_inds']) == 8
# Sample Frame with no temporal_jitter in test mode
# test_interval=2
proposal_result = copy.deepcopy(self.proposal_results)
proposal_result['out_proposals'] = None
proposal_result['total_frames'] = 10
config = dict(
clip_len=1,
frame_interval=1,
body_segments=2,
aug_segments=(1, 1),
aug_ratio=0.5,
test_interval=2,
temporal_jitter=False,
mode='test')
sample_frames = SampleProposalFrames(**config)
sample_frames_results = sample_frames(proposal_result)
assert self.check_keys_contain(sample_frames_results.keys(),
target_keys)
assert len(sample_frames_results['frame_inds']) == 5
# Sample Frame with no temporal_jitter to get clip_offsets zero
# clip_len=1, frame_interval=1
# body_segments=2, aug_segments=(1, 1)
proposal_result = copy.deepcopy(self.proposal_results)
proposal_result['total_frames'] = 3
config = dict(
clip_len=1,
frame_interval=1,
body_segments=2,
aug_segments=(1, 1),
aug_ratio=0.5,
temporal_jitter=False)
sample_frames = SampleProposalFrames(**config)
sample_frames_results = sample_frames(proposal_result)
assert self.check_keys_contain(sample_frames_results.keys(),
target_keys)
assert len(sample_frames_results['frame_inds']) == 8
# Sample Frame with no temporal_jitter to
# get clip_offsets zero in val mode
# clip_len=1, frame_interval=1
# body_segments=4, aug_segments=(2, 2)
proposal_result = copy.deepcopy(self.proposal_results)
proposal_result['total_frames'] = 3
config = dict(
clip_len=1,
frame_interval=1,
body_segments=4,
aug_segments=(2, 2),
aug_ratio=0.5,
temporal_jitter=False,
mode='val')
sample_frames = SampleProposalFrames(**config)
sample_frames_results = sample_frames(proposal_result)
assert self.check_keys_contain(sample_frames_results.keys(),
target_keys)
assert len(sample_frames_results['frame_inds']) == 16
def test_pyav_init(self):
target_keys = ['video_reader', 'total_frames']
video_result = copy.deepcopy(self.video_results)
pyav_init = PyAVInit()
pyav_init_result = pyav_init(video_result)
assert self.check_keys_contain(pyav_init_result.keys(), target_keys)
assert pyav_init_result['total_frames'] == 300
def test_pyav_decode(self):
target_keys = ['frame_inds', 'imgs', 'original_shape']
# test PyAV with 2 dim input and start_index = 0
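        # frame_inds is shaped (num_frames, 1) here; the decoder is expected
        # to flatten it before seeking, so the shape checks below still count
        # one decoded image per index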
video_result = copy.deepcopy(self.video_results)
video_result['frame_inds'] = np.arange(0, self.total_frames,
2)[:, np.newaxis]
pyav_init = PyAVInit()
pyav_init_result = pyav_init(video_result)
video_result['video_reader'] = pyav_init_result['video_reader']
pyav_decode = PyAVDecode()
pyav_decode_result = pyav_decode(video_result)
assert self.check_keys_contain(pyav_decode_result.keys(), target_keys)
assert pyav_decode_result['original_shape'] == (256, 340)
assert np.shape(pyav_decode_result['imgs']) == (len(
video_result['frame_inds']), 256, 340, 3)
# test PyAV with 1 dim input and start_index = 0
video_result = copy.deepcopy(self.video_results)
video_result['frame_inds'] = np.arange(0, self.total_frames, 5)
pyav_init = PyAVInit()
pyav_init_result = pyav_init(video_result)
video_result['video_reader'] = pyav_init_result['video_reader']
pyav_decode = PyAVDecode()
pyav_decode_result = pyav_decode(video_result)
assert self.check_keys_contain(pyav_decode_result.keys(), target_keys)
assert pyav_decode_result['original_shape'] == (256, 340)
assert np.shape(pyav_decode_result['imgs']) == (len(
video_result['frame_inds']), 256, 340, 3)
# PyAV with multi thread and start_index = 0
video_result = copy.deepcopy(self.video_results)
video_result['frame_inds'] = np.arange(0, self.total_frames, 5)
pyav_init = PyAVInit()
pyav_init_result = pyav_init(video_result)
video_result['video_reader'] = pyav_init_result['video_reader']
pyav_decode = PyAVDecode(multi_thread=True)
pyav_decode_result = pyav_decode(video_result)
assert self.check_keys_contain(pyav_decode_result.keys(), target_keys)
assert pyav_decode_result['original_shape'] == (256, 340)
assert np.shape(pyav_decode_result['imgs']) == (len(
video_result['frame_inds']), 256, 340, 3)
# test PyAV with 2 dim input
video_result = copy.deepcopy(self.video_results)
video_result['frame_inds'] = np.arange(1, self.total_frames,
2)[:, np.newaxis]
pyav_init = PyAVInit()
pyav_init_result = pyav_init(video_result)
video_result['video_reader'] = pyav_init_result['video_reader']
pyav_decode = PyAVDecode()
pyav_decode_result = pyav_decode(video_result)
assert self.check_keys_contain(pyav_decode_result.keys(), target_keys)
assert pyav_decode_result['original_shape'] == (256, 340)
assert np.shape(pyav_decode_result['imgs']) == (len(
video_result['frame_inds']), 256, 340, 3)
# test PyAV with 1 dim input
video_result = copy.deepcopy(self.video_results)
video_result['frame_inds'] = np.arange(1, self.total_frames, 5)
pyav_init = PyAVInit()
pyav_init_result = pyav_init(video_result)
video_result['video_reader'] = pyav_init_result['video_reader']
pyav_decode = PyAVDecode()
pyav_decode_result = pyav_decode(video_result)
assert self.check_keys_contain(pyav_decode_result.keys(), target_keys)
assert pyav_decode_result['original_shape'] == (256, 340)
assert np.shape(pyav_decode_result['imgs']) == (len(
video_result['frame_inds']), 256, 340, 3)
# PyAV with multi thread
video_result = copy.deepcopy(self.video_results)
video_result['frame_inds'] = np.arange(1, self.total_frames, 5)
pyav_init = PyAVInit()
pyav_init_result = pyav_init(video_result)
video_result['video_reader'] = pyav_init_result['video_reader']
pyav_decode = PyAVDecode(multi_thread=True)
pyav_decode_result = pyav_decode(video_result)
assert self.check_keys_contain(pyav_decode_result.keys(), target_keys)
assert pyav_decode_result['original_shape'] == (256, 340)
assert np.shape(pyav_decode_result['imgs']) == (len(
video_result['frame_inds']), 256, 340, 3)
assert repr(pyav_decode) == pyav_decode.__class__.__name__ + \
f'(multi_thread={True})'
def test_decord_init(self):
target_keys = ['video_reader', 'total_frames']
video_result = copy.deepcopy(self.video_results)
decord_init = DecordInit()
decord_init_result = decord_init(video_result)
assert self.check_keys_contain(decord_init_result.keys(), target_keys)
assert decord_init_result['total_frames'] == len(
decord_init_result['video_reader'])
def test_decord_decode(self):
target_keys = ['frame_inds', 'imgs', 'original_shape']
# test Decord with 2 dim input and start_index = 0
video_result = copy.deepcopy(self.video_results)
video_result['frame_inds'] = np.arange(0, self.total_frames,
3)[:, np.newaxis]
decord_init = DecordInit()
decord_init_result = decord_init(video_result)
video_result['video_reader'] = decord_init_result['video_reader']
decord_decode = DecordDecode()
decord_decode_result = decord_decode(video_result)
assert self.check_keys_contain(decord_decode_result.keys(),
target_keys)
assert decord_decode_result['original_shape'] == (256, 340)
assert np.shape(decord_decode_result['imgs']) == (len(
video_result['frame_inds']), 256, 340, 3)
# test Decord with 1 dim input and start_index = 0
video_result = copy.deepcopy(self.video_results)
video_result['frame_inds'] = np.arange(0, self.total_frames, 3)
decord_init = DecordInit()
decord_init_result = decord_init(video_result)
video_result['video_reader'] = decord_init_result['video_reader']
decord_decode = DecordDecode()
decord_decode_result = decord_decode(video_result)
assert self.check_keys_contain(decord_decode_result.keys(),
target_keys)
assert decord_decode_result['original_shape'] == (256, 340)
assert np.shape(decord_decode_result['imgs']) == (len(
video_result['frame_inds']), 256, 340, 3)
        # test Decord with 2 dim input
        video_result = copy.deepcopy(self.video_results)
        video_result['frame_inds'] = np.arange(1, self.total_frames,
                                               3)[:, np.newaxis]
decord_init = DecordInit()
decord_init_result = decord_init(video_result)
video_result['video_reader'] = decord_init_result['video_reader']
decord_decode = DecordDecode()
decord_decode_result = decord_decode(video_result)
assert self.check_keys_contain(decord_decode_result.keys(),
target_keys)
assert decord_decode_result['original_shape'] == (256, 340)
assert np.shape(decord_decode_result['imgs']) == (len(
video_result['frame_inds']), 256, 340, 3)
# test Decord with 1 dim input
video_result = copy.deepcopy(self.video_results)
video_result['frame_inds'] = np.arange(1, self.total_frames, 3)
decord_init = DecordInit()
decord_init_result = decord_init(video_result)
video_result['video_reader'] = decord_init_result['video_reader']
decord_decode = DecordDecode()
decord_decode_result = decord_decode(video_result)
assert self.check_keys_contain(decord_decode_result.keys(),
target_keys)
assert decord_decode_result['original_shape'] == (256, 340)
assert np.shape(decord_decode_result['imgs']) == (len(
video_result['frame_inds']), 256, 340, 3)
def test_opencv_init(self):
target_keys = ['new_path', 'video_reader', 'total_frames']
video_result = copy.deepcopy(self.video_results)
opencv_init = OpenCVInit()
opencv_init_result = opencv_init(video_result)
assert self.check_keys_contain(opencv_init_result.keys(), target_keys)
assert opencv_init_result['total_frames'] == len(
opencv_init_result['video_reader'])
def test_opencv_decode(self):
target_keys = ['frame_inds', 'imgs', 'original_shape']
# test OpenCV with 2 dim input when start_index = 0
video_result = copy.deepcopy(self.video_results)
video_result['frame_inds'] = np.arange(0, self.total_frames,
2)[:, np.newaxis]
opencv_init = OpenCVInit()
opencv_init_result = opencv_init(video_result)
video_result['video_reader'] = opencv_init_result['video_reader']
opencv_decode = OpenCVDecode()
opencv_decode_result = opencv_decode(video_result)
assert self.check_keys_contain(opencv_decode_result.keys(),
target_keys)
assert opencv_decode_result['original_shape'] == (256, 340)
assert np.shape(opencv_decode_result['imgs']) == (len(
video_result['frame_inds']), 256, 340, 3)
# test OpenCV with 2 dim input
video_result = copy.deepcopy(self.video_results)
video_result['frame_inds'] = np.arange(1, self.total_frames,
2)[:, np.newaxis]
opencv_init = OpenCVInit()
opencv_init_result = opencv_init(video_result)
video_result['video_reader'] = opencv_init_result['video_reader']
opencv_decode = OpenCVDecode()
opencv_decode_result = opencv_decode(video_result)
assert self.check_keys_contain(opencv_decode_result.keys(),
target_keys)
assert opencv_decode_result['original_shape'] == (256, 340)
assert np.shape(opencv_decode_result['imgs']) == (len(
video_result['frame_inds']), 256, 340, 3)
        # test OpenCV with 1 dim input when start_index = 0
        video_result = copy.deepcopy(self.video_results)
        video_result['frame_inds'] = np.arange(0, self.total_frames, 3)
        opencv_init = OpenCVInit()
        opencv_init_result = opencv_init(video_result)
        video_result['video_reader'] = opencv_init_result['video_reader']
        opencv_decode = OpenCVDecode()
        opencv_decode_result = opencv_decode(video_result)
        assert opencv_decode_result['original_shape'] == (256, 340)
        assert np.shape(opencv_decode_result['imgs']) == (len(
            video_result['frame_inds']), 256, 340, 3)
# test OpenCV with 1 dim input
video_result = copy.deepcopy(self.video_results)
video_result['frame_inds'] = np.arange(1, self.total_frames, 3)
opencv_init = OpenCVInit()
opencv_init_result = opencv_init(video_result)
video_result['video_reader'] = opencv_init_result['video_reader']
opencv_decode = OpenCVDecode()
opencv_decode_result = opencv_decode(video_result)
assert self.check_keys_contain(opencv_decode_result.keys(),
target_keys)
assert opencv_decode_result['original_shape'] == (256, 340)
assert np.shape(opencv_decode_result['imgs']) == (len(
video_result['frame_inds']), 256, 340, 3)
def test_rawframe_selector(self):
with pytest.warns(UserWarning):
FrameSelector(io_backend='disk')
def test_rawframe_decode(self):
target_keys = ['frame_inds', 'imgs', 'original_shape', 'modality']
        # test frame selector with 2 dim input when start_index = 0
inputs = copy.deepcopy(self.frame_results)
inputs['frame_inds'] = np.arange(0, self.total_frames, 2)[:,
np.newaxis]
        # since the test images are numbered from 1, add 1 to frame_inds
        # so that the indices point at files that exist in the CI
inputs['frame_inds'] = inputs['frame_inds'] + 1
frame_selector = RawFrameDecode(io_backend='disk')
results = frame_selector(inputs)
assert self.check_keys_contain(results.keys(), target_keys)
assert np.shape(results['imgs']) == (len(inputs['frame_inds']), 240,
320, 3)
assert results['original_shape'] == (240, 320)
# test frame selector with 2 dim input
inputs = copy.deepcopy(self.frame_results)
inputs['frame_inds'] = np.arange(1, self.total_frames, 2)[:,
np.newaxis]
frame_selector = RawFrameDecode(io_backend='disk')
results = frame_selector(inputs)
assert self.check_keys_contain(results.keys(), target_keys)
assert np.shape(results['imgs']) == (len(inputs['frame_inds']), 240,
320, 3)
assert results['original_shape'] == (240, 320)
# test frame selector with 1 dim input when start_index = 0
inputs = copy.deepcopy(self.frame_results)
inputs['frame_inds'] = np.arange(0, self.total_frames, 5)
        # since the test images are numbered from 1, add 1 to frame_inds
        # so that the indices point at files that exist in the CI
inputs['frame_inds'] = inputs['frame_inds'] + 1
frame_selector = RawFrameDecode(io_backend='disk')
results = frame_selector(inputs)
assert self.check_keys_contain(results.keys(), target_keys)
assert np.shape(results['imgs']) == (len(inputs['frame_inds']), 240,
320, 3)
assert results['original_shape'] == (240, 320)
# test frame selector with 1 dim input
inputs = copy.deepcopy(self.frame_results)
inputs['frame_inds'] = np.arange(1, self.total_frames, 5)
frame_selector = RawFrameDecode(io_backend='disk')
results = frame_selector(inputs)
assert self.check_keys_contain(results.keys(), target_keys)
assert np.shape(results['imgs']) == (len(inputs['frame_inds']), 240,
320, 3)
assert results['original_shape'] == (240, 320)
        # test frame selector with 1 dim input when start_index = 0
inputs = copy.deepcopy(self.frame_results)
inputs['frame_inds'] = np.arange(0, self.total_frames, 2)
        # since the test images are numbered from 1, add 1 to frame_inds
        # so that the indices point at files that exist in the CI
inputs['frame_inds'] = inputs['frame_inds'] + 1
frame_selector = RawFrameDecode(io_backend='disk')
results = frame_selector(inputs)
assert self.check_keys_contain(results.keys(), target_keys)
assert np.shape(results['imgs']) == (len(inputs['frame_inds']), 240,
320, 3)
assert results['original_shape'] == (240, 320)
# test frame selector with 1 dim input
inputs = copy.deepcopy(self.frame_results)
inputs['frame_inds'] = np.arange(1, self.total_frames, 2)
frame_selector = RawFrameDecode(io_backend='disk')
results = frame_selector(inputs)
assert self.check_keys_contain(results.keys(), target_keys)
assert np.shape(results['imgs']) == (len(inputs['frame_inds']), 240,
320, 3)
assert results['original_shape'] == (240, 320)
# test frame selector with 1 dim input for flow images
inputs = copy.deepcopy(self.flow_frame_results)
inputs['frame_inds'] = np.arange(0, self.total_frames, 2)
        # since the test images are numbered from 1, add 1 to frame_inds
        # so that the indices point at files that exist in the CI
inputs['frame_inds'] = inputs['frame_inds'] + 1
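        # every flow index decodes an x- and a y-direction frame, so 'imgs'
        # holds twice as many single-channel images (note: no channel axis)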
frame_selector = RawFrameDecode(io_backend='disk')
results = frame_selector(inputs)
assert self.check_keys_contain(results.keys(), target_keys)
assert np.shape(results['imgs']) == (len(inputs['frame_inds']) * 2,
240, 320)
assert results['original_shape'] == (240, 320)
# test frame selector with 1 dim input for flow images
inputs = copy.deepcopy(self.flow_frame_results)
inputs['frame_inds'] = np.arange(1, self.total_frames, 2)
frame_selector = RawFrameDecode(io_backend='disk')
results = frame_selector(inputs)
assert self.check_keys_contain(results.keys(), target_keys)
assert np.shape(results['imgs']) == (len(inputs['frame_inds']) * 2,
240, 320)
assert results['original_shape'] == (240, 320)
        # test frame selector in turbojpeg decoding backend
# when start_index = 0
inputs = copy.deepcopy(self.frame_results)
inputs['frame_inds'] = np.arange(0, self.total_frames, 5)
        # since the test images are numbered from 1, add 1 to frame_inds
        # so that the indices point at files that exist in the CI
inputs['frame_inds'] = inputs['frame_inds'] + 1
frame_selector = RawFrameDecode(
io_backend='disk', decoding_backend='turbojpeg')
results = frame_selector(inputs)
assert self.check_keys_contain(results.keys(), target_keys)
assert np.shape(results['imgs']) == (len(inputs['frame_inds']), 240,
320, 3)
assert results['original_shape'] == (240, 320)
        # test frame selector in turbojpeg decoding backend
inputs = copy.deepcopy(self.frame_results)
inputs['frame_inds'] = np.arange(1, self.total_frames, 5)
frame_selector = RawFrameDecode(
io_backend='disk', decoding_backend='turbojpeg')
results = frame_selector(inputs)
assert self.check_keys_contain(results.keys(), target_keys)
assert np.shape(results['imgs']) == (len(inputs['frame_inds']), 240,
320, 3)
assert results['original_shape'] == (240, 320)
def test_load_localization_feature(self):
target_keys = ['raw_feature']
action_result = copy.deepcopy(self.action_results)
# test error cases
with pytest.raises(NotImplementedError):
load_localization_feature = LoadLocalizationFeature(
'unsupport_ext')
# test normal cases
load_localization_feature = LoadLocalizationFeature()
load_localization_feature_result = load_localization_feature(
action_result)
assert self.check_keys_contain(load_localization_feature_result.keys(),
target_keys)
assert load_localization_feature_result['raw_feature'].shape == (400,
5)
def test_generate_localization_label(self):
action_result = copy.deepcopy(self.action_results)
action_result['raw_feature'] = np.random.randn(400, 5)
# test default setting
target_keys = ['gt_bbox']
generate_localization_labels = GenerateLocalizationLabels()
generate_localization_labels_result = generate_localization_labels(
action_result)
assert self.check_keys_contain(
generate_localization_labels_result.keys(), target_keys)
assert_array_almost_equal(
generate_localization_labels_result['gt_bbox'], [[0.375, 0.625]],
decimal=4)
def test_load_proposals(self):
target_keys = [
'bsp_feature', 'tmin', 'tmax', 'tmin_score', 'tmax_score',
'reference_temporal_iou'
]
action_result = copy.deepcopy(self.action_results)
# test error cases
with pytest.raises(NotImplementedError):
load_proposals = LoadProposals(5, self.proposals_dir,
self.bsp_feature_dir,
'unsupport_ext')
with pytest.raises(NotImplementedError):
load_proposals = LoadProposals(5, self.proposals_dir,
self.bsp_feature_dir, '.csv',
'unsupport_ext')
# test normal cases
load_proposals = LoadProposals(5, self.proposals_dir,
self.bsp_feature_dir)
load_proposals_result = load_proposals(action_result)
assert self.check_keys_contain(load_proposals_result.keys(),
target_keys)
        assert load_proposals_result['bsp_feature'].shape[0] == 5
assert load_proposals_result['tmin'].shape == (5, )
assert_array_almost_equal(
load_proposals_result['tmin'], np.arange(0.1, 0.6, 0.1), decimal=4)
assert load_proposals_result['tmax'].shape == (5, )
assert_array_almost_equal(
load_proposals_result['tmax'], np.arange(0.2, 0.7, 0.1), decimal=4)
assert load_proposals_result['tmin_score'].shape == (5, )
assert_array_almost_equal(
load_proposals_result['tmin_score'],
np.arange(0.95, 0.90, -0.01),
decimal=4)
assert load_proposals_result['tmax_score'].shape == (5, )
assert_array_almost_equal(
load_proposals_result['tmax_score'],
np.arange(0.96, 0.91, -0.01),
decimal=4)
assert load_proposals_result['reference_temporal_iou'].shape == (5, )
assert_array_almost_equal(
load_proposals_result['reference_temporal_iou'],
np.arange(0.85, 0.80, -0.01),
decimal=4)
def test_audio_decode_init(self):
target_keys = ['audios', 'length', 'sample_rate']
inputs = copy.deepcopy(self.audio_results)
audio_decode_init = AudioDecodeInit()
results = audio_decode_init(inputs)
assert self.check_keys_contain(results.keys(), target_keys)
# test when no audio file exists
inputs = copy.deepcopy(self.audio_results)
inputs['audio_path'] = 'foo/foo/bar.wav'
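        # a missing file presumably falls back to a silent clip of 10 seconds
        # at the initializer's sample rate, which the shape check reflects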
audio_decode_init = AudioDecodeInit()
results = audio_decode_init(inputs)
assert self.check_keys_contain(results.keys(), target_keys)
assert results['audios'].shape == (10.0 *
audio_decode_init.sample_rate, )
def test_audio_decode(self):
target_keys = ['frame_inds', 'audios']
inputs = copy.deepcopy(self.audio_results)
inputs['frame_inds'] = np.arange(0, self.audio_total_frames,
2)[:, np.newaxis]
inputs['num_clips'] = 1
inputs['length'] = 1280
audio_selector = AudioDecode()
results = audio_selector(inputs)
assert self.check_keys_contain(results.keys(), target_keys)
def test_load_audio_feature(self):
target_keys = ['audios']
inputs = copy.deepcopy(self.audio_feature_results)
load_audio_feature = LoadAudioFeature()
results = load_audio_feature(inputs)
assert self.check_keys_contain(results.keys(), target_keys)
# test when no audio feature file exists
inputs = copy.deepcopy(self.audio_feature_results)
inputs['audio_path'] = 'foo/foo/bar.npy'
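        # a missing feature file presumably falls back to a padded
        # spectrogram with a fixed (640, 80) shape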
load_audio_feature = LoadAudioFeature()
results = load_audio_feature(inputs)
assert results['audios'].shape == (640, 80)
assert self.check_keys_contain(results.keys(), target_keys)
def test_audio_feature_selector(self):
target_keys = ['audios']
# test frame selector with 2 dim input
inputs = copy.deepcopy(self.audio_feature_results)
inputs['frame_inds'] = np.arange(0, self.audio_total_frames,
2)[:, np.newaxis]
inputs['num_clips'] = 1
inputs['length'] = 1280
audio_feature_selector = AudioFeatureSelector()
results = audio_feature_selector(inputs)
assert self.check_keys_contain(results.keys(), target_keys)
| 45.308288
| 79
| 0.617288
| 6,837
| 58,493
| 4.950271
| 0.048998
| 0.08403
| 0.088698
| 0.051056
| 0.864677
| 0.853065
| 0.832058
| 0.816812
| 0.796159
| 0.782715
| 0
| 0.026976
| 0.286393
| 58,493
| 1,290
| 80
| 45.343411
| 0.783858
| 0.07271
| 0
| 0.726843
| 0
| 0
| 0.071182
| 0.002568
| 0
| 0
| 0
| 0
| 0.214556
| 1
| 0.023629
| false
| 0
| 0.007561
| 0
| 0.035917
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
4c052b882ed69565dffc9d6913443ac460b7da8c
| 181
|
py
|
Python
|
bfgame/factories/recipes/items/weapons/__init__.py
|
ChrisLR/BasicDungeonRL
|
b293d40bd9a0d3b7aec41b5e1d58441165997ff1
|
[
"MIT"
] | 3
|
2017-10-28T11:28:38.000Z
|
2018-09-12T09:47:00.000Z
|
bfgame/factories/recipes/items/weapons/__init__.py
|
ChrisLR/BasicDungeonRL
|
b293d40bd9a0d3b7aec41b5e1d58441165997ff1
|
[
"MIT"
] | null | null | null |
bfgame/factories/recipes/items/weapons/__init__.py
|
ChrisLR/BasicDungeonRL
|
b293d40bd9a0d3b7aec41b5e1d58441165997ff1
|
[
"MIT"
] | null | null | null |
from bfgame.factories.recipes.items.weapons.melee import *
from bfgame.factories.recipes.items.weapons.ranged import *
from bfgame.factories.recipes.items.weapons.throwing import *
| 45.25
| 61
| 0.834254
| 24
| 181
| 6.291667
| 0.416667
| 0.198676
| 0.377483
| 0.516556
| 0.834437
| 0.834437
| 0.582781
| 0
| 0
| 0
| 0
| 0
| 0.066298
| 181
| 3
| 62
| 60.333333
| 0.893491
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 9
|
4c172ad74ec73726fda9bacc7862d77ae63255cb
| 15,126
|
py
|
Python
|
neuralnet_pytorch/layers/normalization.py
|
justanhduc/neuralnet-pytorch
|
cbb0c5a540a0ba91cb4dd20684bb00692305d193
|
[
"MIT"
] | 28
|
2019-01-07T04:07:55.000Z
|
2021-11-09T15:16:11.000Z
|
neuralnet_pytorch/layers/normalization.py
|
justanhduc/neuralnet-pytorch
|
cbb0c5a540a0ba91cb4dd20684bb00692305d193
|
[
"MIT"
] | 9
|
2019-12-25T08:00:33.000Z
|
2021-11-23T09:02:34.000Z
|
neuralnet_pytorch/layers/normalization.py
|
justanhduc/neuralnet-pytorch
|
cbb0c5a540a0ba91cb4dd20684bb00692305d193
|
[
"MIT"
] | 3
|
2020-08-07T12:49:05.000Z
|
2022-03-07T21:32:39.000Z
|
import torch.nn as nn
from torch._six import container_abcs
from .. import utils
from ..utils import _image_shape, _matrix_shape, _pointset_shape
from .abstract import _LayerMethod
__all__ = ['BatchNorm1d', 'BatchNorm2d', 'LayerNorm', 'InstanceNorm2d', 'FeatureNorm1d', 'InstanceNorm1d', 'GroupNorm']
@utils.add_simple_repr
@utils.no_dim_change_op
class BatchNorm1d(nn.BatchNorm1d, _LayerMethod):
"""
Performs batch normalization on 1D signals.
Parameters
----------
input_shape
shape of the input tensor.
If an integer is passed, it is treated as the size of each input sample.
eps
a value added to the denominator for numerical stability.
Default: 1e-5.
momentum
the value used for the running_mean and running_var
computation. Can be set to ``None`` for cumulative moving average
(i.e. simple average). Default: 0.1.
affine
a boolean value that when set to ``True``, this module has
learnable affine parameters. Default: ``True``.
track_running_stats
a boolean value that when set to ``True``, this
module tracks the running mean and variance, and when set to ``False``,
this module does not track such statistics and always uses batch
statistics in both training and eval modes. Default: ``True``.
activation
non-linear function to activate the linear result.
It accepts any callable function
as well as a recognizable ``str``.
A list of possible ``str`` is in :const:`~neuralnet_pytorch.utils.function`.
no_scale: bool
        if ``True``, the scale parameter is fixed at 1 and excluded from
        training. Default: ``False``.
kwargs
extra keyword arguments to pass to activation.
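
    Examples
    --------
    A minimal sketch (assuming ``import torch`` and that ``'relu'`` is one
    of the recognizable activation strings)::

        bn = BatchNorm1d(64, activation='relu')
        out = bn(torch.randn(8, 64))  # batch-norm over 64 features + ReLU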
"""
def __init__(self, input_shape, eps=1e-5, momentum=0.1, affine=True, track_running_stats=True, activation=None,
no_scale=False, **kwargs):
input_shape = _matrix_shape(input_shape)
assert isinstance(input_shape, container_abcs.Iterable), 'input_shape must be a list or tuple, got %s' % type(
input_shape)
self.input_shape = input_shape
self.activation = utils.function(activation, **kwargs)
self.no_scale = no_scale
super().__init__(input_shape[1], eps, momentum, affine, track_running_stats)
if self.no_scale:
nn.init.constant_(self.weight, 1.)
self.weight.requires_grad_(False)
def forward(self, input, *args, **kwargs):
output = self.activation(super().forward(input))
return output
def reset(self):
super().reset_parameters()
if self.no_scale:
nn.init.constant_(self.weight, 1.)
@utils.add_simple_repr
@utils.no_dim_change_op
class BatchNorm2d(nn.BatchNorm2d, _LayerMethod):
"""
Performs batch normalization on 2D signals.
Parameters
----------
input_shape
shape of the 4D input image.
If a single integer is passed, it is treated as the number of input channels
and other sizes are unknown.
eps
a value added to the denominator for numerical stability.
Default: 1e-5.
momentum
the value used for the running_mean and running_var
computation. Can be set to ``None`` for cumulative moving average
(i.e. simple average). Default: 0.1.
affine
a boolean value that when set to ``True``, this module has
learnable affine parameters. Default: ``True``.
track_running_stats
a boolean value that when set to ``True``, this
module tracks the running mean and variance, and when set to ``False``,
this module does not track such statistics and always uses batch
statistics in both training and eval modes. Default: ``True``.
activation
non-linear function to activate the linear result.
It accepts any callable function
as well as a recognizable ``str``.
A list of possible ``str`` is in :const:`~neuralnet_pytorch.utils.function`.
no_scale: bool
        if ``True``, the scale parameter is fixed at 1 and excluded from
        training. Default: ``False``.
kwargs
extra keyword arguments to pass to activation.
"""
def __init__(self, input_shape, eps=1e-5, momentum=0.1, affine=True, track_running_stats=True, activation=None,
no_scale=False, **kwargs):
input_shape = _image_shape(input_shape)
assert isinstance(input_shape, container_abcs.Iterable), 'input_shape must be a list or tuple, got %s' % type(
input_shape)
self.input_shape = input_shape
self.activation = utils.function(activation, **kwargs)
self.no_scale = no_scale
super().__init__(self.input_shape[1], eps, momentum, affine, track_running_stats)
        if self.no_scale:
            nn.init.constant_(self.weight, 1.)
            self.weight.requires_grad_(False)
def forward(self, input, *args, **kwargs):
output = self.activation(super().forward(input))
return output
def reset(self):
super().reset_parameters()
if self.no_scale:
nn.init.constant_(self.weight, 1.)
@utils.add_simple_repr
@utils.no_dim_change_op
class LayerNorm(nn.LayerNorm, _LayerMethod):
"""
Performs layer normalization on input tensor.
Parameters
----------
input_shape
input shape from an expected input of size
.. math::
[\\text{input_shape}[0] \\times \\text{input_shape}[1]
\\times \\ldots \\times \\text{input_shape}[-1]]
If a single integer is used, it is treated as a singleton list, and this module will
normalize over the last dimension which is expected to be of that specific size.
eps
a value added to the denominator for numerical stability. Default: 1e-5.
elementwise_affine
a boolean value that when set to ``True``, this module
has learnable per-element affine parameters initialized to ones (for weights)
and zeros (for biases). Default: ``True``.
activation
non-linear function to activate the linear result.
It accepts any callable function
as well as a recognizable ``str``.
A list of possible ``str`` is in :const:`~neuralnet_pytorch.utils.function`.
kwargs
extra keyword arguments to pass to activation.
"""
def __init__(self, input_shape, eps=1e-5, elementwise_affine=True, activation=None, **kwargs):
assert isinstance(input_shape, container_abcs.Iterable), 'input_shape must be a list or tuple, got %s' % type(
input_shape)
assert None not in input_shape[1:], 'All dims in input_shape must be specified except the first dim'
self.input_shape = _matrix_shape(input_shape)
self.activation = utils.function(activation, **kwargs)
super().__init__(input_shape[1:], eps, elementwise_affine)
def forward(self, input):
output = super().forward(input)
return self.activation(output)
@utils.add_simple_repr
@utils.no_dim_change_op
class InstanceNorm1d(nn.InstanceNorm1d, _LayerMethod):
"""
Performs instance normalization on 1D signals.
Parameters
----------
input_shape
shape of the input tensor.
If an integer is passed, it is treated as the size of each input sample.
eps
a value added to the denominator for numerical stability.
Default: 1e-5.
momentum
the value used for the running_mean and running_var
computation. Can be set to ``None`` for cumulative moving average
(i.e. simple average). Default: 0.1.
affine
a boolean value that when set to ``True``, this module has
learnable affine parameters. Default: ``True``.
track_running_stats
a boolean value that when set to ``True``, this
module tracks the running mean and variance, and when set to ``False``,
this module does not track such statistics and always uses batch
        statistics in both training and eval modes. Default: ``False``.
activation
non-linear function to activate the linear result.
It accepts any callable function
as well as a recognizable ``str``.
A list of possible ``str`` is in :const:`~neuralnet_pytorch.utils.function`.
kwargs
extra keyword arguments to pass to activation.
"""
def __init__(self, input_shape, eps=1e-05, momentum=0.1, affine=True, track_running_stats=False, activation=None,
**kwargs):
input_shape = _matrix_shape(input_shape)
assert isinstance(input_shape, container_abcs.Iterable), 'input_shape must be a list or tuple, got %s' % type(
input_shape)
self.input_shape = input_shape
self.activation = utils.function(activation, **kwargs)
super().__init__(input_shape[-1], eps, momentum, affine, track_running_stats)
def forward(self, input):
output = super().forward(input)
return self.activation(output)
@utils.add_simple_repr
@utils.no_dim_change_op
class InstanceNorm2d(nn.InstanceNorm2d, _LayerMethod):
"""
Performs instance normalization on 2D signals.
Parameters
----------
input_shape
shape of the 4D input image.
If a single integer is passed, it is treated as the number of input channels
and other sizes are unknown.
eps
a value added to the denominator for numerical stability.
Default: 1e-5.
momentum
the value used for the running_mean and running_var
computation. Can be set to ``None`` for cumulative moving average
(i.e. simple average). Default: 0.1.
affine
a boolean value that when set to ``True``, this module has
learnable affine parameters. Default: ``True``.
track_running_stats
a boolean value that when set to ``True``, this
module tracks the running mean and variance, and when set to ``False``,
this module does not track such statistics and always uses batch
        statistics in both training and eval modes. Default: ``False``.
activation
non-linear function to activate the linear result.
It accepts any callable function
as well as a recognizable ``str``.
A list of possible ``str`` is in :const:`~neuralnet_pytorch.utils.function`.
kwargs
extra keyword arguments to pass to activation.
"""
def __init__(self, input_shape, eps=1e-05, momentum=0.1, affine=True, track_running_stats=False, activation=None,
**kwargs):
input_shape = _image_shape(input_shape)
assert isinstance(input_shape, container_abcs.Iterable), 'input_shape must be a list or tuple, got %s' % type(
input_shape)
self.input_shape = input_shape
self.activation = utils.function(activation, **kwargs)
super().__init__(input_shape[1], eps, momentum, affine, track_running_stats)
def forward(self, input):
output = super().forward(input)
return self.activation(output)
class GroupNorm(nn.GroupNorm, _LayerMethod):
"""
    Performs group normalization on 2D signals.
Parameters
----------
input_shape
shape of the 4D input image.
If a single integer is passed, it is treated as the number of input channels
and other sizes are unknown.
    num_groups : int
        number of groups to separate the channels into.
eps
a value added to the denominator for numerical stability.
Default: 1e-5.
affine
a boolean value that when set to ``True``, this module has
learnable affine parameters. Default: ``True``.
activation
non-linear function to activate the linear result.
It accepts any callable function
as well as a recognizable ``str``.
A list of possible ``str`` is in :const:`~neuralnet_pytorch.utils.function`.
kwargs
extra keyword arguments to pass to activation.
"""
def __init__(self, input_shape, num_groups, eps=1e-5, affine=True, activation=None, **kwargs):
input_shape = _image_shape(input_shape)
assert input_shape[1] is not None, 'Dimension at index 1 (index starts at 0) must be specified'
self.input_shape = input_shape
self.activation = utils.function(activation, **kwargs)
super().__init__(num_groups, input_shape[1], eps, affine)
def forward(self, input):
output = super().forward(input)
return self.activation(output)
@utils.add_simple_repr
@utils.no_dim_change_op
class FeatureNorm1d(nn.BatchNorm1d, _LayerMethod):
"""
Performs batch normalization over the last dimension of the input.
Parameters
----------
input_shape
shape of the input tensor.
If an integer is passed, it is treated as the size of each input sample.
eps
a value added to the denominator for numerical stability.
Default: 1e-5.
momentum
the value used for the running_mean and running_var
computation. Can be set to ``None`` for cumulative moving average
(i.e. simple average). Default: 0.1.
affine
a boolean value that when set to ``True``, this module has
learnable affine parameters. Default: ``True``.
track_running_stats
a boolean value that when set to ``True``, this
module tracks the running mean and variance, and when set to ``False``,
this module does not track such statistics and always uses batch
statistics in both training and eval modes. Default: ``True``.
activation
non-linear function to activate the linear result.
It accepts any callable function
as well as a recognizable ``str``.
A list of possible ``str`` is in :const:`~neuralnet_pytorch.utils.function`.
no_scale: bool
        if ``True``, the scale parameter is fixed at 1 and excluded from
        training. Default: ``False``.
kwargs
extra keyword arguments to pass to activation.
"""
def __init__(self, input_shape, eps=1e-5, momentum=0.1, affine=True, track_running_stats=True, activation=None,
no_scale=False, **kwargs):
input_shape = _pointset_shape(input_shape)
assert isinstance(input_shape, container_abcs.Iterable), 'input_shape must be a list or tuple, got %s' % type(
input_shape)
self.input_shape = input_shape
self.activation = utils.function(activation, **kwargs)
self.no_scale = no_scale
super().__init__(input_shape[-1], eps, momentum, affine, track_running_stats)
if self.no_scale:
nn.init.constant_(self.weight, 1.)
self.weight.requires_grad_(False)
def forward(self, input, *args, **kwargs):
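        # flatten every leading dim so batch norm runs over the last
        # (feature) axis, then restore the original shape afterwards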
shape = input.shape
input = input.view(-1, input.shape[-1])
output = self.activation(super().forward(input))
output = output.view(*shape)
return output
@property
@utils.validate
def output_shape(self):
return self.input_shape
def reset(self):
super().reset_parameters()
if self.no_scale:
nn.init.constant_(self.weight, 1.)
| 39.186528
| 119
| 0.662105
| 1,993
| 15,126
| 4.883091
| 0.101857
| 0.077065
| 0.015721
| 0.020962
| 0.883683
| 0.868372
| 0.853987
| 0.853987
| 0.853987
| 0.853987
| 0
| 0.008391
| 0.251554
| 15,126
| 385
| 120
| 39.288312
| 0.85125
| 0.51633
| 0
| 0.775194
| 0
| 0
| 0.071064
| 0
| 0
| 0
| 0
| 0
| 0.062016
| 1
| 0.139535
| false
| 0
| 0.03876
| 0.007752
| 0.294574
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d5b563ff3b040947c16a1ed8a45eab18f28121cb
| 144
|
py
|
Python
|
efficient_rl/agents/oo_mdp_learner/__init__.py
|
rlagywjd802/efficient_rl
|
6a82bfc10d814f5d36a7c211d645aa35ea380acf
|
[
"MIT"
] | 8
|
2020-06-25T10:16:48.000Z
|
2022-02-15T09:12:04.000Z
|
efficient_rl/agents/oo_mdp_learner/__init__.py
|
rlagywjd802/efficient_rl
|
6a82bfc10d814f5d36a7c211d645aa35ea380acf
|
[
"MIT"
] | null | null | null |
efficient_rl/agents/oo_mdp_learner/__init__.py
|
rlagywjd802/efficient_rl
|
6a82bfc10d814f5d36a7c211d645aa35ea380acf
|
[
"MIT"
] | 2
|
2020-12-30T07:39:38.000Z
|
2021-04-12T14:57:13.000Z
|
from efficient_rl.agents.oo_mdp_learner.CELearnerClass import CELearner
from efficient_rl.agents.oo_mdp_learner.CRLearnerClass import CRLearner
| 48
| 71
| 0.902778
| 20
| 144
| 6.2
| 0.6
| 0.209677
| 0.241935
| 0.33871
| 0.532258
| 0.532258
| 0.532258
| 0
| 0
| 0
| 0
| 0
| 0.055556
| 144
| 2
| 72
| 72
| 0.911765
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
d5c478a50560b530dc572eefe4e3b425ce6d2db5
| 5,861
|
py
|
Python
|
tests/test_schema_load.py
|
taalexander/ibm-quantum-schemas
|
93f46b75fe41b5f3be00606612ec2387621df092
|
[
"Apache-2.0"
] | 11
|
2021-01-15T00:26:47.000Z
|
2022-02-28T05:48:13.000Z
|
tests/test_schema_load.py
|
taalexander/ibm-quantum-schemas
|
93f46b75fe41b5f3be00606612ec2387621df092
|
[
"Apache-2.0"
] | 6
|
2020-10-21T11:27:26.000Z
|
2021-06-09T13:16:11.000Z
|
tests/test_schema_load.py
|
taalexander/ibm-quantum-schemas
|
93f46b75fe41b5f3be00606612ec2387621df092
|
[
"Apache-2.0"
] | 20
|
2020-10-21T11:31:54.000Z
|
2022-03-01T15:07:55.000Z
|
# This code is part of Qiskit.
#
# (C) Copyright IBM 2020
#
# This code is licensed under the Apache License, Version 2.0. You may
# obtain a copy of this license in the LICENSE.txt file in the root directory
# of this source tree or at http://www.apache.org/licenses/LICENSE-2.0.
#
# Any modifications or derivative works of this code must retain this
# copyright notice, and modified files need to carry a notice indicating
# that they have been altered from the originals.
import json
import os
import unittest
import ddt
import jsonschema
import fastjsonschema
import jsonschema_rs
LOADERS = [fastjsonschema.compile,
jsonschema_rs.JSONSchema]
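# Both entries compile a schema from a loaded dict and raise if the schema
# itself is malformed, so asserting a non-None return below is enough to
# show that each schema file parses and compiles cleanly.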
SCHEMAS_PATH = os.path.join(
os.path.dirname(os.path.dirname(os.path.abspath(__file__))),
'schemas')
@ddt.ddt
class TestFastJSONSchemaLoad(unittest.TestCase):
@ddt.data(*LOADERS)
def test_backend_configuration_load(self, loader):
with open(os.path.join(SCHEMAS_PATH,
'backend_configuration_schema.json'),
'r') as fd:
self.assertIsNotNone(loader(json.load(fd)))
@ddt.data(*LOADERS)
def test_backend_properties_load(self, loader):
with open(os.path.join(SCHEMAS_PATH,
'backend_properties_schema.json'),
'r') as fd:
self.assertIsNotNone(loader(json.load(fd)))
@ddt.data(*LOADERS)
def test_backend_status_load(self, loader):
with open(os.path.join(SCHEMAS_PATH,
'backend_status_schema.json'),
'r') as fd:
self.assertIsNotNone(loader(json.load(fd)))
@ddt.data(*LOADERS)
def test_default_pulse_config_load(self, loader):
with open(os.path.join(SCHEMAS_PATH,
'default_pulse_configuration_schema.json'),
'r') as fd:
self.assertIsNotNone(loader(json.load(fd)))
@ddt.data(*LOADERS)
def test_ibmq_device_qobj_load(self, loader):
with open(os.path.join(SCHEMAS_PATH,
'ibmq_device_qobj_schema.json'),
'r') as fd:
self.assertIsNotNone(loader(json.load(fd)))
@ddt.data(*LOADERS)
def test_ibmq_simulator_qobj_schema_load(self, loader):
with open(os.path.join(SCHEMAS_PATH,
'ibmq_simulator_qobj_schema.json'),
'r') as fd:
self.assertIsNotNone(loader(json.load(fd)))
@ddt.data(*LOADERS)
def test_job_status_load(self, loader):
with open(os.path.join(SCHEMAS_PATH,
'job_status_schema.json'),
'r') as fd:
self.assertIsNotNone(loader(json.load(fd)))
@ddt.data(*LOADERS)
def test_qobj_load(self, loader):
with open(os.path.join(SCHEMAS_PATH,
'qobj_schema.json'),
'r') as fd:
self.assertIsNotNone(loader(json.load(fd)))
@ddt.data(*LOADERS)
def test_result_schema_load(self, loader):
with open(os.path.join(SCHEMAS_PATH,
'result_schema.json'),
'r') as fd:
self.assertIsNotNone(loader(json.load(fd)))
class TestJSONSchemaLoad(unittest.TestCase):
    def test_backend_configuration_load(self):
with open(os.path.join(SCHEMAS_PATH,
'backend_configuration_schema.json'),
'r') as fd:
self.assertIsNone(jsonschema.Draft4Validator.check_schema(
json.load(fd)))
def test_backend_properties_load(self):
with open(os.path.join(SCHEMAS_PATH,
'backend_properties_schema.json'),
'r') as fd:
self.assertIsNone(jsonschema.Draft4Validator.check_schema(
json.load(fd)))
def test_backend_status_load(self):
with open(os.path.join(SCHEMAS_PATH,
'backend_status_schema.json'),
'r') as fd:
self.assertIsNone(jsonschema.Draft4Validator.check_schema(
json.load(fd)))
def test_default_pulse_config_load(self):
with open(os.path.join(SCHEMAS_PATH,
'default_pulse_configuration_schema.json'),
'r') as fd:
self.assertIsNone(jsonschema.Draft4Validator.check_schema(
json.load(fd)))
def test_ibmq_device_qobj_load(self):
with open(os.path.join(SCHEMAS_PATH,
'ibmq_device_qobj_schema.json'),
'r') as fd:
self.assertIsNone(jsonschema.Draft4Validator.check_schema(
json.load(fd)))
def test_ibmq_simulator_qobj_schema_load(self):
with open(os.path.join(SCHEMAS_PATH,
'ibmq_simulator_qobj_schema.json'),
'r') as fd:
self.assertIsNone(jsonschema.Draft4Validator.check_schema(
json.load(fd)))
def test_job_status_load(self):
with open(os.path.join(SCHEMAS_PATH,
'job_status_schema.json'),
'r') as fd:
self.assertIsNone(jsonschema.Draft4Validator.check_schema(
json.load(fd)))
def test_qobj_load(self):
with open(os.path.join(SCHEMAS_PATH,
'qobj_schema.json'),
'r') as fd:
self.assertIsNone(jsonschema.Draft4Validator.check_schema(
json.load(fd)))
def test_result_schema_load(self):
with open(os.path.join(SCHEMAS_PATH,
'result_schema.json'),
'r') as fd:
self.assertIsNone(jsonschema.Draft4Validator.check_schema(
json.load(fd)))
| 36.179012
| 77
| 0.58403
| 668
| 5,861
| 4.926647
| 0.157186
| 0.082042
| 0.057733
| 0.076572
| 0.807353
| 0.79763
| 0.765421
| 0.750836
| 0.742935
| 0.740504
| 0
| 0.004237
| 0.315475
| 5,861
| 161
| 78
| 36.403727
| 0.816052
| 0.077291
| 0
| 0.731707
| 0
| 0
| 0.0947
| 0.077465
| 0
| 0
| 0
| 0
| 0.146341
| 1
| 0.146341
| false
| 0
| 0.056911
| 0
| 0.219512
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
d5d23d6f27e200158bc79dc6107085a7149db556
| 22,242
|
py
|
Python
|
sim2600/simTIA.py
|
gmarino2048/Sim2600
|
8ce51dce4c28668ca19a27fb9e9e59c5acda71aa
|
[
"CC0-1.0"
] | 3
|
2016-06-07T15:11:09.000Z
|
2020-10-07T20:52:09.000Z
|
sim2600/simTIA.py
|
gmarino2048/Sim2600
|
8ce51dce4c28668ca19a27fb9e9e59c5acda71aa
|
[
"CC0-1.0"
] | null | null | null |
sim2600/simTIA.py
|
gmarino2048/Sim2600
|
8ce51dce4c28668ca19a27fb9e9e59c5acda71aa
|
[
"CC0-1.0"
] | 2
|
2019-08-08T03:43:23.000Z
|
2021-02-27T18:52:21.000Z
|
# Copyright (c) 2014 Greg James, Visual6502.org
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from array import array
import params
# Choose between two flavors of simulation. One uses sets
# to track the groups of wires switched together by transistors.
# The other uses lists.
from circuitSimulatorUsingLists import CircuitSimulator as CircuitSimulatorLists
from circuitSimulatorUsingSets import CircuitSimulator as CircuitSimulatorSets
import pyximport; pyximport.install()
from mycircuitsimulator import CircuitSimulator as MyCircuitSimulator
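# --- Editor's sketch: what "sets vs. lists" means here. A hypothetical
# helper, not part of the shipped simulators. Both flavors flood-fill the
# group of wires tied together by conducting transistors; they differ only
# in the container used to collect the group:
def _group_wires(connected, start, use_set=True):
    # connected: dict mapping wire index -> iterable of wires tied to it
    group = set() if use_set else []
    add = group.add if use_set else group.append
    frontier = [start]
    while frontier:
        w = frontier.pop()
        if w not in group:  # O(1) for the set flavor, O(n) for the list flavor
            add(w)
            frontier.extend(connected.get(w, ()))
    return group
# Small groups tend to favor the list (low constant factor, cache-friendly);
# large groups favor the set's constant-time membership test.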
class SimTIA(CircuitSimulatorLists):
def __init__(self):
CircuitSimulatorLists.__init__(self)
self.loadCircuit(params.chipTIAFile)
self.colLumToRGB8LUT = []
# For debugging or inspecting, this can be used to hold
# the last values written to our write-only control addresses.
self.lastControlValue = array('B', [0] * (0x2C + 1))
self.initColLumLUT()
# Temporarily inhibit TIA from driving DB6 and DB7
self.setHighWN('CS3')
self.setHighWN('CS0')
self.clocksForResetLow = 8
self.recalcAllWires()
# The pads of each chip in the chip simulations can be
# accessed by their name, like 'RDY' or 'CLK0', or by
# their wire index. Accessing by wire index is faster
# so we cache indices here for certain named wires.
self.addressBusPads = []
for padName in params.tiaAddressBusPadNames:
wireIndex = self.getWireIndex(padName)
self.addressBusPads.append(wireIndex)
self.dataBusPads = []
for padName in params.dataBusPadNames:
wireIndex = self.getWireIndex(padName)
self.dataBusPads.append(wireIndex)
self.dataBusDrivers = []
for padName in params.tiaDataBusDrivers:
wireIndex = self.getWireIndex(padName)
self.dataBusDrivers.append(wireIndex)
self.inputPads = []
for padName in params.tiaInputPadNames:
wireIndex = self.getWireIndex(padName)
self.inputPads.append(wireIndex)
self.indDB6_drvLo = self.getWireIndex('DB6_drvLo')
self.indDB6_drvHi = self.getWireIndex('DB6_drvHi')
self.indDB7_drvLo = self.getWireIndex('DB7_drvLo')
self.indDB7_drvHi = self.getWireIndex('DB7_drvHi')
self.padIndCLK0 = self.getWireIndex('CLK0')
self.padIndCLK2 = self.getWireIndex('CLK2')
self.padIndPH0 = self.getWireIndex('PH0')
self.padIndCS0 = self.getWireIndex('CS0')
self.padIndCS3 = self.getWireIndex('CS3')
self.padIndsCS0CS3 = [self.padIndCS0, self.padIndCS3]
self.padIndRW = self.getWireIndex('R/W')
self.padIndDEL = self.getWireIndex('del')
# The TIA's RDY_low wire is high when it's pulling the
# 6502's RDY line to ground. RDY_lowCtrl controls RDY_low.
self.indRDY_lowCtrl = self.getWireIndex('RDY_lowCtrl')
self.vblank = self.getWireIndex('VBLANK')
self.vsync = self.getWireIndex('VSYNC')
self.wsync = self.getWireIndex('WSYNC')
self.rsync = self.getWireIndex('RSYNC')
# Wires that govern the output pixel's luminance and color
self.L0_lowCtrl = self.getWireIndex('L0_lowCtrl')
self.L1_lowCtrl = self.getWireIndex('L1_lowCtrl')
self.L2_lowCtrl = self.getWireIndex('L2_lowCtrl')
self.colcnt_t0 = self.getWireIndex('COLCNT_T0')
self.colcnt_t1 = self.getWireIndex('COLCNT_T1')
self.colcnt_t2 = self.getWireIndex('COLCNT_T2')
self.colcnt_t3 = self.getWireIndex('COLCNT_T3')
def getTIAStateStr1(self):
sigs = {'LUM':['L0_lowCtrl', 'L1_lowCtrl', 'L2_lowCtrl'],
'COL':['COLCNT_T0','COLCNT_T1','COLCNT_T2','COLCNT_T3']}
report = ''
for s in sigs:
sStr = ''
for probe in sigs[s]:
if self.isHighWN(probe):
sStr += '1'
else:
sStr += '0'
report += s + ' ' + sStr + ' '
return report
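# Example (hypothetical probe values): with only L1_lowCtrl high and
# COLCNT_T1/COLCNT_T2 high, one possible output is 'LUM 010 COL 0110 '.
# Whether LUM or COL prints first follows dict iteration order, which is
# arbitrary for plain dicts on Python 2.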
def initColLumLUT(self):
# Colors from http://en.wikipedia.org/wiki/Television_Interface_Adapter
col = [[]] * 16
col[0] = [(0,0,0), (236, 236, 236)]
col[1] = [(68, 68, 0), (252, 252, 104)]
col[2] = [(112, 40, 0), (236, 200, 120)]
col[3] = [(132, 24, 0), (252, 188, 148)]
col[4] = [(136, 0, 0), (252, 180, 180)]
col[5] = [(120, 0, 92), (236, 176, 224)]
col[6] = [(72, 0, 120), (212, 176, 252)]
col[7] = [(20, 0, 132), (188, 180, 252)]
col[8] = [(0, 0, 136), (164, 164, 252)]
col[9] = [(0, 24, 124), (164, 200, 252)]
col[10] = [(0, 44, 92), (164, 224, 252)]
col[11] = [(0, 60, 44), (164, 252, 212)]
col[12] = [(0, 60, 0), (184, 252, 184)]
col[13] = [(20, 56, 0), (200, 252, 164)]
col[14] = [(44, 48, 0), (224, 236, 156)]
col[15] = [(68, 40, 0), (252, 224, 140)]
# Interpolate linearly between the color pairs above using the 3-bit luminance.
# Populate the lookup table addressed by a 7-bit col-lum value,
# where the color bits are most significant and the luminance
# bits are least significant.
self.colLumToRGB8LUT = [0]*128
for intKey in xrange(len(col)):
colPair = col[intKey]
start = colPair[0]
end = colPair[1]
dif = ()
for i, startv in enumerate(start):
# result is tuple of same dim as 'start' and 'end'
dif += (end[i] - startv,)
# lumInt from 0 to 7
for lumInt in xrange(8):
lumFrac = lumInt / 7.0
ctup = ()
for i, startv in enumerate(start):
ctup += (int(startv + dif[i]*lumFrac),)
colLumInd = (intKey << 3) + lumInt
self.colLumToRGB8LUT[colLumInd] = ctup
def get3BitLuminance(self):
lum = 7
# If L0_lowCtrl is high, then the pad for the least significant bit of
# luminance is pulled low, so subtract 1 from the luminance
if self.isHigh(self.L0_lowCtrl):
lum -= 1
# If L1_lowCtrl is high, then the pad for the twos bit of luminance
# is pulled low, so subtract 2 from the luminance
if self.isHigh(self.L1_lowCtrl):
lum -= 2
# If the most significant bit is pulled low, subtract 4
if self.isHigh(self.L2_lowCtrl):
lum -= 4
return lum
def get4BitColor(self):
col = 0
if self.isHigh(self.colcnt_t0):
col += 1
if self.isHigh(self.colcnt_t1):
col += 2
if self.isHigh(self.colcnt_t2):
col += 4
if self.isHigh(self.colcnt_t3):
col += 8
return col
def getColorRGBA8(self):
lum = self.get3BitLuminance()
col = self.get4BitColor()
# Take the low 4 bits of col, shift them 3 bits to the left,
# and add the low 3 bits of luminance
index = ((col & 0xF) << 3) + (lum & 0x7)
rgb8Tuple = self.colLumToRGB8LUT[index]
return (rgb8Tuple[0] << 24) | (rgb8Tuple[1] << 16) | \
(rgb8Tuple[2] << 8) | 0xFF
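# --- Editor's worked example of the 7-bit packing above ---
# col = 0b1010 (hue 10) and lum = 0b101 (5) give
#   index = (10 << 3) + 5 = 85,
# so colLumToRGB8LUT[85] holds hue 10 interpolated 5/7 of the way from its
# dark endpoint to its bright endpoint, and getColorRGBA8() packs that RGB
# tuple as 0xRRGGBBFF with a fully opaque alpha byte.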
class SimTIASets(CircuitSimulatorSets):
def __init__(self):
CircuitSimulatorSets.__init__(self)
self.loadCircuit(params.chipTIAFile)
self.colLumToRGB8LUT = []
# For debugging or inspecting, this can be used to hold
# the last values written to our write-only control addresses.
self.lastControlValue = array('B', [0] * (0x2C + 1))
self.initColLumLUT()
# Temporarily inhibit TIA from driving DB6 and DB7
self.setHighWN('CS3')
self.setHighWN('CS0')
self.clocksForResetLow = 8
self.recalcAllWires()
# The pads of each chip in the chip simulations can be
# accessed by their name, like 'RDY' or 'CLK0', or by
# their wire index. Accessing by wire index is faster
# so we cache indices here for certain named wires.
self.addressBusPads = []
for padName in params.tiaAddressBusPadNames:
wireIndex = self.getWireIndex(padName)
self.addressBusPads.append(wireIndex)
self.dataBusPads = []
for padName in params.dataBusPadNames:
wireIndex = self.getWireIndex(padName)
self.dataBusPads.append(wireIndex)
self.dataBusDrivers = []
for padName in params.tiaDataBusDrivers:
wireIndex = self.getWireIndex(padName)
self.dataBusDrivers.append(wireIndex)
self.inputPads = []
for padName in params.tiaInputPadNames:
wireIndex = self.getWireIndex(padName)
self.inputPads.append(wireIndex)
self.indDB6_drvLo = self.getWireIndex('DB6_drvLo')
self.indDB6_drvHi = self.getWireIndex('DB6_drvHi')
self.indDB7_drvLo = self.getWireIndex('DB7_drvLo')
self.indDB7_drvHi = self.getWireIndex('DB7_drvHi')
self.padIndCLK0 = self.getWireIndex('CLK0')
self.padIndCLK2 = self.getWireIndex('CLK2')
self.padIndPH0 = self.getWireIndex('PH0')
self.padIndCS0 = self.getWireIndex('CS0')
self.padIndCS3 = self.getWireIndex('CS3')
self.padIndsCS0CS3 = [self.padIndCS0, self.padIndCS3]
self.padIndRW = self.getWireIndex('R/W')
self.padIndDEL = self.getWireIndex('del')
# The TIA's RDY_low wire is high when it's pulling the
# 6502's RDY line to ground. RDY_lowCtrl controls RDY_low.
self.indRDY_lowCtrl = self.getWireIndex('RDY_lowCtrl')
self.vblank = self.getWireIndex('VBLANK')
self.vsync = self.getWireIndex('VSYNC')
self.wsync = self.getWireIndex('WSYNC')
self.rsync = self.getWireIndex('RSYNC')
# Wires that govern the output pixel's luminance and color
self.L0_lowCtrl = self.getWireIndex('L0_lowCtrl')
self.L1_lowCtrl = self.getWireIndex('L1_lowCtrl')
self.L2_lowCtrl = self.getWireIndex('L2_lowCtrl')
self.colcnt_t0 = self.getWireIndex('COLCNT_T0')
self.colcnt_t1 = self.getWireIndex('COLCNT_T1')
self.colcnt_t2 = self.getWireIndex('COLCNT_T2')
self.colcnt_t3 = self.getWireIndex('COLCNT_T3')
def getTIAStateStr1(self):
sigs = {'LUM':['L0_lowCtrl', 'L1_lowCtrl', 'L2_lowCtrl'],
'COL':['COLCNT_T0','COLCNT_T1','COLCNT_T2','COLCNT_T3']}
report = ''
for s in sigs:
sStr = ''
for probe in sigs[s]:
if self.isHighWN(probe):
sStr += '1'
else:
sStr += '0'
report += s + ' ' + sStr + ' '
return report
def initColLumLUT(self):
# Colors from http://en.wikipedia.org/wiki/Television_Interface_Adapter
col = [[]] * 16
col[0] = [(0,0,0), (236, 236, 236)]
col[1] = [(68, 68, 0), (252, 252, 104)]
col[2] = [(112, 40, 0), (236, 200, 120)]
col[3] = [(132, 24, 0), (252, 188, 148)]
col[4] = [(136, 0, 0), (252, 180, 180)]
col[5] = [(120, 0, 92), (236, 176, 224)]
col[6] = [(72, 0, 120), (212, 176, 252)]
col[7] = [(20, 0, 132), (188, 180, 252)]
col[8] = [(0, 0, 136), (164, 164, 252)]
col[9] = [(0, 24, 124), (164, 200, 252)]
col[10] = [(0, 44, 92), (164, 224, 252)]
col[11] = [(0, 60, 44), (164, 252, 212)]
col[12] = [(0, 60, 0), (184, 252, 184)]
col[13] = [(20, 56, 0), (200, 252, 164)]
col[14] = [(44, 48, 0), (224, 236, 156)]
col[15] = [(68, 40, 0), (252, 224, 140)]
# Interpolate linearly between the color pairs above using the 3-bit luminance.
# Populate the lookup table addressed by a 7-bit col-lum value,
# where the color bits are most significant and the luminance
# bits are least significant.
self.colLumToRGB8LUT = [0]*128
for intKey in xrange(len(col)):
colPair = col[intKey]
start = colPair[0]
end = colPair[1]
dif = ()
for i, startv in enumerate(start):
# result is tuple of same dim as 'start' and 'end'
dif += (end[i] - startv,)
# lumInt from 0 to 7
for lumInt in xrange(8):
lumFrac = lumInt / 7.0
ctup = ()
for i, startv in enumerate(start):
ctup += (int(startv + dif[i]*lumFrac),)
colLumInd = (intKey << 3) + lumInt
self.colLumToRGB8LUT[colLumInd] = ctup
def get3BitLuminance(self):
lum = 7
# If L0_lowCtrl is high, then the pad for the least significant bit of
# luminance is pulled low, so subtract 1 from the luminance
if self.isHigh(self.L0_lowCtrl):
lum -= 1
# If L1_lowCtrl is high, then the pad for the twos bit of luminance
# is pulled low, so subtract 2 from the luminance
if self.isHigh(self.L1_lowCtrl):
lum -= 2
# If the most significant bit is pulled low, subtract 4
if self.isHigh(self.L2_lowCtrl):
lum -= 4
return lum
def get4BitColor(self):
col = 0
if self.isHigh(self.colcnt_t0):
col += 1
if self.isHigh(self.colcnt_t1):
col += 2
if self.isHigh(self.colcnt_t2):
col += 4
if self.isHigh(self.colcnt_t3):
col += 8
return col
def getColorRGBA8(self):
lum = self.get3BitLuminance()
col = self.get4BitColor()
# Take the low 4 bits of col, shift them 3 bits to the left,
# and add the low 3 bits of luminance
index = ((col & 0xF) << 3) + (lum & 0x7)
rgb8Tuple = self.colLumToRGB8LUT[index]
return (rgb8Tuple[0] << 24) | (rgb8Tuple[1] << 16) | \
(rgb8Tuple[2] << 8) | 0xFF
class MySimTIA(MyCircuitSimulator):
def __init__(self):
MyCircuitSimulator.__init__(self)
self.loadCircuit(params.chipTIAFile)
self.colLumToRGB8LUT = []
# For debugging or inspecting, this can be used to hold
# the last values written to our write-only control addresses.
self.lastControlValue = array('B', [0] * (0x2C + 1))
self.initColLumLUT()
# Temporarily inhibit TIA from driving DB6 and DB7
self.setHighWN('CS3')
self.setHighWN('CS0')
self.clocksForResetLow = 8
self.recalcAllWires()
# The pads of each chip in the chip simulations can be
# accessed by their name, like 'RDY' or 'CLK0', or by
# their wire index. Accessing by wire index is faster
# so we cache indices here for certain named wires.
self.addressBusPads = []
for padName in params.tiaAddressBusPadNames:
wireIndex = self.getWireIndex(padName)
self.addressBusPads.append(wireIndex)
self.dataBusPads = []
for padName in params.dataBusPadNames:
wireIndex = self.getWireIndex(padName)
self.dataBusPads.append(wireIndex)
self.dataBusDrivers = []
for padName in params.tiaDataBusDrivers:
wireIndex = self.getWireIndex(padName)
self.dataBusDrivers.append(wireIndex)
self.inputPads = []
for padName in params.tiaInputPadNames:
wireIndex = self.getWireIndex(padName)
self.inputPads.append(wireIndex)
self.indDB6_drvLo = self.getWireIndex('DB6_drvLo')
self.indDB6_drvHi = self.getWireIndex('DB6_drvHi')
self.indDB7_drvLo = self.getWireIndex('DB7_drvLo')
self.indDB7_drvHi = self.getWireIndex('DB7_drvHi')
self.padIndCLK0 = self.getWireIndex('CLK0')
self.padIndCLK2 = self.getWireIndex('CLK2')
self.padIndPH0 = self.getWireIndex('PH0')
self.padIndCS0 = self.getWireIndex('CS0')
self.padIndCS3 = self.getWireIndex('CS3')
self.padIndsCS0CS3 = [self.padIndCS0, self.padIndCS3]
self.padIndRW = self.getWireIndex('R/W')
self.padIndDEL = self.getWireIndex('del')
# The TIA's RDY_low wire is high when it's pulling the
# 6502's RDY line to ground. RDY_lowCtrl controls RDY_low.
self.indRDY_lowCtrl = self.getWireIndex('RDY_lowCtrl')
self.vblank = self.getWireIndex('VBLANK')
self.vsync = self.getWireIndex('VSYNC')
self.wsync = self.getWireIndex('WSYNC')
self.rsync = self.getWireIndex('RSYNC')
# Wires that govern the output pixel's luminance and color
self.L0_lowCtrl = self.getWireIndex('L0_lowCtrl')
self.L1_lowCtrl = self.getWireIndex('L1_lowCtrl')
self.L2_lowCtrl = self.getWireIndex('L2_lowCtrl')
self.colcnt_t0 = self.getWireIndex('COLCNT_T0')
self.colcnt_t1 = self.getWireIndex('COLCNT_T1')
self.colcnt_t2 = self.getWireIndex('COLCNT_T2')
self.colcnt_t3 = self.getWireIndex('COLCNT_T3')
def getTIAStateStr1(self):
sigs = {'LUM':['L0_lowCtrl', 'L1_lowCtrl', 'L2_lowCtrl'],
'COL':['COLCNT_T0','COLCNT_T1','COLCNT_T2','COLCNT_T3']}
report = ''
for s in sigs:
sStr = ''
for probe in sigs[s]:
if self.isHighWN(probe):
sStr += '1'
else:
sStr += '0'
report += s + ' ' + sStr + ' '
return report
def initColLumLUT(self):
# Colors from http://en.wikipedia.org/wiki/Television_Interface_Adapter
col = [[]] * 16
col[0] = [(0,0,0), (236, 236, 236)]
col[1] = [(68, 68, 0), (252, 252, 104)]
col[2] = [(112, 40, 0), (236, 200, 120)]
col[3] = [(132, 24, 0), (252, 188, 148)]
col[4] = [(136, 0, 0), (252, 180, 180)]
col[5] = [(120, 0, 92), (236, 176, 224)]
col[6] = [(72, 0, 120), (212, 176, 252)]
col[7] = [(20, 0, 132), (188, 180, 252)]
col[8] = [(0, 0, 136), (164, 164, 252)]
col[9] = [(0, 24, 124), (164, 200, 252)]
col[10] = [(0, 44, 92), (164, 224, 252)]
col[11] = [(0, 60, 44), (164, 252, 212)]
col[12] = [(0, 60, 0), (184, 252, 184)]
col[13] = [(20, 56, 0), (200, 252, 164)]
col[14] = [(44, 48, 0), (224, 236, 156)]
col[15] = [(68, 40, 0), (252, 224, 140)]
# Interpolate linearly between the color pairs above using the 3-bit luminance.
# Populate the lookup table addressed by a 7-bit col-lum value,
# where the color bits are most significant and the luminance
# bits are least significant.
self.colLumToRGB8LUT = [0]*128
for intKey in xrange(len(col)):
colPair = col[intKey]
start = colPair[0]
end = colPair[1]
dif = ()
for i, startv in enumerate(start):
# result is tuple of same dim as 'start' and 'end'
dif += (end[i] - startv,)
# lumInt from 0 to 7
for lumInt in xrange(8):
lumFrac = lumInt / 7.0
ctup = ()
for i, startv in enumerate(start):
ctup += (int(startv + dif[i]*lumFrac),)
colLumInd = (intKey << 3) + lumInt
self.colLumToRGB8LUT[colLumInd] = ctup
def get3BitLuminance(self):
lum = 7
# If L0_lowCtrl is high, then the pad for the least significant bit of
# luminance is pulled low, so subtract 1 from the luminance
if self.isHigh(self.L0_lowCtrl):
lum -= 1
# If L1_lowCtrl is high, then the pad for the twos bit of luminance
# is pulled low, so subtract 2 from the luminance
if self.isHigh(self.L1_lowCtrl):
lum -= 2
# If the most significant bit is pulled low, subtract 4
if self.isHigh(self.L2_lowCtrl):
lum -= 4
return lum
def get4BitColor(self):
col = 0
if self.isHigh(self.colcnt_t0):
col += 1
if self.isHigh(self.colcnt_t1):
col += 2
if self.isHigh(self.colcnt_t2):
col += 4
if self.isHigh(self.colcnt_t3):
col += 8
return col
def getColorRGBA8(self):
lum = self.get3BitLuminance()
col = self.get4BitColor()
# Take the low 4 bits of col, shift them 3 bits to the left,
# and add the low 3 bits of luminance
index = ((col & 0xF) << 3) + (lum & 0x7)
rgb8Tuple = self.colLumToRGB8LUT[index]
return (rgb8Tuple[0] << 24) | (rgb8Tuple[1] << 16) | \
(rgb8Tuple[2] << 8) | 0xFF
| 39.717857
| 80
| 0.568339
| 2,669
| 22,242
| 4.676658
| 0.134507
| 0.10383
| 0.020189
| 0.026919
| 0.885275
| 0.885275
| 0.885275
| 0.885275
| 0.885275
| 0.885275
| 0
| 0.07485
| 0.319441
| 22,242
| 559
| 81
| 39.788909
| 0.749752
| 0.225205
| 0
| 0.968254
| 0
| 0
| 0.042014
| 0
| 0
| 0
| 0.002451
| 0
| 0
| 1
| 0.047619
| false
| 0
| 0.015873
| 0
| 0.103175
| 0
| 0
| 0
| 0
| null
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 9
| 910d96416403b36a8c8dc22b47e3199c27562d01
| 11,836
| py
| Python
| location_register/migrations/0019_auto_20210305_1702.py
| OlexandrTopuzov/Data_converter
| 0ac2319ccaae790af35ab2202724c65d83d32ecc
| ["MIT"]
| null
| null
| null
| location_register/migrations/0019_auto_20210305_1702.py
| OlexandrTopuzov/Data_converter
| 0ac2319ccaae790af35ab2202724c65d83d32ecc
| ["MIT"]
| null
| null
| null
| location_register/migrations/0019_auto_20210305_1702.py
| OlexandrTopuzov/Data_converter
| 0ac2319ccaae790af35ab2202724c65d83d32ecc
| ["MIT"]
| null
| null
| null
|
# Generated by Django 3.0.7 on 2021-03-05 17:02
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('location_register', '0018_auto_20210215_1042'),
]
operations = [
migrations.AlterField(
model_name='country',
name='created_at',
field=models.DateTimeField(auto_now_add=True, help_text='When the object was created. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.'),
),
migrations.AlterField(
model_name='country',
name='updated_at',
field=models.DateTimeField(auto_now=True, help_text='When the object was updated. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.', null=True),
),
migrations.AlterField(
model_name='drvato',
name='created_at',
field=models.DateTimeField(auto_now_add=True, help_text='When the object was created. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.'),
),
migrations.AlterField(
model_name='drvato',
name='updated_at',
field=models.DateTimeField(auto_now=True, help_text='When the object was updated. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.', null=True),
),
migrations.AlterField(
model_name='drvbuilding',
name='created_at',
field=models.DateTimeField(auto_now_add=True, help_text='When the object was created. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.'),
),
migrations.AlterField(
model_name='drvbuilding',
name='updated_at',
field=models.DateTimeField(auto_now=True, help_text='When the object was updated. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.', null=True),
),
migrations.AlterField(
model_name='drvcouncil',
name='created_at',
field=models.DateTimeField(auto_now_add=True, help_text='When the object was created. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.'),
),
migrations.AlterField(
model_name='drvcouncil',
name='updated_at',
field=models.DateTimeField(auto_now=True, help_text='When the object was updated. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.', null=True),
),
migrations.AlterField(
model_name='drvdistrict',
name='created_at',
field=models.DateTimeField(auto_now_add=True, help_text='When the object was created. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.'),
),
migrations.AlterField(
model_name='drvdistrict',
name='updated_at',
field=models.DateTimeField(auto_now=True, help_text='When the object was updated. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.', null=True),
),
migrations.AlterField(
model_name='drvregion',
name='created_at',
field=models.DateTimeField(auto_now_add=True, help_text='When the object was created. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.'),
),
migrations.AlterField(
model_name='drvregion',
name='updated_at',
field=models.DateTimeField(auto_now=True, help_text='When the object was updated. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.', null=True),
),
migrations.AlterField(
model_name='drvstreet',
name='created_at',
field=models.DateTimeField(auto_now_add=True, help_text='When the object was created. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.'),
),
migrations.AlterField(
model_name='drvstreet',
name='updated_at',
field=models.DateTimeField(auto_now=True, help_text='When the object was updated. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.', null=True),
),
migrations.AlterField(
model_name='koatuucategory',
name='created_at',
field=models.DateTimeField(auto_now_add=True, help_text='When the object was created. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.'),
),
migrations.AlterField(
model_name='koatuucategory',
name='updated_at',
field=models.DateTimeField(auto_now=True, help_text='When the object was updated. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.', null=True),
),
migrations.AlterField(
model_name='koatuufirstlevel',
name='created_at',
field=models.DateTimeField(auto_now_add=True, help_text='When the object was created. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.'),
),
migrations.AlterField(
model_name='koatuufirstlevel',
name='updated_at',
field=models.DateTimeField(auto_now=True, help_text='When the object was updated. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.', null=True),
),
migrations.AlterField(
model_name='koatuufourthlevel',
name='created_at',
field=models.DateTimeField(auto_now_add=True, help_text='When the object was created. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.'),
),
migrations.AlterField(
model_name='koatuufourthlevel',
name='updated_at',
field=models.DateTimeField(auto_now=True, help_text='When the object was updated. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.', null=True),
),
migrations.AlterField(
model_name='koatuusecondlevel',
name='created_at',
field=models.DateTimeField(auto_now_add=True, help_text='When the object was created. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.'),
),
migrations.AlterField(
model_name='koatuusecondlevel',
name='updated_at',
field=models.DateTimeField(auto_now=True, help_text='When the object was updated. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.', null=True),
),
migrations.AlterField(
model_name='koatuuthirdlevel',
name='created_at',
field=models.DateTimeField(auto_now_add=True, help_text='When the object was created. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.'),
),
migrations.AlterField(
model_name='koatuuthirdlevel',
name='updated_at',
field=models.DateTimeField(auto_now=True, help_text='When the object was updated. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.', null=True),
),
migrations.AlterField(
model_name='ratucity',
name='created_at',
field=models.DateTimeField(auto_now_add=True, help_text='When the object was created. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.'),
),
migrations.AlterField(
model_name='ratucity',
name='koatuu',
field=models.CharField(help_text='Code of the city, according to the state Classifier of objects of administrative-territorial organization of Ukraine', max_length=10, null=True, unique=True, verbose_name='code KOATUU'),
),
migrations.AlterField(
model_name='ratucity',
name='name',
field=models.CharField(help_text='City name', max_length=100, verbose_name='name'),
),
migrations.AlterField(
model_name='ratucity',
name='updated_at',
field=models.DateTimeField(auto_now=True, help_text='When the object was updated. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.', null=True),
),
migrations.AlterField(
model_name='ratucitydistrict',
name='created_at',
field=models.DateTimeField(auto_now_add=True, help_text='When the object was created. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.'),
),
migrations.AlterField(
model_name='ratucitydistrict',
name='koatuu',
field=models.CharField(help_text='Code of the city district, according to the state Classifier of objects of administrative-territorial organization of Ukraine', max_length=10, null=True, unique=True, verbose_name='code KOATUU'),
),
migrations.AlterField(
model_name='ratucitydistrict',
name='name',
field=models.CharField(help_text='Name of the city district', max_length=100, verbose_name='name'),
),
migrations.AlterField(
model_name='ratucitydistrict',
name='updated_at',
field=models.DateTimeField(auto_now=True, help_text='When the object was updated. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.', null=True),
),
migrations.AlterField(
model_name='ratudistrict',
name='created_at',
field=models.DateTimeField(auto_now_add=True, help_text='When the object was created. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.'),
),
migrations.AlterField(
model_name='ratudistrict',
name='koatuu',
field=models.CharField(help_text='Code of the district, according to the state Classifier of objects of administrative-territorial organization of Ukraine', max_length=10, null=True, unique=True, verbose_name='code KOATUU'),
),
migrations.AlterField(
model_name='ratudistrict',
name='name',
field=models.CharField(help_text='Name of the district', max_length=100, verbose_name='name'),
),
migrations.AlterField(
model_name='ratudistrict',
name='updated_at',
field=models.DateTimeField(auto_now=True, help_text='When the object was updated. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.', null=True),
),
migrations.AlterField(
model_name='raturegion',
name='created_at',
field=models.DateTimeField(auto_now_add=True, help_text='When the object was created. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.'),
),
migrations.AlterField(
model_name='raturegion',
name='koatuu',
field=models.CharField(help_text='Code of the region, according to the state Classifier of objects of administrative-territorial organization of Ukraine', max_length=10, null=True, unique=True, verbose_name='code KOATUU'),
),
migrations.AlterField(
model_name='raturegion',
name='name',
field=models.CharField(help_text='Name of the region', max_length=30, unique=True, verbose_name='name'),
),
migrations.AlterField(
model_name='raturegion',
name='updated_at',
field=models.DateTimeField(auto_now=True, help_text='When the object was updated. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.', null=True),
),
migrations.AlterField(
model_name='ratustreet',
name='created_at',
field=models.DateTimeField(auto_now_add=True, help_text='When the object was created. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.'),
),
migrations.AlterField(
model_name='ratustreet',
name='name',
field=models.CharField(help_text='Street name', max_length=100, verbose_name='name'),
),
migrations.AlterField(
model_name='ratustreet',
name='updated_at',
field=models.DateTimeField(auto_now=True, help_text='When the object was updated. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.', null=True),
),
migrations.AlterField(
model_name='zipcode',
name='created_at',
field=models.DateTimeField(auto_now_add=True, help_text='When the object was created. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.'),
),
migrations.AlterField(
model_name='zipcode',
name='updated_at',
field=models.DateTimeField(auto_now=True, help_text='When the object was updated. In YYYY-MM-DDTHH:mm:ss.SSSSSSZ format.', null=True),
),
]
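# --- Editor's sketch (hypothetical; not the project's actual models.py) ---
# The created_at/updated_at pairs altered above follow Django's standard
# auto-timestamp pattern. The corresponding model fields would look roughly like:
#
#     class Country(models.Model):
#         created_at = models.DateTimeField(auto_now_add=True)         # set once, on INSERT
#         updated_at = models.DateTimeField(auto_now=True, null=True)  # refreshed on every save()
#
# auto_now_add and auto_now are mutually exclusive on a single field; the
# help_text above documents the serialized ISO 8601 form of both.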
| 49.523013
| 241
| 0.628675
| 1,398
| 11,836
| 5.178112
| 0.067954
| 0.124327
| 0.155408
| 0.180274
| 0.971543
| 0.971543
| 0.896533
| 0.880508
| 0.880508
| 0.880508
| 0
| 0.006018
| 0.255914
| 11,836
| 238
| 242
| 49.731092
| 0.815942
| 0.003802
| 0
| 0.931034
| 1
| 0
| 0.337433
| 0.093223
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.00431
| 0
| 0.017241
| 0
| 0
| 0
| 0
| null
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 9
| 910ebeffe7ad2535019f7c095679ea2e660dac36
| 178
| py
| Python
| Tensile/Tests/weekly/assertions/test_assertions.py
| zjunweihit/Tensile
| 68b73083c92eecc1b04eec1f006f28aea5628030
| ["MIT"]
| null
| null
| null
| Tensile/Tests/weekly/assertions/test_assertions.py
| zjunweihit/Tensile
| 68b73083c92eecc1b04eec1f006f28aea5628030
| ["MIT"]
| null
| null
| null
| Tensile/Tests/weekly/assertions/test_assertions.py
| zjunweihit/Tensile
| 68b73083c92eecc1b04eec1f006f28aea5628030
| ["MIT"]
| 5
| 2019-07-29T01:23:56.000Z
| 2022-03-08T09:28:10.000Z
|
import Tensile.Tensile as Tensile
def test_hgemm_asem2_asm(tmpdir):
Tensile.Tensile([Tensile.TensileTestPath("nightly/assertions/test_hgemm_asem2_asm.yaml"), tmpdir.strpath])
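# --- Editor's note: each test in this suite follows the same shape ---
# point Tensile at a YAML test description and hand it pytest's tmpdir as the
# working directory. A hypothetical sibling test would read:
#
# def test_hgemm_asem4_asm(tmpdir):
#     Tensile.Tensile([Tensile.TensileTestPath("nightly/assertions/test_hgemm_asem4_asm.yaml"), tmpdir.strpath])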
| 29.666667
| 107
| 0.831461
| 24
| 178
| 5.916667
| 0.583333
| 0.295775
| 0.197183
| 0.239437
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011976
| 0.061798
| 178
| 5
| 108
| 35.6
| 0.838323
| 0
| 0
| 0
| 0
| 0
| 0.248588
| 0.248588
| 0
| 0
| 0
| 0
| 0.333333
| 1
| 0.333333
| false
| 0
| 0.333333
| 0
| 0.666667
| 0
| 1
| 0
| 0
| null
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 7
| 912818686a774180ac047828283cf97f04c1de57
| 29,427
| py
| Python
| atom/proton/python/proton_api/api/annuities_api.py
| AbhiGupta03/SDK
| f3a61aae7a847f07f0c22a154ca88dc378e9d25e
| ["Apache-2.0"]
| 11
| 2019-04-16T02:11:17.000Z
| 2021-12-16T22:51:40.000Z
| atom/proton/python/proton_api/api/annuities_api.py
| AbhiGupta03/SDK
| f3a61aae7a847f07f0c22a154ca88dc378e9d25e
| ["Apache-2.0"]
| 81
| 2019-11-19T23:24:28.000Z
| 2022-03-28T11:35:47.000Z
| atom/proton/python/proton_api/api/annuities_api.py
| AbhiGupta03/SDK
| f3a61aae7a847f07f0c22a154ca88dc378e9d25e
| ["Apache-2.0"]
| 11
| 2020-07-08T02:29:56.000Z
| 2022-03-28T10:05:33.000Z
|
# coding: utf-8
"""
Hydrogen Proton API
Financial engineering module of Hydrogen Atom # noqa: E501
OpenAPI spec version: 1.9.2
Contact: info@hydrogenplatform.com
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from proton_api.api_client import ApiClient
class AnnuitiesApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def annuity_calculator_accumulation_horizon(self, annuity_calculator_accumulation_horizon_request, **kwargs): # noqa: E501
"""Annuity Calculator - Accumulation Horizon # noqa: E501
Calculate the necessary fixed annuity contribution period # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.annuity_calculator_accumulation_horizon(annuity_calculator_accumulation_horizon_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param AnnuityCalculatorAccumulationHorizonRequest annuity_calculator_accumulation_horizon_request: Request payload for Annuity Calculator - Accumulation Horizon (required)
:return: dict(str, object)
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.annuity_calculator_accumulation_horizon_with_http_info(annuity_calculator_accumulation_horizon_request, **kwargs) # noqa: E501
else:
(data) = self.annuity_calculator_accumulation_horizon_with_http_info(annuity_calculator_accumulation_horizon_request, **kwargs) # noqa: E501
return data
def annuity_calculator_accumulation_horizon_with_http_info(self, annuity_calculator_accumulation_horizon_request, **kwargs): # noqa: E501
"""Annuity Calculator - Accumulation Horizon # noqa: E501
Calculate the necessary fixed annuity contribution period # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.annuity_calculator_accumulation_horizon_with_http_info(annuity_calculator_accumulation_horizon_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param AnnuityCalculatorAccumulationHorizonRequest annuity_calculator_accumulation_horizon_request: Request payload for Annuity Calculator - Accumulation Horizon (required)
:return: dict(str, object)
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['annuity_calculator_accumulation_horizon_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method annuity_calculator_accumulation_horizon" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'annuity_calculator_accumulation_horizon_request' is set
if self.api_client.client_side_validation and ('annuity_calculator_accumulation_horizon_request' not in params or
params['annuity_calculator_accumulation_horizon_request'] is None): # noqa: E501
raise ValueError("Missing the required parameter `annuity_calculator_accumulation_horizon_request` when calling `annuity_calculator_accumulation_horizon`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'annuity_calculator_accumulation_horizon_request' in params:
body_params = params['annuity_calculator_accumulation_horizon_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/annuity_calculator/accumulation_horizon', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='dict(str, object)', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
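# --- Editor's usage sketch (payload values are hypothetical) ---
# from proton_api.api_client import ApiClient
# api = AnnuitiesApi(ApiClient())
# body = {...}  # an AnnuityCalculatorAccumulationHorizonRequest payload
# result = api.annuity_calculator_accumulation_horizon(body)               # synchronous
# thread = api.annuity_calculator_accumulation_horizon(body, async_req=True)
# result = thread.get()  # asynchronous: the call returns a thread; block only on .get()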
def annuity_calculator_annuity_amount(self, annuity_calculator_annuity_amount_request, **kwargs): # noqa: E501
"""Annuity Calculator - Annuity Amount # noqa: E501
Calculate the achievable fixed annuity amount # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.annuity_calculator_annuity_amount(annuity_calculator_annuity_amount_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param AnnuityCalculatorAnnuityAmountRequest annuity_calculator_annuity_amount_request: Request payload for Annuity Calculator - Annuity Amount (required)
:return: dict(str, object)
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.annuity_calculator_annuity_amount_with_http_info(annuity_calculator_annuity_amount_request, **kwargs) # noqa: E501
else:
(data) = self.annuity_calculator_annuity_amount_with_http_info(annuity_calculator_annuity_amount_request, **kwargs) # noqa: E501
return data
def annuity_calculator_annuity_amount_with_http_info(self, annuity_calculator_annuity_amount_request, **kwargs): # noqa: E501
"""Annuity Calculator - Annuity Amount # noqa: E501
Calculate the achievable fixed annuity amount # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.annuity_calculator_annuity_amount_with_http_info(annuity_calculator_annuity_amount_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param AnnuityCalculatorAnnuityAmountRequest annuity_calculator_annuity_amount_request: Request payload for Annuity Calculator - Annuity Amount (required)
:return: dict(str, object)
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['annuity_calculator_annuity_amount_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method annuity_calculator_annuity_amount" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'annuity_calculator_annuity_amount_request' is set
if self.api_client.client_side_validation and ('annuity_calculator_annuity_amount_request' not in params or
params['annuity_calculator_annuity_amount_request'] is None): # noqa: E501
raise ValueError("Missing the required parameter `annuity_calculator_annuity_amount_request` when calling `annuity_calculator_annuity_amount`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'annuity_calculator_annuity_amount_request' in params:
body_params = params['annuity_calculator_annuity_amount_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/annuity_calculator/annuity_amount', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='dict(str, object)', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def annuity_calculator_decumulation_horizon(self, annuity_calculator_decumulation_horizon_request, **kwargs): # noqa: E501
"""Annuity Calculator - Decumulation Horizon # noqa: E501
Calculate the achievable fixed annuity payout period # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.annuity_calculator_decumulation_horizon(annuity_calculator_decumulation_horizon_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param AnnuityCalculatorDecumulationHorizonRequest annuity_calculator_decumulation_horizon_request: Request payload for Annuity Calculator - Decumulation Horizon (required)
:return: dict(str, object)
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.annuity_calculator_decumulation_horizon_with_http_info(annuity_calculator_decumulation_horizon_request, **kwargs) # noqa: E501
else:
(data) = self.annuity_calculator_decumulation_horizon_with_http_info(annuity_calculator_decumulation_horizon_request, **kwargs) # noqa: E501
return data
def annuity_calculator_decumulation_horizon_with_http_info(self, annuity_calculator_decumulation_horizon_request, **kwargs): # noqa: E501
"""Annuity Calculator - Decumulation Horizon # noqa: E501
Calculate the achievable fixed annuity payout period # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.annuity_calculator_decumulation_horizon_with_http_info(annuity_calculator_decumulation_horizon_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param AnnuityCalculatorDecumulationHorizonRequest annuity_calculator_decumulation_horizon_request: Request payload for Annuity Calculator - Decumulation Horizon (required)
:return: dict(str, object)
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['annuity_calculator_decumulation_horizon_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method annuity_calculator_decumulation_horizon" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'annuity_calculator_decumulation_horizon_request' is set
if self.api_client.client_side_validation and ('annuity_calculator_decumulation_horizon_request' not in params or
params['annuity_calculator_decumulation_horizon_request'] is None): # noqa: E501
raise ValueError("Missing the required parameter `annuity_calculator_decumulation_horizon_request` when calling `annuity_calculator_decumulation_horizon`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'annuity_calculator_decumulation_horizon_request' in params:
body_params = params['annuity_calculator_decumulation_horizon_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/annuity_calculator/decumulation_horizon', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='dict(str, object)', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def annuity_calculator_deposit_amount(self, annuity_calculator_deposit_amount_request, **kwargs): # noqa: E501
"""Annuity Calculator - Deposit Amount # noqa: E501
Calculate the necessary periodic deposit amount for a fixed annuity # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.annuity_calculator_deposit_amount(annuity_calculator_deposit_amount_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param AnnuityCalculatorDepositAmountRequest annuity_calculator_deposit_amount_request: Request payload for Annuity Calculator - Deposit Amount (required)
:return: dict(str, object)
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.annuity_calculator_deposit_amount_with_http_info(annuity_calculator_deposit_amount_request, **kwargs) # noqa: E501
else:
(data) = self.annuity_calculator_deposit_amount_with_http_info(annuity_calculator_deposit_amount_request, **kwargs) # noqa: E501
return data
def annuity_calculator_deposit_amount_with_http_info(self, annuity_calculator_deposit_amount_request, **kwargs): # noqa: E501
"""Annuity Calculator - Deposit Amount # noqa: E501
Calculate the necessary periodic deposit amount for a fixed annuity # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.annuity_calculator_deposit_amount_with_http_info(annuity_calculator_deposit_amount_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param AnnuityCalculatorDepositAmountRequest annuity_calculator_deposit_amount_request: Request payload for Annuity Calculator - Deposit Amount (required)
:return: dict(str, object)
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['annuity_calculator_deposit_amount_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method annuity_calculator_deposit_amount" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'annuity_calculator_deposit_amount_request' is set
if self.api_client.client_side_validation and ('annuity_calculator_deposit_amount_request' not in params or
params['annuity_calculator_deposit_amount_request'] is None): # noqa: E501
raise ValueError("Missing the required parameter `annuity_calculator_deposit_amount_request` when calling `annuity_calculator_deposit_amount`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'annuity_calculator_deposit_amount_request' in params:
body_params = params['annuity_calculator_deposit_amount_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/annuity_calculator/deposit_amount', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='dict(str, object)', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def annuity_calculator_initial_balance(self, annuity_calculator_initial_balance_request, **kwargs): # noqa: E501
"""Annuity Calculator - Initial Balance # noqa: E501
Calculate the necessary initial balance for a fixed annuity # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.annuity_calculator_initial_balance(annuity_calculator_initial_balance_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param AnnuityCalculatorInitialBalanceRequest annuity_calculator_initial_balance_request: Request payload for Annuity Calculator - Initial Balance (required)
:return: dict(str, object)
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.annuity_calculator_initial_balance_with_http_info(annuity_calculator_initial_balance_request, **kwargs) # noqa: E501
else:
(data) = self.annuity_calculator_initial_balance_with_http_info(annuity_calculator_initial_balance_request, **kwargs) # noqa: E501
return data
def annuity_calculator_initial_balance_with_http_info(self, annuity_calculator_initial_balance_request, **kwargs): # noqa: E501
"""Annuity Calculator - Initial Balance # noqa: E501
Calculate the necessary initial balance for a fixed annuity # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.annuity_calculator_initial_balance_with_http_info(annuity_calculator_initial_balance_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param AnnuityCalculatorInitialBalanceRequest annuity_calculator_initial_balance_request: Request payload for Annuity Calculator - Initial Balance (required)
:return: dict(str, object)
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['annuity_calculator_initial_balance_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method annuity_calculator_initial_balance" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'annuity_calculator_initial_balance_request' is set
if self.api_client.client_side_validation and ('annuity_calculator_initial_balance_request' not in params or
params['annuity_calculator_initial_balance_request'] is None): # noqa: E501
raise ValueError("Missing the required parameter `annuity_calculator_initial_balance_request` when calling `annuity_calculator_initial_balance`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'annuity_calculator_initial_balance_request' in params:
body_params = params['annuity_calculator_initial_balance_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/annuity_calculator/initial_balance', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='dict(str, object)', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def variable_annuity(self, variable_annuity_request, **kwargs): # noqa: E501
"""Variable Annuity # noqa: E501
Project the behavior of a variable annuity over time # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.variable_annuity(variable_annuity_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param VariableAnnuityRequest variable_annuity_request: Request payload for Variable Annuity (required)
:return: dict(str, object)
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.variable_annuity_with_http_info(variable_annuity_request, **kwargs) # noqa: E501
else:
(data) = self.variable_annuity_with_http_info(variable_annuity_request, **kwargs) # noqa: E501
return data
def variable_annuity_with_http_info(self, variable_annuity_request, **kwargs): # noqa: E501
"""Variable Annuity # noqa: E501
Project the behavior of a variable annuity over time # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.variable_annuity_with_http_info(variable_annuity_request, async_req=True)
>>> result = thread.get()
:param async_req bool
:param VariableAnnuityRequest variable_annuity_request: Request payload for Variable Annuity (required)
:return: dict(str, object)
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['variable_annuity_request'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method variable_annuity" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'variable_annuity_request' is set
if self.api_client.client_side_validation and ('variable_annuity_request' not in params or
params['variable_annuity_request'] is None): # noqa: E501
raise ValueError("Missing the required parameter `variable_annuity_request` when calling `variable_annuity`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'variable_annuity_request' in params:
body_params = params['variable_annuity_request']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['oauth2'] # noqa: E501
return self.api_client.call_api(
'/variable_annuity', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='dict(str, object)', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
| 46.783784
| 181
| 0.666735
| 3,213
| 29,427
| 5.781824
| 0.056022
| 0.128115
| 0.04371
| 0.054261
| 0.964526
| 0.948808
| 0.925607
| 0.912957
| 0.89853
| 0.874092
| 0
| 0.014024
| 0.258504
| 29,427
| 628
| 182
| 46.85828
| 0.837351
| 0.3473
| 0
| 0.738739
| 1
| 0
| 0.229161
| 0.134318
| 0
| 0
| 0
| 0
| 0
| 1
| 0.039039
| false
| 0
| 0.012012
| 0
| 0.108108
| 0
| 0
| 0
| 0
| null
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| e677ba223311ea28c6b033d16448eae98d08894b
| 42,078
| py
| Python
| dingtalk/python/alibabacloud_dingtalk/workflow_1_0/client.py
| aliyun/dingtalk-sdk
| ab4f856b8cfe94f6b69f10a0730a2e5a7d4901c5
| ["Apache-2.0"]
| 15
| 2020-08-27T04:10:26.000Z
| 2022-03-07T06:25:42.000Z
| dingtalk/python/alibabacloud_dingtalk/workflow_1_0/client.py
| aliyun/dingtalk-sdk
| ab4f856b8cfe94f6b69f10a0730a2e5a7d4901c5
| ["Apache-2.0"]
| 1
| 2020-09-27T01:30:46.000Z
| 2021-12-29T09:15:34.000Z
| dingtalk/python/alibabacloud_dingtalk/workflow_1_0/client.py
| aliyun/dingtalk-sdk
| ab4f856b8cfe94f6b69f10a0730a2e5a7d4901c5
| ["Apache-2.0"]
| 5
| 2020-08-27T04:07:44.000Z
| 2021-12-03T02:55:20.000Z
|
# -*- coding: utf-8 -*-
# This file is auto-generated, don't edit it. Thanks.
from Tea.core import TeaCore
from alibabacloud_tea_openapi.client import Client as OpenApiClient
from alibabacloud_tea_openapi import models as open_api_models
from alibabacloud_tea_util.client import Client as UtilClient
from alibabacloud_dingtalk.workflow_1_0 import models as dingtalkworkflow__1__0_models
from alibabacloud_tea_util import models as util_models
from alibabacloud_openapi_util.client import Client as OpenApiUtilClient
class Client(OpenApiClient):
"""
*\
"""
def __init__(
self,
config: open_api_models.Config,
):
super().__init__(config)
self._endpoint_rule = ''
if UtilClient.empty(self._endpoint):
self._endpoint = 'api.dingtalk.com'
def query_form_instance(
self,
request: dingtalkworkflow__1__0_models.QueryFormInstanceRequest,
) -> dingtalkworkflow__1__0_models.QueryFormInstanceResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkworkflow__1__0_models.QueryFormInstanceHeaders()
return self.query_form_instance_with_options(request, headers, runtime)
async def query_form_instance_async(
self,
request: dingtalkworkflow__1__0_models.QueryFormInstanceRequest,
) -> dingtalkworkflow__1__0_models.QueryFormInstanceResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkworkflow__1__0_models.QueryFormInstanceHeaders()
return await self.query_form_instance_with_options_async(request, headers, runtime)
def query_form_instance_with_options(
self,
request: dingtalkworkflow__1__0_models.QueryFormInstanceRequest,
headers: dingtalkworkflow__1__0_models.QueryFormInstanceHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkworkflow__1__0_models.QueryFormInstanceResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.form_instance_id):
query['formInstanceId'] = request.form_instance_id
if not UtilClient.is_unset(request.form_code):
query['formCode'] = request.form_code
if not UtilClient.is_unset(request.app_uuid):
query['appUuid'] = request.app_uuid
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkworkflow__1__0_models.QueryFormInstanceResponse(),
self.do_roarequest('QueryFormInstance', 'workflow_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/workflow/forms/instances', 'json', req, runtime)
)
async def query_form_instance_with_options_async(
self,
request: dingtalkworkflow__1__0_models.QueryFormInstanceRequest,
headers: dingtalkworkflow__1__0_models.QueryFormInstanceHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkworkflow__1__0_models.QueryFormInstanceResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.form_instance_id):
query['formInstanceId'] = request.form_instance_id
if not UtilClient.is_unset(request.form_code):
query['formCode'] = request.form_code
if not UtilClient.is_unset(request.app_uuid):
query['appUuid'] = request.app_uuid
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkworkflow__1__0_models.QueryFormInstanceResponse(),
await self.do_roarequest_async('QueryFormInstance', 'workflow_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/workflow/forms/instances', 'json', req, runtime)
)
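# Example (sketch, not part of the generated file): querying form instances.
# The token and request field values are illustrative assumptions.
#
#   config = open_api_models.Config(protocol='https', region_id='central')
#   client = Client(config)
#   headers = dingtalkworkflow__1__0_models.QueryFormInstanceHeaders()
#   headers.x_acs_dingtalk_access_token = '<access-token>'
#   request = dingtalkworkflow__1__0_models.QueryFormInstanceRequest(
#       form_code='<form-code>', app_uuid='<app-uuid>')
#   response = client.query_form_instance_with_options(
#       request, headers, util_models.RuntimeOptions())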
def process_forecast(
self,
request: dingtalkworkflow__1__0_models.ProcessForecastRequest,
) -> dingtalkworkflow__1__0_models.ProcessForecastResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkworkflow__1__0_models.ProcessForecastHeaders()
return self.process_forecast_with_options(request, headers, runtime)
async def process_forecast_async(
self,
request: dingtalkworkflow__1__0_models.ProcessForecastRequest,
) -> dingtalkworkflow__1__0_models.ProcessForecastResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkworkflow__1__0_models.ProcessForecastHeaders()
return await self.process_forecast_with_options_async(request, headers, runtime)
def process_forecast_with_options(
self,
request: dingtalkworkflow__1__0_models.ProcessForecastRequest,
headers: dingtalkworkflow__1__0_models.ProcessForecastHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkworkflow__1__0_models.ProcessForecastResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.ding_corp_id):
body['dingCorpId'] = request.ding_corp_id
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.request_id):
body['RequestId'] = request.request_id
if not UtilClient.is_unset(request.process_code):
body['processCode'] = request.process_code
if not UtilClient.is_unset(request.dept_id):
body['deptId'] = request.dept_id
if not UtilClient.is_unset(request.user_id):
body['userId'] = request.user_id
if not UtilClient.is_unset(request.form_component_values):
body['formComponentValues'] = request.form_component_values
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkworkflow__1__0_models.ProcessForecastResponse(),
self.do_roarequest('ProcessForecast', 'workflow_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/workflow/processes/forecast', 'json', req, runtime)
)
async def process_forecast_with_options_async(
self,
request: dingtalkworkflow__1__0_models.ProcessForecastRequest,
headers: dingtalkworkflow__1__0_models.ProcessForecastHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkworkflow__1__0_models.ProcessForecastResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.ding_corp_id):
body['dingCorpId'] = request.ding_corp_id
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.request_id):
body['RequestId'] = request.request_id
if not UtilClient.is_unset(request.process_code):
body['processCode'] = request.process_code
if not UtilClient.is_unset(request.dept_id):
body['deptId'] = request.dept_id
if not UtilClient.is_unset(request.user_id):
body['userId'] = request.user_id
if not UtilClient.is_unset(request.form_component_values):
body['formComponentValues'] = request.form_component_values
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkworkflow__1__0_models.ProcessForecastResponse(),
await self.do_roarequest_async('ProcessForecast', 'workflow_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/workflow/processes/forecast', 'json', req, runtime)
)
def grant_cspace_authorization(
self,
request: dingtalkworkflow__1__0_models.GrantCspaceAuthorizationRequest,
) -> dingtalkworkflow__1__0_models.GrantCspaceAuthorizationResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkworkflow__1__0_models.GrantCspaceAuthorizationHeaders()
return self.grant_cspace_authorization_with_options(request, headers, runtime)
async def grant_cspace_authorization_async(
self,
request: dingtalkworkflow__1__0_models.GrantCspaceAuthorizationRequest,
) -> dingtalkworkflow__1__0_models.GrantCspaceAuthorizationResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkworkflow__1__0_models.GrantCspaceAuthorizationHeaders()
return await self.grant_cspace_authorization_with_options_async(request, headers, runtime)
def grant_cspace_authorization_with_options(
self,
request: dingtalkworkflow__1__0_models.GrantCspaceAuthorizationRequest,
headers: dingtalkworkflow__1__0_models.GrantCspaceAuthorizationHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkworkflow__1__0_models.GrantCspaceAuthorizationResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.space_id):
body['spaceId'] = request.space_id
if not UtilClient.is_unset(request.type):
body['type'] = request.type
if not UtilClient.is_unset(request.user_id):
body['userId'] = request.user_id
if not UtilClient.is_unset(request.duration_seconds):
body['durationSeconds'] = request.duration_seconds
if not UtilClient.is_unset(request.ding_corp_id):
body['dingCorpId'] = request.ding_corp_id
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkworkflow__1__0_models.GrantCspaceAuthorizationResponse(),
self.do_roarequest('GrantCspaceAuthorization', 'workflow_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/workflow/spaces/authorize', 'none', req, runtime)
)
async def grant_cspace_authorization_with_options_async(
self,
request: dingtalkworkflow__1__0_models.GrantCspaceAuthorizationRequest,
headers: dingtalkworkflow__1__0_models.GrantCspaceAuthorizationHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkworkflow__1__0_models.GrantCspaceAuthorizationResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.space_id):
body['spaceId'] = request.space_id
if not UtilClient.is_unset(request.type):
body['type'] = request.type
if not UtilClient.is_unset(request.user_id):
body['userId'] = request.user_id
if not UtilClient.is_unset(request.duration_seconds):
body['durationSeconds'] = request.duration_seconds
if not UtilClient.is_unset(request.ding_corp_id):
body['dingCorpId'] = request.ding_corp_id
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkworkflow__1__0_models.GrantCspaceAuthorizationResponse(),
await self.do_roarequest_async('GrantCspaceAuthorization', 'workflow_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/workflow/spaces/authorize', 'none', req, runtime)
)
def query_all_process_instances(
self,
request: dingtalkworkflow__1__0_models.QueryAllProcessInstancesRequest,
) -> dingtalkworkflow__1__0_models.QueryAllProcessInstancesResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkworkflow__1__0_models.QueryAllProcessInstancesHeaders()
return self.query_all_process_instances_with_options(request, headers, runtime)
async def query_all_process_instances_async(
self,
request: dingtalkworkflow__1__0_models.QueryAllProcessInstancesRequest,
) -> dingtalkworkflow__1__0_models.QueryAllProcessInstancesResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkworkflow__1__0_models.QueryAllProcessInstancesHeaders()
return await self.query_all_process_instances_with_options_async(request, headers, runtime)
def query_all_process_instances_with_options(
self,
request: dingtalkworkflow__1__0_models.QueryAllProcessInstancesRequest,
headers: dingtalkworkflow__1__0_models.QueryAllProcessInstancesHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkworkflow__1__0_models.QueryAllProcessInstancesResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.next_token):
query['nextToken'] = request.next_token
if not UtilClient.is_unset(request.max_results):
query['maxResults'] = request.max_results
if not UtilClient.is_unset(request.start_time_in_mills):
query['startTimeInMills'] = request.start_time_in_mills
if not UtilClient.is_unset(request.end_time_in_mills):
query['endTimeInMills'] = request.end_time_in_mills
if not UtilClient.is_unset(request.process_code):
query['processCode'] = request.process_code
if not UtilClient.is_unset(request.app_uuid):
query['appUuid'] = request.app_uuid
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkworkflow__1__0_models.QueryAllProcessInstancesResponse(),
self.do_roarequest('QueryAllProcessInstances', 'workflow_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/workflow/processes/pages/instances', 'json', req, runtime)
)
async def query_all_process_instances_with_options_async(
self,
request: dingtalkworkflow__1__0_models.QueryAllProcessInstancesRequest,
headers: dingtalkworkflow__1__0_models.QueryAllProcessInstancesHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkworkflow__1__0_models.QueryAllProcessInstancesResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.next_token):
query['nextToken'] = request.next_token
if not UtilClient.is_unset(request.max_results):
query['maxResults'] = request.max_results
if not UtilClient.is_unset(request.start_time_in_mills):
query['startTimeInMills'] = request.start_time_in_mills
if not UtilClient.is_unset(request.end_time_in_mills):
query['endTimeInMills'] = request.end_time_in_mills
if not UtilClient.is_unset(request.process_code):
query['processCode'] = request.process_code
if not UtilClient.is_unset(request.app_uuid):
query['appUuid'] = request.app_uuid
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkworkflow__1__0_models.QueryAllProcessInstancesResponse(),
await self.do_roarequest_async('QueryAllProcessInstances', 'workflow_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/workflow/processes/pages/instances', 'json', req, runtime)
)
def query_all_form_instances(
self,
request: dingtalkworkflow__1__0_models.QueryAllFormInstancesRequest,
) -> dingtalkworkflow__1__0_models.QueryAllFormInstancesResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkworkflow__1__0_models.QueryAllFormInstancesHeaders()
return self.query_all_form_instances_with_options(request, headers, runtime)
async def query_all_form_instances_async(
self,
request: dingtalkworkflow__1__0_models.QueryAllFormInstancesRequest,
) -> dingtalkworkflow__1__0_models.QueryAllFormInstancesResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkworkflow__1__0_models.QueryAllFormInstancesHeaders()
return await self.query_all_form_instances_with_options_async(request, headers, runtime)
def query_all_form_instances_with_options(
self,
request: dingtalkworkflow__1__0_models.QueryAllFormInstancesRequest,
headers: dingtalkworkflow__1__0_models.QueryAllFormInstancesHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkworkflow__1__0_models.QueryAllFormInstancesResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.next_token):
query['nextToken'] = request.next_token
if not UtilClient.is_unset(request.max_results):
query['maxResults'] = request.max_results
if not UtilClient.is_unset(request.app_uuid):
query['appUuid'] = request.app_uuid
if not UtilClient.is_unset(request.form_code):
query['formCode'] = request.form_code
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkworkflow__1__0_models.QueryAllFormInstancesResponse(),
self.do_roarequest('QueryAllFormInstances', 'workflow_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/workflow/forms/pages/instances', 'json', req, runtime)
)
async def query_all_form_instances_with_options_async(
self,
request: dingtalkworkflow__1__0_models.QueryAllFormInstancesRequest,
headers: dingtalkworkflow__1__0_models.QueryAllFormInstancesHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkworkflow__1__0_models.QueryAllFormInstancesResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.next_token):
query['nextToken'] = request.next_token
if not UtilClient.is_unset(request.max_results):
query['maxResults'] = request.max_results
if not UtilClient.is_unset(request.app_uuid):
query['appUuid'] = request.app_uuid
if not UtilClient.is_unset(request.form_code):
query['formCode'] = request.form_code
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkworkflow__1__0_models.QueryAllFormInstancesResponse(),
await self.do_roarequest_async('QueryAllFormInstances', 'workflow_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/workflow/forms/pages/instances', 'json', req, runtime)
)
def query_form_by_biz_type(
self,
request: dingtalkworkflow__1__0_models.QueryFormByBizTypeRequest,
) -> dingtalkworkflow__1__0_models.QueryFormByBizTypeResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkworkflow__1__0_models.QueryFormByBizTypeHeaders()
return self.query_form_by_biz_type_with_options(request, headers, runtime)
async def query_form_by_biz_type_async(
self,
request: dingtalkworkflow__1__0_models.QueryFormByBizTypeRequest,
) -> dingtalkworkflow__1__0_models.QueryFormByBizTypeResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkworkflow__1__0_models.QueryFormByBizTypeHeaders()
return await self.query_form_by_biz_type_with_options_async(request, headers, runtime)
def query_form_by_biz_type_with_options(
self,
request: dingtalkworkflow__1__0_models.QueryFormByBizTypeRequest,
headers: dingtalkworkflow__1__0_models.QueryFormByBizTypeHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkworkflow__1__0_models.QueryFormByBizTypeResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.app_uuid):
body['appUuid'] = request.app_uuid
if not UtilClient.is_unset(request.biz_types):
body['bizTypes'] = request.biz_types
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkworkflow__1__0_models.QueryFormByBizTypeResponse(),
self.do_roarequest('QueryFormByBizType', 'workflow_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/workflow/forms/forminfos/query', 'json', req, runtime)
)
async def query_form_by_biz_type_with_options_async(
self,
request: dingtalkworkflow__1__0_models.QueryFormByBizTypeRequest,
headers: dingtalkworkflow__1__0_models.QueryFormByBizTypeHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkworkflow__1__0_models.QueryFormByBizTypeResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.app_uuid):
body['appUuid'] = request.app_uuid
if not UtilClient.is_unset(request.biz_types):
body['bizTypes'] = request.biz_types
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkworkflow__1__0_models.QueryFormByBizTypeResponse(),
await self.do_roarequest_async('QueryFormByBizType', 'workflow_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/workflow/forms/forminfos/query', 'json', req, runtime)
)
def form_create(
self,
request: dingtalkworkflow__1__0_models.FormCreateRequest,
) -> dingtalkworkflow__1__0_models.FormCreateResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkworkflow__1__0_models.FormCreateHeaders()
return self.form_create_with_options(request, headers, runtime)
async def form_create_async(
self,
request: dingtalkworkflow__1__0_models.FormCreateRequest,
) -> dingtalkworkflow__1__0_models.FormCreateResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkworkflow__1__0_models.FormCreateHeaders()
return await self.form_create_with_options_async(request, headers, runtime)
def form_create_with_options(
self,
request: dingtalkworkflow__1__0_models.FormCreateRequest,
headers: dingtalkworkflow__1__0_models.FormCreateHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkworkflow__1__0_models.FormCreateResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.ding_corp_id):
body['dingCorpId'] = request.ding_corp_id
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.request_id):
body['RequestId'] = request.request_id
if not UtilClient.is_unset(request.process_code):
body['processCode'] = request.process_code
if not UtilClient.is_unset(request.name):
body['name'] = request.name
if not UtilClient.is_unset(request.description):
body['description'] = request.description
if not UtilClient.is_unset(request.form_components):
body['formComponents'] = request.form_components
if not UtilClient.is_unset(request.template_config):
body['templateConfig'] = request.template_config
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkworkflow__1__0_models.FormCreateResponse(),
self.do_roarequest('FormCreate', 'workflow_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/workflow/forms', 'json', req, runtime)
)
async def form_create_with_options_async(
self,
request: dingtalkworkflow__1__0_models.FormCreateRequest,
headers: dingtalkworkflow__1__0_models.FormCreateHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkworkflow__1__0_models.FormCreateResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.ding_corp_id):
body['dingCorpId'] = request.ding_corp_id
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
if not UtilClient.is_unset(request.request_id):
body['RequestId'] = request.request_id
if not UtilClient.is_unset(request.process_code):
body['processCode'] = request.process_code
if not UtilClient.is_unset(request.name):
body['name'] = request.name
if not UtilClient.is_unset(request.description):
body['description'] = request.description
if not UtilClient.is_unset(request.form_components):
body['formComponents'] = request.form_components
if not UtilClient.is_unset(request.template_config):
body['templateConfig'] = request.template_config
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkworkflow__1__0_models.FormCreateResponse(),
await self.do_roarequest_async('FormCreate', 'workflow_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/workflow/forms', 'json', req, runtime)
)
def query_schema_by_process_code(
self,
request: dingtalkworkflow__1__0_models.QuerySchemaByProcessCodeRequest,
) -> dingtalkworkflow__1__0_models.QuerySchemaByProcessCodeResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkworkflow__1__0_models.QuerySchemaByProcessCodeHeaders()
return self.query_schema_by_process_code_with_options(request, headers, runtime)
async def query_schema_by_process_code_async(
self,
request: dingtalkworkflow__1__0_models.QuerySchemaByProcessCodeRequest,
) -> dingtalkworkflow__1__0_models.QuerySchemaByProcessCodeResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkworkflow__1__0_models.QuerySchemaByProcessCodeHeaders()
return await self.query_schema_by_process_code_with_options_async(request, headers, runtime)
def query_schema_by_process_code_with_options(
self,
request: dingtalkworkflow__1__0_models.QuerySchemaByProcessCodeRequest,
headers: dingtalkworkflow__1__0_models.QuerySchemaByProcessCodeHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkworkflow__1__0_models.QuerySchemaByProcessCodeResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.process_code):
query['processCode'] = request.process_code
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkworkflow__1__0_models.QuerySchemaByProcessCodeResponse(),
self.do_roarequest('QuerySchemaByProcessCode', 'workflow_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/workflow/forms/schemas/processCodes', 'json', req, runtime)
)
async def query_schema_by_process_code_with_options_async(
self,
request: dingtalkworkflow__1__0_models.QuerySchemaByProcessCodeRequest,
headers: dingtalkworkflow__1__0_models.QuerySchemaByProcessCodeHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkworkflow__1__0_models.QuerySchemaByProcessCodeResponse:
UtilClient.validate_model(request)
query = {}
if not UtilClient.is_unset(request.process_code):
query['processCode'] = request.process_code
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
query=OpenApiUtilClient.query(query)
)
return TeaCore.from_map(
dingtalkworkflow__1__0_models.QuerySchemaByProcessCodeResponse(),
await self.do_roarequest_async('QuerySchemaByProcessCode', 'workflow_1.0', 'HTTP', 'GET', 'AK', f'/v1.0/workflow/forms/schemas/processCodes', 'json', req, runtime)
)
def start_process_instance(
self,
request: dingtalkworkflow__1__0_models.StartProcessInstanceRequest,
) -> dingtalkworkflow__1__0_models.StartProcessInstanceResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkworkflow__1__0_models.StartProcessInstanceHeaders()
return self.start_process_instance_with_options(request, headers, runtime)
async def start_process_instance_async(
self,
request: dingtalkworkflow__1__0_models.StartProcessInstanceRequest,
) -> dingtalkworkflow__1__0_models.StartProcessInstanceResponse:
runtime = util_models.RuntimeOptions()
headers = dingtalkworkflow__1__0_models.StartProcessInstanceHeaders()
return await self.start_process_instance_with_options_async(request, headers, runtime)
def start_process_instance_with_options(
self,
request: dingtalkworkflow__1__0_models.StartProcessInstanceRequest,
headers: dingtalkworkflow__1__0_models.StartProcessInstanceHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkworkflow__1__0_models.StartProcessInstanceResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.originator_user_id):
body['originatorUserId'] = request.originator_user_id
if not UtilClient.is_unset(request.process_code):
body['processCode'] = request.process_code
if not UtilClient.is_unset(request.dept_id):
body['deptId'] = request.dept_id
if not UtilClient.is_unset(request.microapp_agent_id):
body['microappAgentId'] = request.microapp_agent_id
if not UtilClient.is_unset(request.approvers):
body['approvers'] = request.approvers
if not UtilClient.is_unset(request.cc_list):
body['ccList'] = request.cc_list
if not UtilClient.is_unset(request.cc_position):
body['ccPosition'] = request.cc_position
if not UtilClient.is_unset(request.target_select_actioners):
body['targetSelectActioners'] = request.target_select_actioners
if not UtilClient.is_unset(request.form_component_values):
body['formComponentValues'] = request.form_component_values
if not UtilClient.is_unset(request.request_id):
body['RequestId'] = request.request_id
if not UtilClient.is_unset(request.ding_corp_id):
body['dingCorpId'] = request.ding_corp_id
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkworkflow__1__0_models.StartProcessInstanceResponse(),
self.do_roarequest('StartProcessInstance', 'workflow_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/workflow/processInstances', 'json', req, runtime)
)
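# Example (sketch, not part of the generated file): starting an approval
# flow. All IDs and form values below are illustrative assumptions.
#
#   request = dingtalkworkflow__1__0_models.StartProcessInstanceRequest(
#       process_code='<process-code>',
#       originator_user_id='<user-id>',
#       dept_id=1,
#       form_component_values=[...],
#   )
#   response = client.start_process_instance(request)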
async def start_process_instance_with_options_async(
self,
request: dingtalkworkflow__1__0_models.StartProcessInstanceRequest,
headers: dingtalkworkflow__1__0_models.StartProcessInstanceHeaders,
runtime: util_models.RuntimeOptions,
) -> dingtalkworkflow__1__0_models.StartProcessInstanceResponse:
UtilClient.validate_model(request)
body = {}
if not UtilClient.is_unset(request.originator_user_id):
body['originatorUserId'] = request.originator_user_id
if not UtilClient.is_unset(request.process_code):
body['processCode'] = request.process_code
if not UtilClient.is_unset(request.dept_id):
body['deptId'] = request.dept_id
if not UtilClient.is_unset(request.microapp_agent_id):
body['microappAgentId'] = request.microapp_agent_id
if not UtilClient.is_unset(request.approvers):
body['approvers'] = request.approvers
if not UtilClient.is_unset(request.cc_list):
body['ccList'] = request.cc_list
if not UtilClient.is_unset(request.cc_position):
body['ccPosition'] = request.cc_position
if not UtilClient.is_unset(request.target_select_actioners):
body['targetSelectActioners'] = request.target_select_actioners
if not UtilClient.is_unset(request.form_component_values):
body['formComponentValues'] = request.form_component_values
if not UtilClient.is_unset(request.request_id):
body['RequestId'] = request.request_id
if not UtilClient.is_unset(request.ding_corp_id):
body['dingCorpId'] = request.ding_corp_id
if not UtilClient.is_unset(request.ding_org_id):
body['dingOrgId'] = request.ding_org_id
if not UtilClient.is_unset(request.ding_isv_org_id):
body['dingIsvOrgId'] = request.ding_isv_org_id
if not UtilClient.is_unset(request.ding_suite_key):
body['dingSuiteKey'] = request.ding_suite_key
if not UtilClient.is_unset(request.ding_token_grant_type):
body['dingTokenGrantType'] = request.ding_token_grant_type
real_headers = {}
if not UtilClient.is_unset(headers.common_headers):
real_headers = headers.common_headers
if not UtilClient.is_unset(headers.x_acs_dingtalk_access_token):
real_headers['x-acs-dingtalk-access-token'] = headers.x_acs_dingtalk_access_token
req = open_api_models.OpenApiRequest(
headers=real_headers,
body=OpenApiUtilClient.parse_to_map(body)
)
return TeaCore.from_map(
dingtalkworkflow__1__0_models.StartProcessInstanceResponse(),
await self.do_roarequest_async('StartProcessInstance', 'workflow_1.0', 'HTTP', 'POST', 'AK', f'/v1.0/workflow/processInstances', 'json', req, runtime)
)
| 52.012361
| 175
| 0.707163
| 4,610
| 42,078
| 6.05141
| 0.044902
| 0.028318
| 0.084955
| 0.096283
| 0.97584
| 0.955838
| 0.94641
| 0.930279
| 0.910564
| 0.896727
| 0
| 0.009885
| 0.208993
| 42,078
| 808
| 176
| 52.076733
| 0.828266
| 0.001901
| 0
| 0.839005
| 1
| 0
| 0.077172
| 0.03025
| 0
| 0
| 0
| 0
| 0
| 1
| 0.024869
| false
| 0
| 0.009162
| 0
| 0.082461
| 0
| 0
| 0
| 0
| null
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| fc5d01611249f378e24df5414341bfa8f5b7a62d
| 20,752
| py
| Python
| tests/test_routing.py
| tswicegood/steinie
| aab4bcc5d912d1da6fd0e5ea59dfe23196d1ca32
| ["Apache-2.0"]
| 4
| 2015-04-15T22:45:59.000Z
| 2020-07-09T04:30:59.000Z
| tests/test_routing.py
| tswicegood/steinie
| aab4bcc5d912d1da6fd0e5ea59dfe23196d1ca32
| ["Apache-2.0"]
| 2
| 2015-03-13T16:00:21.000Z
| 2015-04-15T23:55:09.000Z
| tests/test_routing.py
| tswicegood/steinie
| aab4bcc5d912d1da6fd0e5ea59dfe23196d1ca32
| ["Apache-2.0"]
| null
| null
| null
import random
from unittest import TestCase
import mock
import werkzeug
from steinie import app
from steinie import routing
from . import utils
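# These tests exercise steinie's routing surface: per-method route
# decorators, nested routers, URL parameter handlers, and middleware.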
def generate_example_environ(method="GET"):
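# Minimal WSGI environ for the tests; per PEP 3333, 'wsgi.url_scheme'
# must be a plain string such as 'http'.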
return {
'HTTP_HOST': 'example.com',
'PATH_INFO': '/',
'REQUEST_METHOD': method,
'wsgi.url_scheme': 'http',
}
def generate_mock_request(environ=None):
if environ is None:
environ = generate_example_environ()
return mock.Mock(path="/bar/foo", environ=environ)
class DecoratedFunctionsWithRawMethodTestCase(TestCase):
def test_can_decorate_with_multiples(self):
r = random.randint(1000, 2000)
router = routing.Router()
@router.method("/", methods=["GET", "POST"])
def index(request, response):
return r
environ = generate_example_environ(method='GET')
request = mock.Mock(path='/', environ=environ)
get_response = router.handle(request, mock.Mock())
self.assertEqual(r, get_response)
environ = generate_example_environ(method='POST')
post_response = router.handle(request, mock.Mock())
self.assertEqual(r, post_response)
self.assertEqual(get_response, post_response)
class NestedRoutingTestCase(TestCase):
def test_allows_nested_router(self):
r1 = routing.Router()
@r1.get("/foo")
def handle_foo(request, response):
return "\n".join([
"request.path: %s" % request.path,
"request.original_path: %s" % request.original_path,
])
r2 = routing.Router()
r2.use("/bar", r1)
request = mock.Mock(path="/bar/foo", environ=generate_example_environ())
response = r2.handle(request, mock.Mock())
expected = "\n".join([
"request.path: /foo",
"request.original_path: /bar/foo",
])
self.assertEqual(expected, response)
def test_middleware_is_instantiated_with_route(self):
Middleware = mock.Mock()
r = routing.Router()
r.use(Middleware)
@r.get("/foo")
def handler(*args):
pass
a = app.Steinie()
a.use("/bar", r)
a.handle(generate_mock_request(), mock.Mock())
Middleware.assert_called_once_with(r)
def test_parameters_are_not_shared_with_parents_or_siblings(self):
r1 = routing.Router()
@r1.param("foo")
def foo_param(param):
return "foo"
@r1.get("/<foo:foo>")
def foo_handler(request, response):
return request.params
r2 = routing.Router()
@r2.param("bar")
def bar_param(param):
return "bar"
@r2.get("/<bar:bar>")
def bar_handler(request, response):
return request.params
a = app.Steinie()
a.use("/foo", r1)
a.use("/bar", r2)
@a.get("/")
def handler(request, response):
return request.params
request = mock.Mock(path="/", environ=generate_example_environ())
response = a.handle(request, mock.Mock())
self.assertEqual({}, response)
request.path = "/foo/bar"
self.assertEqual({"foo": "foo"}, a.handle(request, mock.Mock()))
request.path = "/bar/foo"
self.assertEqual({"bar": "bar"}, a.handle(request, mock.Mock()))
class ParamFunctionTestCase(TestCase):
def test_basic_router(self):
num = random.randint(1000, 2000)
router = routing.Router()
expected = "foo{}".format(random.randint(100, 200))
call_count = []
@router.param("bar")
def bar_to_upper(param):
return param.upper()
@router.get("/<bar:baz>/")
def parameter(request):
call_count.append(num)
self.assertIn('baz', request.params)
self.assertEqual(request.params['baz'], expected.upper())
path = "/{0}/".format(expected)
request = mock.Mock(path=path, environ=generate_example_environ())
router.handle(request, mock.Mock())
self.assertEqual(len(call_count), 1)
self.assertIn(num, call_count)
def test_wraps_existing_func(self):
router = routing.Router()
@router.param("bar")
def bar_to_upper(param):
return param.upper()
self.assertEqual(bar_to_upper("foo"), "FOO")
self.assertEqual(bar_to_upper.__name__, "bar_to_upper")
def test_supports_nested_params(self):
num = random.randint(1000, 2000)
router = routing.Router()
expected = "foo{}".format(random.randint(100, 200))
call_count = []
@router.param("bar")
def bar_to_upper(param):
return param.upper()
@router.get("/<bar:baz>/")
def parameter(request):
call_count.append(num)
self.assertIn('baz', request.params)
self.assertEqual(request.params['baz'], expected.upper())
path = "/{0}/".format(expected)
request = mock.Mock(path=path, environ=generate_example_environ())
router.handle(request, mock.Mock())
self.assertEqual(len(call_count), 1)
self.assertIn(num, call_count)
router2 = routing.Router()
router2.use("/", router)
router2.handle(request, mock.Mock())
self.assertEqual(len(call_count), 2)
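# Param handlers, as exercised above, transform a URL segment before the
# route handler sees it (sketch; `to_int` is illustrative):
#
#   @router.param("id")
#   def to_int(param):
#       return int(param)
#   @router.get("/<id:user_id>")
#   def show(request, response):
#       return request.params["user_id"]  # already an int here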
class DecoratedDeleteFunctionsTestCase(TestCase):
def test_wraps_existing_func(self):
router = routing.Router()
@router.delete("/")
def index(request, response):
return request.path
random_path = "/foo/bar/%s" % random.randint(100, 200)
request = mock.Mock(path=random_path)
self.assertEqual(index(request, mock.Mock()), random_path)
self.assertEqual(index.__name__, "index")
def test_is_dispatched_to_via_handle(self):
r = random.randint(1000, 2000)
router = routing.Router()
@router.delete("/")
def index(request, response):
return r
post_environ = generate_example_environ(method='DELETE')
request = mock.Mock(path='/', environ=post_environ)
response = router.handle(request, mock.Mock())
self.assertEqual(r, response)
def test_does_not_match_on_get_or_post(self):
r = random.randint(1000, 2000)
router = routing.Router()
@router.delete("/")
def index(request, response):
return r
get_environ = generate_example_environ(method='GET')
request = mock.Mock(path='/', environ=get_environ)
with self.assertRaises(werkzeug.exceptions.MethodNotAllowed):
router.handle(request, mock.Mock())
post_environ = generate_example_environ(method='POST')
request = mock.Mock(path='/', environ=post_environ)
with self.assertRaises(werkzeug.exceptions.MethodNotAllowed):
router.handle(request, mock.Mock())
class DecoratedHeadFunctionsTestCase(TestCase):
def test_wraps_existing_func(self):
router = routing.Router()
@router.head("/")
def index(request, response):
return request.path
random_path = "/foo/bar/%s" % random.randint(100, 200)
request = mock.Mock(path=random_path)
self.assertEqual(index(request, mock.Mock()), random_path)
self.assertEqual(index.__name__, "index")
def test_is_dispatched_to_via_handle(self):
r = random.randint(1000, 2000)
router = routing.Router()
@router.head("/")
def index(request, response):
return r
post_environ = generate_example_environ(method='HEAD')
request = mock.Mock(path='/', environ=post_environ)
response = router.handle(request, mock.Mock())
self.assertEqual(r, response)
def test_does_not_match_on_get_or_post(self):
r = random.randint(1000, 2000)
router = routing.Router()
@router.head("/")
def index(request, response):
return r
get_environ = generate_example_environ(method='GET')
request = mock.Mock(path='/', environ=get_environ)
with self.assertRaises(werkzeug.exceptions.MethodNotAllowed):
router.handle(request, mock.Mock())
post_environ = generate_example_environ(method='POST')
request = mock.Mock(path='/', environ=post_environ)
with self.assertRaises(werkzeug.exceptions.MethodNotAllowed):
router.handle(request, mock.Mock())
class DecoratedInfoFunctionsTestCase(TestCase):
def test_wraps_existing_func(self):
router = routing.Router()
@router.info("/")
def index(request, response):
return request.path
random_path = "/foo/bar/%s" % random.randint(100, 200)
request = mock.Mock(path=random_path)
self.assertEqual(index(request, mock.Mock()), random_path)
self.assertEqual(index.__name__, "index")
def test_is_dispatched_to_via_handle(self):
r = random.randint(1000, 2000)
router = routing.Router()
@router.info("/")
def index(request, response):
return r
post_environ = generate_example_environ(method='INFO')
request = mock.Mock(path='/', environ=post_environ)
response = router.handle(request, mock.Mock())
self.assertEqual(r, response)
def test_does_not_match_on_get_or_post(self):
r = random.randint(1000, 2000)
router = routing.Router()
@router.info("/")
def index(request, response):
return r
get_environ = generate_example_environ(method='GET')
request = mock.Mock(path='/', environ=get_environ)
with self.assertRaises(werkzeug.exceptions.MethodNotAllowed):
router.handle(request, mock.Mock())
post_environ = generate_example_environ(method='POST')
request = mock.Mock(path='/', environ=post_environ)
with self.assertRaises(werkzeug.exceptions.MethodNotAllowed):
router.handle(request, mock.Mock())
class DecoratedOptionFunctionsTestCase(TestCase):
def test_wraps_existing_func(self):
router = routing.Router()
@router.options("/")
def index(request, response):
return request.path
random_path = "/foo/bar/%s" % random.randint(100, 200)
request = mock.Mock(path=random_path)
self.assertEqual(index(request, mock.Mock()), random_path)
self.assertEqual(index.__name__, "index")
def test_is_dispatched_to_via_handle(self):
r = random.randint(1000, 2000)
router = routing.Router()
@router.options("/")
def index(request, response):
return r
environ = generate_example_environ(method='OPTIONS')
request = mock.Mock(path='/', environ=environ)
response = router.handle(request, mock.Mock())
self.assertEqual(r, response)
def test_does_not_match_on_get_or_post(self):
r = random.randint(1000, 2000)
router = routing.Router()
@router.options("/")
def index(request, response):
return r
get_environ = generate_example_environ(method='GET')
request = mock.Mock(path='/', environ=get_environ)
with self.assertRaises(werkzeug.exceptions.MethodNotAllowed):
router.handle(request, mock.Mock())
post_environ = generate_example_environ(method='POST')
request = mock.Mock(path='/', environ=post_environ)
with self.assertRaises(werkzeug.exceptions.MethodNotAllowed):
router.handle(request, mock.Mock())
class DecoratedPatchFunctionsTestCase(TestCase):
def test_wraps_existing_func(self):
router = routing.Router()
@router.patch("/")
def index(request, response):
return request.path
random_path = "/foo/bar/%s" % random.randint(100, 200)
request = mock.Mock(path=random_path)
self.assertEqual(index(request, mock.Mock()), random_path)
self.assertEqual(index.__name__, "index")
def test_is_dispatched_to_via_handle(self):
r = random.randint(1000, 2000)
router = routing.Router()
@router.patch("/")
def index(request, response):
return r
environ = generate_example_environ(method='PATCH')
request = mock.Mock(path='/', environ=environ)
response = router.handle(request, mock.Mock())
self.assertEqual(r, response)
def test_does_not_match_on_get_or_post(self):
r = random.randint(1000, 2000)
router = routing.Router()
@router.patch("/")
def index(request, response):
return r
get_environ = generate_example_environ(method='GET')
request = mock.Mock(path='/', environ=get_environ)
with self.assertRaises(werkzeug.exceptions.MethodNotAllowed):
router.handle(request, mock.Mock())
post_environ = generate_example_environ(method='POST')
request = mock.Mock(path='/', environ=post_environ)
with self.assertRaises(werkzeug.exceptions.MethodNotAllowed):
router.handle(request, mock.Mock())
class DecoratedPutFunctionsTestCase(TestCase):
def test_wraps_existing_func(self):
router = routing.Router()
@router.put("/")
def index(request, response):
return request.path
random_path = "/foo/bar/%s" % random.randint(100, 200)
request = mock.Mock(path=random_path)
self.assertEqual(index(request, mock.Mock()), random_path)
self.assertEqual(index.__name__, "index")
def test_is_dispatched_to_via_handle(self):
r = random.randint(1000, 2000)
router = routing.Router()
@router.put("/")
def index(request, response):
return r
environ = generate_example_environ(method='PUT')
request = mock.Mock(path='/', environ=environ)
response = router.handle(request, mock.Mock())
self.assertEqual(r, response)
def test_does_not_match_on_get_or_post(self):
r = random.randint(1000, 2000)
router = routing.Router()
@router.put("/")
def index(request, response):
return r
get_environ = generate_example_environ(method='GET')
request = mock.Mock(path='/', environ=get_environ)
with self.assertRaises(werkzeug.exceptions.MethodNotAllowed):
router.handle(request, mock.Mock())
post_environ = generate_example_environ(method='POST')
request = mock.Mock(path='/', environ=post_environ)
with self.assertRaises(werkzeug.exceptions.MethodNotAllowed):
router.handle(request, mock.Mock())
class DecoratedPostFunctionsTestCase(TestCase):
def test_wraps_existing_func(self):
router = routing.Router()
@router.post("/")
def index(request):
return request.path
random_path = "/foo/bar/%s" % random.randint(100, 200)
request = mock.Mock(path=random_path)
self.assertEqual(index(request), random_path)
self.assertEqual(index.__name__, "index")
def test_is_dispatched_to_via_handle(self):
r = random.randint(1000, 2000)
router = routing.Router()
@router.post("/")
def index(request):
return r
post_environ = generate_example_environ(method='POST')
request = mock.Mock(path='/', environ=post_environ)
response = router.handle(request, mock.Mock())
self.assertEqual(r, response)
def test_does_not_match_on_get(self):
r = random.randint(1000, 2000)
router = routing.Router()
@router.post("/")
def index(request, response):
return r
post_environ = generate_example_environ(method='GET')
request = mock.Mock(path='/', environ=post_environ)
with self.assertRaises(werkzeug.exceptions.MethodNotAllowed):
router.handle(request, mock.Mock())
class DecoratedTraceFunctionsTestCase(TestCase):
def test_wraps_existing_func(self):
router = routing.Router()
@router.trace("/")
def index(request, response):
return request.path
random_path = "/foo/bar/%s" % random.randint(100, 200)
request = mock.Mock(path=random_path)
self.assertEqual(index(request, mock.Mock()), random_path)
self.assertEqual(index.__name__, "index")
def test_is_dispatched_to_via_handle(self):
r = random.randint(1000, 2000)
router = routing.Router()
@router.trace("/")
def index(request, response):
return r
environ = generate_example_environ(method='TRACE')
request = mock.Mock(path='/', environ=environ)
response = router.handle(request, mock.Mock())
self.assertEqual(r, response)
def test_does_not_match_on_get_or_post(self):
r = random.randint(1000, 2000)
router = routing.Router()
@router.trace("/")
def index(request, response):
return r
get_environ = generate_example_environ(method='GET')
request = mock.Mock(path='/', environ=get_environ)
with self.assertRaises(werkzeug.exceptions.MethodNotAllowed):
router.handle(request, mock.Mock())
post_environ = generate_example_environ(method='POST')
request = mock.Mock(path='/', environ=post_environ)
with self.assertRaises(werkzeug.exceptions.MethodNotAllowed):
router.handle(request, mock.Mock())
class DecoratedGetFunctionsTestCase(TestCase):
def test_wraps_existing_func(self):
router = routing.Router()
@router.get("/")
def index(request):
return request.path
random_path = "/foo/bar/%s" % random.randint(100, 200)
request = mock.Mock(path=random_path)
self.assertEqual(index(request), random_path)
self.assertEqual(index.__name__, "index")
def test_does_not_match_on_post(self):
router = routing.Router()
@router.get("/")
def index(request):
return request.path
post_environ = generate_example_environ(method='POST')
request = mock.Mock(path="/", environ=post_environ, method='POST')
with self.assertRaises(werkzeug.exceptions.MethodNotAllowed):
router.handle(request, mock.Mock())
class MiddlewareTestCase(TestCase):
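# Middleware contract exercised below: a class instantiated with the app or
# router, then called with (request, response, _next); returning without
# calling _next short-circuits the route handler.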
def test_allows_using_middleware(self):
class Middleware(object):
def __init__(self, app):
pass
def __call__(self, request, response, _next):
response.data = "MIDDLEWARE INVOKED"
return response
a = app.Steinie()
a.use(Middleware)
with utils.run_app(a):
response = utils.get("http://localhost:5151/baz")
self.assertIn("MIDDLEWARE INVOKED", response.content)
def test_allows_using_middleware_from_nested_routers(self):
class Middleware(object):
def __init__(self, app):
pass
def __call__(self, request, response, _next):
response.data = "MIDDLEWARE INVOKED"
return response
r = routing.Router()
r.use(Middleware)
@r.get("/baz")
def get(request):
pass
a = app.Steinie()
a.use('/', r)
with utils.run_app(a):
response = utils.get("http://localhost:5151/baz")
self.assertIn("MIDDLEWARE INVOKED", response.content)
def test_dispatches_if_next_is_called(self):
class Middleware(object):
def __init__(self, app):
pass
def __call__(self, request, response, _next):
return _next(request, response)
a = app.Steinie()
a.use(Middleware)
@a.get("/foo")
def get(request, response):
return "Hello from the route"
with utils.run_app(a):
response = utils.get("http://localhost:5151/foo")
self.assertIn("Hello from the route", response.content)
def test_does_not_call_root_if_next_is_not_called(self):
class Middleware(object):
def __init__(self, app):
pass
def __call__(self, request, response, _next):
pass
a = app.Steinie()
a.use(Middleware)
@a.get("/foo")
def get(request, response):
return "Should never see this"
with utils.run_app(a):
response = utils.get("http://localhost:5151/foo")
self.assertEqual('', response.content)
| 30.698225
| 80
| 0.617049
| 2,288
| 20,752
| 5.405157
| 0.065997
| 0.052398
| 0.09582
| 0.058381
| 0.853077
| 0.822916
| 0.797768
| 0.794776
| 0.789521
| 0.776987
| 0
| 0.016791
| 0.259589
| 20,752
| 675
| 81
| 30.743704
| 0.78809
| 0
| 0
| 0.766046
| 1
| 0
| 0.043369
| 0.00212
| 0
| 0
| 0
| 0
| 0.134576
| 1
| 0.184265
| false
| 0.014493
| 0.014493
| 0.082816
| 0.322981
| 0
| 0
| 0
| 0
| null
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| fc5dff9cca7e8dd2051eabba39a529b36768b720
| 15,280
| py
| Python
| sdk/python/pulumi_databricks/databricks/sql_permissions.py
| ingenii-solutions/pulumi-databricks
| f03ecc4e190a4e59eb635663f6408350dcab42ea
| ["ECL-2.0", "Apache-2.0"]
| 2
| 2021-12-10T07:35:59.000Z
| 2022-03-23T22:53:55.000Z
| sdk/python/pulumi_databricks/databricks/sql_permissions.py
| ingenii-solutions/pulumi-databricks
| f03ecc4e190a4e59eb635663f6408350dcab42ea
| ["ECL-2.0", "Apache-2.0"]
| null
| null
| null
| sdk/python/pulumi_databricks/databricks/sql_permissions.py
| ingenii-solutions/pulumi-databricks
| f03ecc4e190a4e59eb635663f6408350dcab42ea
| ["ECL-2.0", "Apache-2.0"]
| null
| null
| null
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
from . import outputs
from ._inputs import *
__all__ = ['SqlPermissionsArgs', 'SqlPermissions']
@pulumi.input_type
class SqlPermissionsArgs:
def __init__(__self__, *,
anonymous_function: Optional[pulumi.Input[bool]] = None,
any_file: Optional[pulumi.Input[bool]] = None,
catalog: Optional[pulumi.Input[bool]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
database: Optional[pulumi.Input[str]] = None,
privilege_assignments: Optional[pulumi.Input[Sequence[pulumi.Input['SqlPermissionsPrivilegeAssignmentArgs']]]] = None,
table: Optional[pulumi.Input[str]] = None,
view: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a SqlPermissions resource.
"""
if anonymous_function is not None:
pulumi.set(__self__, "anonymous_function", anonymous_function)
if any_file is not None:
pulumi.set(__self__, "any_file", any_file)
if catalog is not None:
pulumi.set(__self__, "catalog", catalog)
if cluster_id is not None:
pulumi.set(__self__, "cluster_id", cluster_id)
if database is not None:
pulumi.set(__self__, "database", database)
if privilege_assignments is not None:
pulumi.set(__self__, "privilege_assignments", privilege_assignments)
if table is not None:
pulumi.set(__self__, "table", table)
if view is not None:
pulumi.set(__self__, "view", view)
@property
@pulumi.getter(name="anonymousFunction")
def anonymous_function(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "anonymous_function")
@anonymous_function.setter
def anonymous_function(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "anonymous_function", value)
@property
@pulumi.getter(name="anyFile")
def any_file(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "any_file")
@any_file.setter
def any_file(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "any_file", value)
@property
@pulumi.getter
def catalog(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "catalog")
@catalog.setter
def catalog(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "catalog", value)
@property
@pulumi.getter(name="clusterId")
def cluster_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "cluster_id")
@cluster_id.setter
def cluster_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_id", value)
@property
@pulumi.getter
def database(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "database")
@database.setter
def database(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "database", value)
@property
@pulumi.getter(name="privilegeAssignments")
def privilege_assignments(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SqlPermissionsPrivilegeAssignmentArgs']]]]:
return pulumi.get(self, "privilege_assignments")
@privilege_assignments.setter
def privilege_assignments(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SqlPermissionsPrivilegeAssignmentArgs']]]]):
pulumi.set(self, "privilege_assignments", value)
@property
@pulumi.getter
def table(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "table")
@table.setter
def table(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "table", value)
@property
@pulumi.getter
def view(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "view")
@view.setter
def view(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "view", value)
@pulumi.input_type
class _SqlPermissionsState:
def __init__(__self__, *,
anonymous_function: Optional[pulumi.Input[bool]] = None,
any_file: Optional[pulumi.Input[bool]] = None,
catalog: Optional[pulumi.Input[bool]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
database: Optional[pulumi.Input[str]] = None,
privilege_assignments: Optional[pulumi.Input[Sequence[pulumi.Input['SqlPermissionsPrivilegeAssignmentArgs']]]] = None,
table: Optional[pulumi.Input[str]] = None,
view: Optional[pulumi.Input[str]] = None):
"""
Input properties used for looking up and filtering SqlPermissions resources.
"""
if anonymous_function is not None:
pulumi.set(__self__, "anonymous_function", anonymous_function)
if any_file is not None:
pulumi.set(__self__, "any_file", any_file)
if catalog is not None:
pulumi.set(__self__, "catalog", catalog)
if cluster_id is not None:
pulumi.set(__self__, "cluster_id", cluster_id)
if database is not None:
pulumi.set(__self__, "database", database)
if privilege_assignments is not None:
pulumi.set(__self__, "privilege_assignments", privilege_assignments)
if table is not None:
pulumi.set(__self__, "table", table)
if view is not None:
pulumi.set(__self__, "view", view)
@property
@pulumi.getter(name="anonymousFunction")
def anonymous_function(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "anonymous_function")
@anonymous_function.setter
def anonymous_function(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "anonymous_function", value)
@property
@pulumi.getter(name="anyFile")
def any_file(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "any_file")
@any_file.setter
def any_file(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "any_file", value)
@property
@pulumi.getter
def catalog(self) -> Optional[pulumi.Input[bool]]:
return pulumi.get(self, "catalog")
@catalog.setter
def catalog(self, value: Optional[pulumi.Input[bool]]):
pulumi.set(self, "catalog", value)
@property
@pulumi.getter(name="clusterId")
def cluster_id(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "cluster_id")
@cluster_id.setter
def cluster_id(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "cluster_id", value)
@property
@pulumi.getter
def database(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "database")
@database.setter
def database(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "database", value)
@property
@pulumi.getter(name="privilegeAssignments")
def privilege_assignments(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['SqlPermissionsPrivilegeAssignmentArgs']]]]:
return pulumi.get(self, "privilege_assignments")
@privilege_assignments.setter
def privilege_assignments(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['SqlPermissionsPrivilegeAssignmentArgs']]]]):
pulumi.set(self, "privilege_assignments", value)
@property
@pulumi.getter
def table(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "table")
@table.setter
def table(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "table", value)
@property
@pulumi.getter
def view(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "view")
@view.setter
def view(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "view", value)
class SqlPermissions(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
anonymous_function: Optional[pulumi.Input[bool]] = None,
any_file: Optional[pulumi.Input[bool]] = None,
catalog: Optional[pulumi.Input[bool]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
database: Optional[pulumi.Input[str]] = None,
privilege_assignments: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SqlPermissionsPrivilegeAssignmentArgs']]]]] = None,
table: Optional[pulumi.Input[str]] = None,
view: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Create a SqlPermissions resource with the given unique name, props, and options.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: Optional[SqlPermissionsArgs] = None,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Create a SqlPermissions resource with the given unique name, props, and options.
:param str resource_name: The name of the resource.
:param SqlPermissionsArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(SqlPermissionsArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
anonymous_function: Optional[pulumi.Input[bool]] = None,
any_file: Optional[pulumi.Input[bool]] = None,
catalog: Optional[pulumi.Input[bool]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
database: Optional[pulumi.Input[str]] = None,
privilege_assignments: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SqlPermissionsPrivilegeAssignmentArgs']]]]] = None,
table: Optional[pulumi.Input[str]] = None,
view: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = SqlPermissionsArgs.__new__(SqlPermissionsArgs)
__props__.__dict__["anonymous_function"] = anonymous_function
__props__.__dict__["any_file"] = any_file
__props__.__dict__["catalog"] = catalog
__props__.__dict__["cluster_id"] = cluster_id
__props__.__dict__["database"] = database
__props__.__dict__["privilege_assignments"] = privilege_assignments
__props__.__dict__["table"] = table
__props__.__dict__["view"] = view
super(SqlPermissions, __self__).__init__(
'databricks:databricks/sqlPermissions:SqlPermissions',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None,
anonymous_function: Optional[pulumi.Input[bool]] = None,
any_file: Optional[pulumi.Input[bool]] = None,
catalog: Optional[pulumi.Input[bool]] = None,
cluster_id: Optional[pulumi.Input[str]] = None,
database: Optional[pulumi.Input[str]] = None,
privilege_assignments: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['SqlPermissionsPrivilegeAssignmentArgs']]]]] = None,
table: Optional[pulumi.Input[str]] = None,
view: Optional[pulumi.Input[str]] = None) -> 'SqlPermissions':
"""
Get an existing SqlPermissions resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = _SqlPermissionsState.__new__(_SqlPermissionsState)
__props__.__dict__["anonymous_function"] = anonymous_function
__props__.__dict__["any_file"] = any_file
__props__.__dict__["catalog"] = catalog
__props__.__dict__["cluster_id"] = cluster_id
__props__.__dict__["database"] = database
__props__.__dict__["privilege_assignments"] = privilege_assignments
__props__.__dict__["table"] = table
__props__.__dict__["view"] = view
return SqlPermissions(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="anonymousFunction")
def anonymous_function(self) -> pulumi.Output[Optional[bool]]:
return pulumi.get(self, "anonymous_function")
@property
@pulumi.getter(name="anyFile")
def any_file(self) -> pulumi.Output[Optional[bool]]:
return pulumi.get(self, "any_file")
@property
@pulumi.getter
def catalog(self) -> pulumi.Output[Optional[bool]]:
return pulumi.get(self, "catalog")
@property
@pulumi.getter(name="clusterId")
def cluster_id(self) -> pulumi.Output[str]:
return pulumi.get(self, "cluster_id")
@property
@pulumi.getter
def database(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "database")
@property
@pulumi.getter(name="privilegeAssignments")
def privilege_assignments(self) -> pulumi.Output[Optional[Sequence['outputs.SqlPermissionsPrivilegeAssignment']]]:
return pulumi.get(self, "privilege_assignments")
@property
@pulumi.getter
def table(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "table")
@property
@pulumi.getter
def view(self) -> pulumi.Output[Optional[str]]:
return pulumi.get(self, "view")
| 40.965147
| 153
| 0.64856
| 1,656
| 15,280
| 5.714976
| 0.084541
| 0.098795
| 0.144548
| 0.083686
| 0.813187
| 0.801352
| 0.782439
| 0.76828
| 0.752642
| 0.699704
| 0
| 0.000086
| 0.236846
| 15,280
| 372
| 154
| 41.075269
| 0.811508
| 0.074935
| 0
| 0.828179
| 1
| 0
| 0.108721
| 0.044033
| 0
| 0
| 0
| 0
| 0
| 1
| 0.161512
| false
| 0.003436
| 0.024055
| 0.082474
| 0.281787
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5da081c5d2918121c5c71a1392ff8bbd8a660540
| 13,202
|
py
|
Python
|
Main.py
|
Luka98122/HomeworkAlarm
|
c87a43758f17a38cd9a1da1125b738c7691807ec
|
[
"MIT"
] | 1
|
2020-10-28T09:11:42.000Z
|
2020-10-28T09:11:42.000Z
|
Main.py
|
Luka98122/HomeworkAlarm
|
c87a43758f17a38cd9a1da1125b738c7691807ec
|
[
"MIT"
] | null | null | null |
Main.py
|
Luka98122/HomeworkAlarm
|
c87a43758f17a38cd9a1da1125b738c7691807ec
|
[
"MIT"
] | null | null | null |
import requests
import pprint
import json
import random
import os
import sys
TEST_MODE = 0  # set to 1 to use the cached local data instead of hitting the website
randFileName = str(random.randint(1, 2000000000000000)) + ".txt"
FILENAME = 'assigments.txt'  # saved state between runs (filename kept as-is)
DiffFolder = "Diffs"
if sys.platform.startswith('win32'):
OUTPUT_FOLDER = '\\Users\\luka\\source\\repos\\Homework Alarm\\Diffs'
else:
OUTPUT_FOLDER = '/tmp/homebotSend/1'
outputFile = os.path.join(OUTPUT_FOLDER, randFileName)  # where this run's diff is written
print(outputFile)
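# Map of subject name -> URL of that subject's page on the school support site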
subjects = {
"Srpski" : "https://podrska.ossmarkovic.edu.rs/v-%d1%80%d0%b0%d0%b7%d1%80%d0%b5%d0%b4-%d1%81%d1%80%d0%bf%d1%81%d0%ba%d0%b8-%d1%98%d0%b5%d0%b7%d0%b8%d0%ba/",
"Matematika" : "https://podrska.ossmarkovic.edu.rs/v-%d1%80%d0%b0%d0%b7%d1%80%d0%b5%d0%b4-%d0%bc%d0%b0%d1%82%d0%b5%d0%bc%d0%b0%d1%82%d0%b8%d0%ba%d0%b0/",
"Spanski" : "https://podrska.ossmarkovic.edu.rs/v-%d1%80%d0%b0%d0%b7%d1%80%d0%b5%d0%b4-%d1%88%d0%bf%d0%b0%d0%bd%d1%81%d0%ba%d0%b8-%d1%98%d0%b5%d0%b7%d0%b8%d0%ba/",
"Istorija" : "https://podrska.ossmarkovic.edu.rs/v-%d1%80%d0%b0%d0%b7%d1%80%d0%b5%d0%b4-%d0%b8%d1%81%d1%82%d0%be%d1%80%d0%b8%d1%98%d0%b0/",
"Biologija" : "https://podrska.ossmarkovic.edu.rs/v-%d1%80%d0%b0%d0%b7%d1%80%d0%b5%d0%b4-%d0%b1%d0%b8%d0%be%d0%bb%d0%be%d0%b3%d0%b8%d1%98%d0%b0/",
"Geografija" : "https://podrska.ossmarkovic.edu.rs/v-%d1%80%d0%b0%d0%b7%d1%80%d0%b5%d0%b4-%d0%b3%d0%b5%d0%be%d0%b3%d1%80%d0%b0%d1%84%d0%b8%d1%98%d0%b0/",
"Likovno" : "https://podrska.ossmarkovic.edu.rs/v-%d1%80%d0%b0%d0%b7%d1%80%d0%b5%d0%b4-%d0%bb%d0%b8%d0%ba%d0%be%d0%b2%d0%bd%d0%b0-%d0%ba%d1%83%d0%bb%d1%82%d1%83%d1%80%d0%b0/",
"Fizicko" : "https://podrska.ossmarkovic.edu.rs/v-%d1%80%d0%b0%d0%b7%d1%80%d0%b5%d0%b4-%d1%84%d0%b8%d0%b7%d0%b8%d1%87%d0%ba%d0%be-%d0%b2%d0%b0%d1%81%d0%bf%d0%b8%d1%82%d0%b0%d1%9a%d0%b5/",
"Tehnika" : "https://podrska.ossmarkovic.edu.rs/v-%d1%80%d0%b0%d0%b7%d1%80%d0%b5%d0%b4-%d1%82%d0%b5%d1%85%d0%bd%d0%b8%d0%ba%d0%b0-%d0%b8-%d1%82%d0%b5%d1%85%d0%bd%d0%be%d0%bb%d0%be%d0%b3%d0%b8%d1%98%d0%b0/",
"Informatika" : "https://podrska.ossmarkovic.edu.rs/v-%d1%80%d0%b0%d0%b7%d1%80%d0%b5%d0%b4-%d0%b8%d0%bd%d1%84%d0%be%d1%80%d0%bc%d0%b0%d1%82%d0%b8%d0%ba%d0%b0/",
"Engleski" : "https://podrska.ossmarkovic.edu.rs/v-%d1%80%d0%b0%d0%b7%d1%80%d0%b5%d0%b4-%d0%b5%d0%bd%d0%b3%d0%bb%d0%b5%d1%81%d0%ba%d0%b8-%d1%98%d0%b5%d0%b7%d0%b8%d0%ba/",
"Francuski" : "https://podrska.ossmarkovic.edu.rs/v-%d1%80%d0%b0%d0%b7%d1%80%d0%b5%d0%b4-%d1%84%d1%80%d0%b0%d0%bd%d1%86%d1%83%d1%81%d0%ba%d0%b8-%d1%98%d0%b5%d0%b7%d0%b8%d0%ba/",
"Muzicko" : "https://podrska.ossmarkovic.edu.rs/v-%d1%80%d0%b0%d0%b7%d1%80%d0%b5%d0%b4-%d0%bc%d1%83%d0%b7%d0%b8%d1%87%d0%ba%d0%b0-%d0%ba%d1%83%d0%bb%d1%82%d1%83%d1%80%d0%b0/",
"Veronauka" : "https://podrska.ossmarkovic.edu.rs/v-%d1%80%d0%b0%d0%b7%d1%80%d0%b5%d0%b4-%d0%b2%d0%b5%d1%80%d0%be%d0%bd%d0%b0%d1%83%d0%ba%d0%b0/",
"Gradansko" : "https://podrska.ossmarkovic.edu.rs/v-%d1%80%d0%b0%d0%b7%d1%80%d0%b5%d0%b4-%d0%b3%d1%80%d0%b0%d1%92%d0%b0%d0%bd%d1%81%d0%ba%d0%be/"
}
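# Returns a locally cached snapshot of the newest assignments so the site
# isn't hit on every run while developing (used when TEST_MODE == 1).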
def debugGetLocalAssignments():
newDict = {'Srpski': ['https://podrska.ossmarkovic.edu.rs/2020/10/14/%d0%b2%d0%b0%d0%b6%d0%bd%d0%be-%d0%be%d0%b1%d0%b0%d0%b2%d0%b5%d1%88%d1%82%d0%b5%d1%9a%d0%b5-%d0%b7%d0%b0-%d1%83%d1%87%d0%b5%d0%bd%d0%b8%d0%ba%d0%b5-5-1-5-2-%d0%b8-5-4-%d0%be%d0%b4%d0%b5%d1%99%d0%b5',
'Важно обавештење за ученике 5/1, 5/2 и 5/4 одељења'],
'Matematika': ['https://podrska.ossmarkovic.edu.rs/2020/10/12/5-15-35-4-%d0%be%d0%b1%d0%b0%d0%b2%d0%b5%d1%88%d1%82%d0%b5%d1%9a%d0%b5-%d0%b7%d0%b0-%d1%83%d1%87%d0%b5%d0%bd%d0%b8%d0%ba%d0%b5-%d0%bd%d0%b0-online-%d0%bd%d0%b0%d1%81%d1%82%d0%b0%d0%b2%d0%b8',
'5-1,5-3,5-4 – Обаешење за ученике на online настави'],
'Spanski': ['https://podrska.ossmakovic.edu.rs/2020/10/13/10-%d0%b8-11-%d1%87%d0%b0%d1%81-%d0%b0%d0%b1%d0%b5%d1%86%d0%b5%d0%b4%d0%b0',
'10. И 11. час Абецеда'],
'Istorija': ['https://podrska.ossmarkovic.edu.rs/2020/10/06/%d0%bf%d1%80%d0%b0%d0%b8%d1%81%d1%82%d0%be%d1%80%d0%b8%d1%98%d0%b0',
'Праисторија'],
'Biologija': ['https://podrska.ossmarkovic.edu.rs/2020/10/08/%d1%85%d1%80%d0%b0%d0%bd%d0%b0-%d0%ba%d0%b0%d0%be-%d0%b8%d0%b7%d0%b2%d0%be%d1%80-%d0%b5%d0%bd%d0%b5%d1%80%d0%b3%d0%b8%d1%98%d0%b5-%d0%b8-%d0%b3%d1%80%d0%b0%d0%b4%d0%b8%d0%b2%d0%bd%d0%b8%d1%85-%d1%81',
'Храна као извор енергије и градивних супстанци'],
'Geografija': ['https://podrska.ossmarkovic.edu.rs/2020/10/06/%d0%be%d0%b1%d0%bb%d0%b8%d0%ba-%d0%b7%d0%b5%d0%bc%d1%99%d0%b5-%d0%b8-%d1%9a%d0%b5%d0%bd%d0%b0-%d1%81%d1%82%d1%80%d1%83%d0%ba%d1%82%d1%83%d1%80%d0%b0',
'Облик Земље и њена структура'],
'Likovno': ['https://podrska.ossmarkovic.edu.rs/2020/09/09/%d1%83%d0%b2%d0%be%d0%b4-%d1%83-%d0%bb%d0%b8%d0%ba%d0%be%d0%b2%d0%bd%d1%83-%d0%ba%d1%83%d0%bb%d1%82%d1%83%d1%80%d1%83',
'Увод у ликовну културу'],
'Fizicko': ['https://podrska.ossmarkovic.edu.rs/2020/09/25/%d0%bf%d1%80%d0%b0%d0%b2%d0%b8%d0%bb%d0%bd%d0%b0-%d0%b8%d1%81%d1%85%d1%80%d0%b0%d0%bd%d0%b0-%d0%b7%d0%b0-%d1%83%d1%87%d0%b5%d0%bd%d0%b8%d0%ba%d0%b5-%d0%be%d0%b4-5-8-%d1%80%d0%b0%d0%b7%d1%80%d0%b5',
'ПРАВИЛНА ИСХРАНА'],
'Tehnika': ['https://podrska.ossmarkovic.edu.rs/2020/10/12/%d1%81%d0%b0%d0%be%d0%b1%d1%80%d0%b0%d1%9b%d0%b0%d1%98-%d0%b2%d1%80%d1%81%d1%82%d0%b5-%d0%b8-%d1%81%d1%82%d1%80%d1%83%d0%ba%d1%82%d1%83%d1%80%d0%b0',
'Саобраћај, врсте и структура'],
'Informatika': ['https://podrska.ossmarkovic.edu.rs/2020/09/21/%d0%bd%d0%b0%d1%81%d1%82%d0%b0%d0%b2%d0%b0-%d0%bd%d0%b0-%d0%b4%d0%b0%d1%99%d0%b8%d0%bd%d1%83-%d0%b8%d0%b7-%d0%b8%d0%bd%d1%84%d0%be%d1%80%d0%bc%d0%b0%d1%82%d0%b8%d0%ba%d0%b5-%d0%b8-%d1%80%d0%b0',
'Настава на даљину из Информатике и рчунараства 5 разред'],
'Engleski': ['https://podrska.ossmarkovic.edu.rs/2020/10/06/%d0%b4%d0%be%d0%bc%d0%b0%d1%9b%d0%b8-%d0%b7%d0%b0%d0%b4%d0%b0%d1%82%d0%b0%d0%ba-%d0%b7%d0%b0-%d0%bf%d0%b5%d1%80%d0%b8%d0%be-5-10-17-10-2020',
'Домаћи задатак за перио 5.10.-17.10.2020.'],
'Francuski': ["Didn't find this", 'No title'],
'Muzicko': ['https://podrska.ossmarkovic.edu.rs/2020/10/10/%d0%bc%d1%83%d0%b7%d0%b8%d0%ba%d0%b0-%d0%bf%d1%80%d0%b0%d0%b8%d1%81%d1%82%d0%be%d1%80%d0%b8%d1%98%d0%b5-%d0%b4%d0%be%d0%bc%d0%b0%d1%9b%d0%b8-%d0%b7%d0%b0%d0%b4%d0%b0%d1%82%d0%b0%d0%ba',
'Музика праисторије-домаћи задатак'],
'Veronauka': ["Didn't find this", 'No title'],
'Gradansko': ['https://podrska.ossmarkovic.edu.rs/2020/10/05/%d0%ba%d0%be%d0%bc%d0%b1%d0%b8%d0%bd%d0%be%d0%b2%d0%b0%d0%bd%d0%b0-%d0%bd%d0%b0%d1%81%d1%82%d0%b0%d0%b2%d0%b0-%d0%bf%d0%b0%d0%bd%d0%b4%d0%b5%d0%bc%d0%b8%d1%98%d0%b0-%d0%b8%d1%81%d0%ba%d1%83',
'Комбинована настава, пандемија – искуства ђака 5/1, 5/2, 5/3, '
'5/4']}
return newDict
def getTitleText(url):
    response = requests.get(url)
    html = response.text
    # Find the opening tag of the newest post's title
    stringSearchedFor = '<h2 class="entry-title fw-400">'
    indexOfString = html.find(stringSearchedFor)
    if indexOfString == -1:
        print("Didn't find this class")
        return ["Didn't find this", "No title"]
    # Search for the closing tag from the opening tag onwards, so an earlier
    # </h2> on the page can't truncate the slice
    indexOfClosing = html.find('</h2>', indexOfString)
    x = html[indexOfString + len(stringSearchedFor):indexOfClosing]
    beginningLink = x.find("https:")
    endLink = x.find('/"')
    link = x[beginningLink:endLink]
    titleSearchedFor = 'title="'
    titleStart = x.find(titleSearchedFor)
    titleEnd = x.find('">')
    title = x[titleStart + len(titleSearchedFor):titleEnd]
    return [link, title]
def getNewestAssignmentFromAll():
    if TEST_MODE == 1:
        return debugGetLocalAssignments()  # don't talk to the server while developing
    subjectPreviews = {}
    for subject in subjects.keys():
        subjectPreviews[subject] = getTitleText(subjects[subject])
    return subjectPreviews
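# Fetch the newest previews once at startup; the diff against the saved state
# happens at the bottom of the script.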
newDict = getNewestAssignmentFromAll()
print("-------------------------------------------------")
def getNewestAssignment(url):
    response = requests.get(url)
    html = response.text
    # Find the opening tag
    stringSearchedFor = 'class="post-text">'
    indexOfString = html.find(stringSearchedFor)
    if indexOfString == -1:
        return "Nema lekcije"  # Serbian for "no lesson"
    # Find the closing tag, searching from the opening tag onwards
    indexOfClosing = html.find('</p>', indexOfString)
    location = html[indexOfString + len(stringSearchedFor):indexOfClosing]
    return location.replace("…", "").replace(" ", "")
def dictDiff(oldDict,newDict):
changes = {}
for subject in oldDict.keys():
if oldDict[subject] != newDict[subject]:
changes[subject] = newDict[subject]
return changes
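# Note: only subjects present in the first argument are compared, e.g.
#   dictDiff({"a": 1, "b": 2}, {"a": 1, "b": 3}) -> {"b": 3}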
def saveToFile(Filename, obj):
    with open(Filename, "w") as fp:
        json.dump(obj, fp)
def readFromFile(Filename):
    try:
        with open(Filename, "r") as fp:
            return json.load(fp)
    except (FileNotFoundError, json.JSONDecodeError):
        return {}
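# Reference snapshot of the assignments dict (same shape as the value returned
# by debugGetLocalAssignments); it is immediately replaced by readFromFile()
# below, so it only documents the expected structure.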
oldDict = {'Srpski': ['https://podrska.ossmarkovic.edu.rs/2020/10/14/%d0%b2%d0%b0%d0%b6%d0%bd%d0%be-%d0%be%d0%b1%d0%b0%d0%b2%d0%b5%d1%88%d1%82%d0%b5%d1%9a%d0%b5-%d0%b7%d0%b0-%d1%83%d1%87%d0%b5%d0%bd%d0%b8%d0%ba%d0%b5-5-1-5-2-%d0%b8-5-4-%d0%be%d0%b4%d0%b5%d1%99%d0%b5',
'Важно обавештење за ученике 5/1, 5/2 и 5/4 одељења'],
'Matematika': ['https://podrska.ossmarkovic.edu.rs/2020/10/12/5-15-35-4-%d0%be%d0%b1%d0%b0%d0%b2%d0%b5%d1%88%d1%82%d0%b5%d1%9a%d0%b5-%d0%b7%d0%b0-%d1%83%d1%87%d0%b5%d0%bd%d0%b8%d0%ba%d0%b5-%d0%bd%d0%b0-online-%d0%bd%d0%b0%d1%81%d1%82%d0%b0%d0%b2%d0%b8',
'5-1,5-3,5-4 – Обавештење за ученике на online настави'],
'Spanski': ['https://podrska.ossmarkovic.edu.rs/2020/10/13/10-%d0%b8-11-%d1%87%d0%b0%d1%81-%d0%b0%d0%b1%d0%b5%d1%86%d0%b5%d0%b4%d0%b0',
'10. И 11. час Абецеда'],
'Istorija': ['https://podrska.ossmarkovic.edu.rs/2020/10/06/%d0%bf%d1%80%d0%b0%d0%b8%d1%81%d1%82%d0%be%d1%80%d0%b8%d1%98%d0%b0',
'Праисторија'],
'Biologija': ['https://podrska.ossmarkovic.edu.rs/2020/10/08/%d1%85%d1%80%d0%b0%d0%bd%d0%b0-%d0%ba%d0%b0%d0%be-%d0%b8%d0%b7%d0%b2%d0%be%d1%80-%d0%b5%d0%bd%d0%b5%d1%80%d0%b3%d0%b8%d1%98%d0%b5-%d0%b8-%d0%b3%d1%80%d0%b0%d0%b4%d0%b8%d0%b2%d0%bd%d0%b8%d1%85-%d1%81',
'Храна као извор енергије и градивних супстанци'],
'Geografija': ['https://podrska.ossmarkovic.edu.rs/2020/10/06/%d0%be%d0%b1%d0%bb%d0%b8%d0%ba-%d0%b7%d0%b5%d0%bc%d1%99%d0%b5-%d0%b8-%d1%9a%d0%b5%d0%bd%d0%b0-%d1%81%d1%82%d1%80%d1%83%d0%ba%d1%82%d1%83%d1%80%d0%b0',
'Облик Земље и њена структура'],
'Likovno': ['https://podrska.ossmarkovic.edu.rs/2020/09/09/%d1%83%d0%b2%d0%be%d0%b4-%d1%83-%d0%bb%d0%b8%d0%ba%d0%be%d0%b2%d0%bd%d1%83-%d0%ba%d1%83%d0%bb%d1%82%d1%83%d1%80%d1%83',
'Увод у ликовну културу'],
'Fizicko': ['https://podrska.ossmarkovic.edu.rs/2020/09/25/%d0%bf%d1%80%d0%b0%d0%b2%d0%b8%d0%bb%d0%bd%d0%b0-%d0%b8%d1%81%d1%85%d1%80%d0%b0%d0%bd%d0%b0-%d0%b7%d0%b0-%d1%83%d1%87%d0%b5%d0%bd%d0%b8%d0%ba%d0%b5-%d0%be%d0%b4-5-8-%d1%80%d0%b0%d0%b7%d1%80%d0%b5',
'ПРАВИЛНА ИСХРАНА'],
'Tehnika': ['https://podrska.ossmarkovic.edu.rs/2020/10/12/%d1%81%d0%b0%d0%be%d0%b1%d1%80%d0%b0%d1%9b%d0%b0%d1%98-%d0%b2%d1%80%d1%81%d1%82%d0%b5-%d0%b8-%d1%81%d1%82%d1%80%d1%83%d0%ba%d1%82%d1%83%d1%80%d0%b0',
'Саобраћај, врсте и структура'],
'Informatika': ['https://podrska.ossmarkovic.edu.rs/2020/09/21/%d0%bd%d0%b0%d1%81%d1%82%d0%b0%d0%b2%d0%b0-%d0%bd%d0%b0-%d0%b4%d0%b0%d1%99%d0%b8%d0%bd%d1%83-%d0%b8%d0%b7-%d0%b8%d0%bd%d1%84%d0%be%d1%80%d0%bc%d0%b0%d1%82%d0%b8%d0%ba%d0%b5-%d0%b8-%d1%80%d0%b0',
'Настава на даљину из Информатике и рачунараства 5 разред'],
'Engleski': ['https://podrska.ossmarkovic.edu.rs/2020/10/06/%d0%b4%d0%be%d0%bc%d0%b0%d1%9b%d0%b8-%d0%b7%d0%b0%d0%b4%d0%b0%d1%82%d0%b0%d0%ba-%d0%b7%d0%b0-%d0%bf%d0%b5%d1%80%d0%b8%d0%be-5-10-17-10-2020',
'Домаћи задатак за перио 5.10.-17.10.2020.'],
'Francuski': ["Didn't find this", 'No title'],
'Muzicko': ['https://podrska.ossmarkovic.edu.rs/2020/10/10/%d0%bc%d1%83%d0%b7%d0%b8%d0%ba%d0%b0-%d0%bf%d1%80%d0%b0%d0%b8%d1%81%d1%82%d0%be%d1%80%d0%b8%d1%98%d0%b5-%d0%b4%d0%be%d0%bc%d0%b0%d1%9b%d0%b8-%d0%b7%d0%b0%d0%b4%d0%b0%d1%82%d0%b0%d0%ba',
'Музика праисторије-домаћи задатак'],
'Veronauka': ["Didn't find this", 'No title'],
'Gradansko': ['https://podrska.ossmarkovic.edu.rs/2020/10/05/%d0%ba%d0%be%d0%bc%d0%b1%d0%b8%d0%bd%d0%be%d0%b2%d0%b0%d0%bd%d0%b0-%d0%bd%d0%b0%d1%81%d1%82%d0%b0%d0%b2%d0%b0-%d0%bf%d0%b0%d0%bd%d0%b4%d0%b5%d0%bc%d0%b8%d1%98%d0%b0-%d0%b8%d1%81%d0%ba%d1%83',
'Комбинована настава, пандемија – искуства ђака 5/1, 5/2, 5/3, '
'5/4']}
oldDict = readFromFile(FILENAME)
if len(oldDict) == 0:
    # First run (or unreadable file): seed the saved state with the current data
    saveToFile(FILENAME, newDict)
    oldDict = readFromFile(FILENAME)
theDiff = dictDiff(oldDict, newDict)  # old first, new second, so changed subjects report their new values
if len(theDiff) > 0:
    with open(outputFile, "wt", encoding="utf8") as diffFile:
        for subject in theDiff.keys():
            diffFile.write(subject + "\n")
            diffFile.write(theDiff[subject][1] + "\n")  # title
            diffFile.write(theDiff[subject][0] + "\n")  # link
    #print(theDiff)
saveToFile(FILENAME, newDict)
print("-----------------------")
| 65.356436
| 272
| 0.639221
| 2,745
| 13,202
| 3.073224
| 0.090346
| 0.069227
| 0.060455
| 0.039829
| 0.77193
| 0.745377
| 0.726411
| 0.718706
| 0.708037
| 0.697131
| 0
| 0.192233
| 0.106726
| 13,202
| 202
| 273
| 65.356436
| 0.522937
| 0.007802
| 0
| 0.39759
| 0
| 0.246988
| 0.703933
| 0.0084
| 0
| 0
| 0
| 0
| 0
| 1
| 0.042169
| false
| 0
| 0.036145
| 0
| 0.144578
| 0.03012
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5dddeb62119e890eba15390d804a1ac95e9ce9c5
| 24,938
|
py
|
Python
|
tests/common/schedulers/test_slurm_commands.py
|
chenwany/aws-parallelcluster-node
|
a03ccfc02b732be6c31f075f7b95a46cf6aeae20
|
[
"Apache-2.0"
] | null | null | null |
tests/common/schedulers/test_slurm_commands.py
|
chenwany/aws-parallelcluster-node
|
a03ccfc02b732be6c31f075f7b95a46cf6aeae20
|
[
"Apache-2.0"
] | null | null | null |
tests/common/schedulers/test_slurm_commands.py
|
chenwany/aws-parallelcluster-node
|
a03ccfc02b732be6c31f075f7b95a46cf6aeae20
|
[
"Apache-2.0"
] | null | null | null |
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"). You may not use this file except in compliance
# with the License. A copy of the License is located at
#
# http://aws.amazon.com/apache2.0/
#
# or in the "LICENSE.txt" file accompanying this file. This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES
# OR CONDITIONS OF ANY KIND, express or implied. See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from assertpy import assert_that
from common.schedulers.slurm_commands import SlurmJob, get_jobs_info, get_pending_jobs_info
from tests.common import read_text
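# The parametrized cases below check that get_jobs_info parses raw squeue
# output into SlurmJob objects, including malformed output with extra or
# missing columns and completely empty output.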
@pytest.mark.parametrize(
"squeue_mocked_response, expected_output",
[
(
"squeue_output_mix.txt",
[
SlurmJob(
cpus_total=5,
cpus_min_per_node=1,
cpus_per_task=1,
state="PD",
nodes=2,
tasks=5,
id="72",
pending_reason="Resources",
tres_per_job={},
tres_per_task={},
cpus_per_tres={},
),
SlurmJob(
cpus_total=10,
cpus_min_per_node=1,
cpus_per_task=1,
state="R",
nodes=3,
tasks=10,
id="84",
pending_reason="Resources",
tres_per_job={"gpu": 12},
tres_per_task={},
cpus_per_tres={},
),
SlurmJob(
cpus_total=40,
cpus_min_per_node=4,
cpus_per_task=4,
state="PD",
nodes=10,
tasks=10,
id="86",
pending_reason="ReqNodeNotAvail, May be reserved for other job",
tres_per_job={},
tres_per_task={"gpu": 4},
cpus_per_tres={},
),
SlurmJob(
cpus_total=10,
cpus_min_per_node=1,
cpus_per_task=1,
state="PD",
nodes=10,
tasks=10,
id="87",
pending_reason="ReqNodeNotAvail, May be reserved for other job",
tres_per_job={"gpu": 12},
tres_per_task={"gpu": 4},
tres_per_node={"gpu": 6},
cpus_per_tres={},
),
SlurmJob(
cpus_total=15,
cpus_min_per_node=3,
cpus_per_task=3,
state="PD",
nodes=4,
tasks=5,
id="90_1",
pending_reason="PartitionConfig",
tres_per_job={"gpu": 12},
tres_per_task={"gpu": 4},
tres_per_node={"gpu": 6},
cpus_per_tres={},
),
SlurmJob(
cpus_total=15,
cpus_min_per_node=3,
cpus_per_task=3,
state="PD",
nodes=4,
tasks=5,
id="90_2",
pending_reason="PartitionNodeLimit",
tres_per_job={"gpu": 12},
tres_per_task={"gpu": 4},
tres_per_node={"gpu": 6},
cpus_per_tres={"gpu": 5},
),
SlurmJob(
cpus_total=15,
cpus_min_per_node=3,
cpus_per_task=3,
state="PD",
nodes=4,
tasks=5,
id="90_3",
pending_reason="Resources",
tres_per_job={"gpu": 12},
tres_per_task={"gpu": 4},
tres_per_node={"gpu": 6},
cpus_per_tres={"gpu": 5},
),
],
),
(
"squeue_output_extra_column.txt",
[
SlurmJob(
id="72",
state="PD",
nodes=2,
tasks=5,
cpus_total=5,
cpus_min_per_node=1,
cpus_per_task=1,
pending_reason="Resources",
)
],
),
(
"squeue_output_missing_column.txt",
[
SlurmJob(
cpus_total=5,
tres_per_job=None,
cpus_min_per_node=0,
cpus_per_task=1,
tres_per_task=None,
state="",
nodes=2,
tasks=5,
id="72",
pending_reason="Resources",
)
],
),
("squeue_output_empty.txt", []),
],
ids=["mixed_output", "extra_column", "missing_column", "empty"],
)
def test_get_jobs_info(squeue_mocked_response, expected_output, test_datadir, mocker):
    squeue_output = read_text(test_datadir / squeue_mocked_response)
    mock = mocker.patch(
        "common.schedulers.slurm_commands.check_command_output", return_value=squeue_output, autospec=True
    )
jobs = get_jobs_info(job_state_filter="PD,R")
mock.assert_called_with(
"/opt/slurm/bin/squeue -r -O 'jobid:200,statecompact:200,numnodes:200,numcpus:200,numtasks:200,"
"cpus-per-task:200,mincpus:200,reason:200,tres-per-job:200,tres-per-task:200,tres-per-node:200,"
"cpus-per-tres:200' --states PD,R"
)
assert_that(jobs).is_equal_to(expected_output)
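# The cases below feed pre-built SlurmJob lists into get_pending_jobs_info
# (get_jobs_info itself is mocked) and check that node counts are recomputed
# from the GPU/CPU requests and that jobs which cannot fit on the instance
# type, or whose pending reason is filtered out, are discarded.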
@pytest.mark.parametrize(
"pending_jobs, instance_properties, max_nodes_filter, filter_by_pending_reasons, expected_output",
[
(
[SlurmJob(id="72", state="PD", nodes=2, cpus_total=5, cpus_min_per_node=1, pending_reason="Priority")],
{"slots": 4, "gpus": 0},
2,
["Priority"],
[SlurmJob(id="72", state="PD", nodes=2, cpus_total=5, cpus_min_per_node=1, pending_reason="Priority")],
),
(
[
SlurmJob(id="72", state="PD", nodes=2, cpus_total=4, cpus_min_per_node=2, pending_reason="Priority"),
SlurmJob(
id="73", state="PD", nodes=2, cpus_total=5, cpus_min_per_node=2, pending_reason="Priority"
), # nodes gets incremented by 1
SlurmJob(id="74", state="PD", nodes=1, cpus_total=2, cpus_min_per_node=1, pending_reason="Priority"),
],
{"slots": 2, "gpus": 0},
2,
["Priority"],
[
SlurmJob(id="72", state="PD", nodes=2, cpus_total=4, cpus_min_per_node=2, pending_reason="Priority"),
SlurmJob(id="74", state="PD", nodes=1, cpus_total=2, cpus_min_per_node=1, pending_reason="Priority"),
],
),
(
[SlurmJob(id="72", state="PD", nodes=2, cpus_total=5, cpus_min_per_node=1, pending_reason="Priority")],
{"slots": 1, "gpus": 0},
1,
["Priority"],
[],
),
(
[SlurmJob(id="72", state="PD", nodes=2, cpus_total=5, cpus_min_per_node=5, pending_reason="Priority")],
{"slots": 4, "gpus": 0},
2,
["Priority"],
[],
),
(
[
SlurmJob(
id="72", state="PD", nodes=2, cpus_total=2, cpus_min_per_node=1, pending_reason="PartitionNodeLimit"
)
],
{"slots": 2, "gpus": 0},
2,
["Priority"],
[],
),
(
[SlurmJob(id="72", state="PD", nodes=4, cpus_total=15, cpus_min_per_node=3, pending_reason="Priority")],
{"slots": 4, "gpus": 0},
5,
[],
[
SlurmJob(
id="72",
state="PD",
nodes=5, # nodes got incremented by 1
cpus_total=15,
cpus_min_per_node=3,
pending_reason="Priority",
)
],
),
(
[SlurmJob(id="72", state="PD", nodes=4, cpus_total=15, cpus_min_per_node=3, pending_reason="Priority")],
{"slots": 4, "gpus": 0},
4,
[],
[],
),
(
[SlurmJob(id="72", state="PD", nodes=4, cpus_total=15, cpus_min_per_node=3, pending_reason="Priority")],
None,
None,
None,
[SlurmJob(id="72", state="PD", nodes=4, cpus_total=15, cpus_min_per_node=3, pending_reason="Priority")],
),
(
[
# sbatch --gpus=3 - no changes required
SlurmJob(
id="1",
state="PD",
nodes=1,
tasks=1,
cpus_per_task=1,
cpus_total=1,
cpus_min_per_node=1,
pending_reason="Priority",
tres_per_job={"gpu": 3},
tres_per_task={},
),
# sbatch --gpus=12 - recompute number of nodes
SlurmJob(
id="2",
state="PD",
nodes=3,
tasks=1,
cpus_per_task=1,
cpus_total=1,
cpus_min_per_node=1,
pending_reason="Priority",
tres_per_job={"gpu": 12},
tres_per_task={},
),
# sbatch --gpus=13 - recompute number of nodes and discard
SlurmJob(
id="3",
state="PD",
nodes=1,
tasks=1,
cpus_per_task=1,
cpus_total=1,
cpus_min_per_node=1,
pending_reason="Priority",
tres_per_job={"gpu": 13},
tres_per_task={},
),
# sbatch --gpus=4 -N 2 - no changes required
SlurmJob(
id="4",
state="PD",
nodes=2,
tasks=1,
cpus_per_task=1,
cpus_total=1,
cpus_min_per_node=1,
pending_reason="Priority",
tres_per_job={"gpu": 4},
tres_per_task={},
),
],
{"slots": 32, "gpus": 4},
3,
["Priority"],
[
SlurmJob(
id="1",
state="PD",
nodes=1,
tasks=1,
cpus_per_task=1,
cpus_total=1,
cpus_min_per_node=1,
pending_reason="Priority",
tres_per_job={"gpu": 3},
tres_per_task={},
),
SlurmJob(
id="2",
state="PD",
nodes=3,
tasks=1,
cpus_per_task=1,
cpus_total=1,
cpus_min_per_node=1,
pending_reason="Priority",
tres_per_job={"gpu": 12},
tres_per_task={},
),
SlurmJob(
id="4",
state="PD",
nodes=2,
tasks=1,
cpus_per_task=1,
cpus_total=1,
cpus_min_per_node=1,
pending_reason="Priority",
tres_per_job={"gpu": 4},
tres_per_task={},
),
],
),
(
[
# sbatch --gpus-per-task=2 -n 2 - no changes required
SlurmJob(
id="1",
state="PD",
nodes=1,
tasks=2,
cpus_per_task=1,
cpus_total=2,
cpus_min_per_node=1,
pending_reason="Priority",
tres_per_job={},
tres_per_task={"gpu": 2},
),
# sbatch --gpus-per-task=2 -n 3 - recompute number of nodes
SlurmJob(
id="2",
state="PD",
nodes=1,
tasks=3,
cpus_per_task=1,
cpus_total=3,
cpus_min_per_node=1,
pending_reason="Priority",
tres_per_job={},
tres_per_task={"gpu": 2},
),
# sbatch --wrap "sleep 100" --gpus-per-task=2 -n 3 -N 3 - no changes required
SlurmJob(
id="3",
state="PD",
nodes=3,
tasks=3,
cpus_per_task=1,
cpus_total=3,
cpus_min_per_node=1,
pending_reason="Priority",
tres_per_job={},
tres_per_task={"gpu": 2},
),
# sbatch --wrap "sleep 100" --gpus-per-task=2 -n 3 -c 22 - no changes required
SlurmJob(
id="4",
state="PD",
nodes=3,
tasks=3,
cpus_per_task=22,
cpus_total=66,
cpus_min_per_node=22,
pending_reason="Priority",
tres_per_job={},
tres_per_task={"gpu": 2},
),
# sbatch --gpus-per-task=2 -n 3 - recompute number of nodes and discard
SlurmJob(
id="5",
state="PD",
nodes=1,
tasks=7,
cpus_per_task=1,
cpus_total=7,
cpus_min_per_node=1,
pending_reason="Priority",
tres_per_job={},
tres_per_task={"gpu": 2},
),
],
{"slots": 32, "gpus": 4},
3,
["Priority"],
[
# sbatch --gpus-per-task=2 -n 2 - no changes required
SlurmJob(
id="1",
state="PD",
nodes=1,
tasks=2,
cpus_per_task=1,
cpus_total=2,
cpus_min_per_node=1,
pending_reason="Priority",
tres_per_job={},
tres_per_task={"gpu": 2},
),
# sbatch --gpus-per-task=2 -n 3 - recompute number of nodes
SlurmJob(
id="2",
state="PD",
nodes=2,
tasks=3,
cpus_per_task=1,
cpus_total=3,
cpus_min_per_node=1,
pending_reason="Priority",
tres_per_job={},
tres_per_task={"gpu": 2},
),
# sbatch --wrap "sleep 100" --gpus-per-task=2 -n 3 -N 3 - no changes required
SlurmJob(
id="3",
state="PD",
nodes=3,
tasks=3,
cpus_per_task=1,
cpus_total=3,
cpus_min_per_node=1,
pending_reason="Priority",
tres_per_job={},
tres_per_task={"gpu": 2},
),
# sbatch --wrap "sleep 100" --gpus-per-task=2 -n 3 -c 22 - no changes required
SlurmJob(
id="4",
state="PD",
nodes=3,
tasks=3,
cpus_per_task=22,
cpus_total=66,
cpus_min_per_node=22,
pending_reason="Priority",
tres_per_job={},
tres_per_task={"gpu": 2},
),
],
),
(
[
# sbatch --gpus-per-task=5 -n 3 - nodes recomputed
SlurmJob(
id="1",
state="PD",
nodes=1,
tasks=3,
cpus_per_task=1,
cpus_total=3,
cpus_min_per_node=1,
pending_reason="Priority",
tres_per_job={},
tres_per_task={"gpu": 5},
)
],
{"slots": 32, "gpus": 8},
3,
["Priority"],
[
# sbatch --gpus-per-task=5 -n 3 - nodes recomputed
SlurmJob(
id="1",
state="PD",
nodes=3,
tasks=3,
cpus_per_task=1,
cpus_total=3,
cpus_min_per_node=1,
pending_reason="Priority",
tres_per_job={},
tres_per_task={"gpu": 5},
)
],
),
(
[
# sbatch --wrap "sleep 100" -n 40 --gpus-per-node=1 - no changes required
SlurmJob(
id="1",
state="PD",
nodes=2,
tasks=40,
cpus_per_task=1,
cpus_total=40,
cpus_min_per_node=1,
pending_reason="Priority",
tres_per_job={},
tres_per_task={},
),
# sbatch --wrap "sleep 100" --gres=gpu:4 -n 2 -c 20 - no changes required
SlurmJob(
id="1",
state="PD",
nodes=2,
tasks=2,
cpus_per_task=20,
cpus_total=40,
cpus_min_per_node=20,
pending_reason="Priority",
tres_per_job={},
tres_per_task={},
),
],
{"slots": 32, "gpus": 4},
3,
["Priority"],
[
# sbatch --wrap "sleep 100" -n 40 --gpus-per-node=1 - no changes required
SlurmJob(
id="1",
state="PD",
nodes=2,
tasks=40,
cpus_per_task=1,
cpus_total=40,
cpus_min_per_node=1,
pending_reason="Priority",
tres_per_job={},
tres_per_task={},
),
# sbatch --wrap "sleep 100" --gres=gpu:4 -n 2 -c 20 - no changes required
SlurmJob(
id="1",
state="PD",
nodes=2,
tasks=2,
cpus_per_task=20,
cpus_total=40,
cpus_min_per_node=20,
pending_reason="Priority",
tres_per_job={},
tres_per_task={},
),
],
),
(
[
# sbatch --wrap "sleep 100" --gpus=4 --gpus-per-node=1 - discarded
SlurmJob(
id="1",
state="PD",
nodes=4,
tasks=1,
cpus_per_task=1,
cpus_total=4,
cpus_min_per_node=1,
pending_reason="Priority",
tres_per_job={"gpu": 4},
tres_per_task={},
),
# sbatch --wrap "sleep 100" --gpus=5 --cpus-per-gpu=15 - recompute number of nodes, recompute cpus_total
SlurmJob(
id="1",
state="PD",
nodes=1,
tasks=1,
cpus_per_task=1,
cpus_total=1,
cpus_min_per_node=1,
pending_reason="Priority",
tres_per_job={"gpu": 5},
tres_per_task={},
cpus_per_tres={"gpu": 15},
),
# sbatch --wrap "sleep 100" --gpus=10 --cpus-per-gpu=10 - discarded
SlurmJob(
id="1",
state="PD",
nodes=1,
tasks=1,
cpus_per_task=1,
cpus_total=1,
cpus_min_per_node=1,
pending_reason="Priority",
tres_per_job={"gpu": 10},
tres_per_task={},
cpus_per_tres={"gpu": 10},
),
# sbatch --wrap "sleep 100" -n 1 -c 33 --gpus=10 --cpus-per-gpu=1 - discarded
SlurmJob(
id="1",
state="PD",
nodes=1,
tasks=1,
cpus_per_task=1,
cpus_total=1,
cpus_min_per_node=33,
pending_reason="Priority",
tres_per_job={"gpu": 10},
tres_per_task={},
cpus_per_tres={"gpu": 1},
),
# sbatch --wrap "sleep 100" --gpus=5 --gpus-per-task=1 - recomputed number of nodes
SlurmJob(
id="2",
state="PD",
nodes=1,
tasks=5,
cpus_per_task=1,
cpus_total=1,
cpus_min_per_node=1,
pending_reason="Priority",
tres_per_job={"gpu": 5},
tres_per_task={"gpu": 1},
),
],
{"slots": 32, "gpus": 4},
3,
["Priority"],
[
# sbatch --wrap "sleep 100" --gpus=4 --cpus-per-gpu=9 - recompute number of nodes, recompute cpus_total
SlurmJob(
id="1",
state="PD",
nodes=3,
tasks=1,
cpus_per_task=1,
cpus_total=75,
cpus_min_per_node=1,
pending_reason="Priority",
tres_per_job={"gpu": 5},
tres_per_task={},
cpus_per_tres={"gpu": 15},
),
# sbatch --wrap "sleep 100" --gpus=5 --gpus-per-task=1 - recomputed number of nodes
SlurmJob(
id="2",
state="PD",
nodes=2,
tasks=5,
cpus_per_task=1,
cpus_total=2,
cpus_min_per_node=1,
pending_reason="Priority",
tres_per_job={"gpu": 5},
tres_per_task={"gpu": 1},
),
],
),
],
ids=[
"single",
"multiple",
"max_nodes",
"max_cpus",
"filter_state",
"additional_node_required",
"discarded_after_node_adjustment",
"no_filters",
"gpus_per_job",
"gpus_per_task",
"gpus_per_task_2",
"gpus_per_node",
"gpus_mix",
],
)
def test_get_pending_jobs_info(
pending_jobs, instance_properties, max_nodes_filter, filter_by_pending_reasons, expected_output, mocker
):
mock = mocker.patch("common.schedulers.slurm_commands.get_jobs_info", return_value=pending_jobs, autospec=True)
pending_jobs = get_pending_jobs_info(instance_properties, max_nodes_filter, filter_by_pending_reasons)
mock.assert_called_with(job_state_filter="PD")
assert_that(pending_jobs).is_equal_to(expected_output)
| 35.473684
| 120
| 0.388524
| 2,327
| 24,938
| 3.889557
| 0.086377
| 0.071152
| 0.058557
| 0.08198
| 0.809966
| 0.781903
| 0.767871
| 0.745001
| 0.72655
| 0.704232
| 0
| 0.049736
| 0.506576
| 24,938
| 702
| 121
| 35.524217
| 0.685819
| 0.093392
| 0
| 0.817352
| 0
| 0.003044
| 0.080815
| 0.02171
| 0
| 0
| 0
| 0
| 0.00761
| 1
| 0.003044
| false
| 0
| 0.006088
| 0
| 0.009132
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5de0639a8b1cfcee1221b4ed93ce4bd69fb7e983
| 29,481
|
py
|
Python
|
tests/test_types.py
|
cputans/pypuppetdb
|
0ff7d72d83a93feba417636112160f3b1e5fd634
|
[
"Apache-2.0"
] | 1
|
2018-07-10T23:56:55.000Z
|
2018-07-10T23:56:55.000Z
|
tests/test_types.py
|
cputans/pypuppetdb
|
0ff7d72d83a93feba417636112160f3b1e5fd634
|
[
"Apache-2.0"
] | null | null | null |
tests/test_types.py
|
cputans/pypuppetdb
|
0ff7d72d83a93feba417636112160f3b1e5fd634
|
[
"Apache-2.0"
] | null | null | null |
import sys
from pypuppetdb.utils import json_to_datetime
from pypuppetdb.types import (
Node, Fact, Resource,
Report, Event, Catalog, Edge,
Inventory
)
if sys.version_info >= (3, 0):
unicode = str
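# On Python 3 the built-in str type is already unicode; this shim keeps the
# Python 2 style unicode(...) assertions below working on both versions.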
class TestNode(object):
"""Test the Node object."""
def test_without_status(self):
node = Node('_', 'node',
report_timestamp='2013-08-01T09:57:00.000Z',
catalog_timestamp='2013-08-01T09:57:00.000Z',
facts_timestamp='2013-08-01T09:57:00.000Z',)
assert node.name == 'node'
assert node.deactivated is False
assert node.expired is False
assert node.report_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.facts_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.catalog_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert str(node) == str('node')
assert unicode(node) == unicode('node')
assert repr(node) == str('<Node: node>')
def test_with_status_unreported(self):
node = Node('_', 'node',
report_timestamp='2013-08-01T09:57:00.000Z',
catalog_timestamp='2013-08-01T09:57:00.000Z',
facts_timestamp='2013-08-01T09:57:00.000Z',
status_report='unchanged',
unreported=True,
unreported_time='0d 5h 20m',)
assert node.name == 'node'
assert node.deactivated is False
assert node.expired is False
assert node.report_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.facts_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.catalog_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.status == 'unreported'
        assert node.unreported_time == '0d 5h 20m'
assert str(node) == str('node')
assert unicode(node) == unicode('node')
assert repr(node) == str('<Node: node>')
def test_with_status_unreported_from_noop(self):
node = Node('_', 'node',
report_timestamp='2013-08-01T09:57:00.000Z',
catalog_timestamp='2013-08-01T09:57:00.000Z',
facts_timestamp='2013-08-01T09:57:00.000Z',
status_report='noop',
unreported=True,
unreported_time='0d 5h 20m',)
assert node.name == 'node'
assert node.deactivated is False
assert node.expired is False
assert node.report_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.facts_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.catalog_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.status == 'unreported'
        assert node.unreported_time == '0d 5h 20m'
assert str(node) == str('node')
assert unicode(node) == unicode('node')
assert repr(node) == str('<Node: node>')
def test_with_status_unreported_from_failed(self):
node = Node('_', 'node',
report_timestamp='2013-08-01T09:57:00.000Z',
catalog_timestamp='2013-08-01T09:57:00.000Z',
facts_timestamp='2013-08-01T09:57:00.000Z',
status_report='failed',
unreported=True,
unreported_time='0d 5h 20m',)
assert node.name == 'node'
assert node.deactivated is False
assert node.expired is False
assert node.report_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.facts_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.catalog_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.status == 'unreported'
        assert node.unreported_time == '0d 5h 20m'
assert str(node) == str('node')
assert unicode(node) == unicode('node')
assert repr(node) == str('<Node: node>')
def test_apiv4_with_failed_status(self):
node = Node('_', 'node',
status_report='failed',
report_environment='development',
catalog_environment='development',
facts_environment='development',
report_timestamp='2013-08-01T09:57:00.000Z',
catalog_timestamp='2013-08-01T09:57:00.000Z',
facts_timestamp='2013-08-01T09:57:00.000Z',
)
assert node.name == 'node'
assert node.deactivated is False
assert node.expired is False
assert node.report_environment == 'development'
assert node.catalog_environment == 'development'
assert node.facts_environment == 'development'
assert node.report_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.facts_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.catalog_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.status == 'failed'
assert str(node) == str('node')
assert unicode(node) == unicode('node')
assert repr(node) == str('<Node: node>')
def test_apiv4_with_unchanged_status(self):
node = Node('_', 'node',
status_report='unchanged',
report_environment='development',
catalog_environment='development',
facts_environment='development',
report_timestamp='2013-08-01T09:57:00.000Z',
catalog_timestamp='2013-08-01T09:57:00.000Z',
facts_timestamp='2013-08-01T09:57:00.000Z',
)
assert node.name == 'node'
assert node.deactivated is False
assert node.expired is False
assert node.report_environment == 'development'
assert node.catalog_environment == 'development'
assert node.facts_environment == 'development'
assert node.report_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.facts_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.catalog_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.status == 'unchanged'
assert str(node) == str('node')
assert unicode(node) == unicode('node')
assert repr(node) == str('<Node: node>')
def test_apiv4_with_unchanged_noop_status(self):
node = Node('_', 'node',
status_report='unchanged',
noop=True,
noop_pending=False,
report_environment='development',
catalog_environment='development',
facts_environment='development',
report_timestamp='2013-08-01T09:57:00.000Z',
catalog_timestamp='2013-08-01T09:57:00.000Z',
facts_timestamp='2013-08-01T09:57:00.000Z',
)
assert node.name == 'node'
assert node.deactivated is False
assert node.expired is False
assert node.report_environment == 'development'
assert node.catalog_environment == 'development'
assert node.facts_environment == 'development'
assert node.report_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.facts_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.catalog_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.status == 'unchanged'
assert str(node) == str('node')
assert unicode(node) == unicode('node')
assert repr(node) == str('<Node: node>')
def test_apiv4_with_pending_noop_status(self):
node = Node('_', 'node',
status_report='unchanged',
noop=True,
noop_pending=True,
report_environment='development',
catalog_environment='development',
facts_environment='development',
report_timestamp='2013-08-01T09:57:00.000Z',
catalog_timestamp='2013-08-01T09:57:00.000Z',
facts_timestamp='2013-08-01T09:57:00.000Z',
)
assert node.name == 'node'
assert node.deactivated is False
assert node.expired is False
assert node.report_environment == 'development'
assert node.catalog_environment == 'development'
assert node.facts_environment == 'development'
assert node.report_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.facts_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.catalog_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.status == 'noop'
assert str(node) == str('node')
assert unicode(node) == unicode('node')
assert repr(node) == str('<Node: node>')
def test_apiv4_with_failed_noop_status(self):
node = Node('_', 'node',
status_report='failed',
noop=True,
noop_pending=False,
report_environment='development',
catalog_environment='development',
facts_environment='development',
report_timestamp='2013-08-01T09:57:00.000Z',
catalog_timestamp='2013-08-01T09:57:00.000Z',
facts_timestamp='2013-08-01T09:57:00.000Z',
)
assert node.name == 'node'
assert node.deactivated is False
assert node.expired is False
assert node.report_environment == 'development'
assert node.catalog_environment == 'development'
assert node.facts_environment == 'development'
assert node.report_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.facts_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.catalog_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.status == 'failed'
assert str(node) == str('node')
assert unicode(node) == unicode('node')
assert repr(node) == str('<Node: node>')
def test_apiv4_without_status(self):
node = Node('_', 'node',
report_environment='development',
catalog_environment='development',
facts_environment='development',
report_timestamp='2013-08-01T09:57:00.000Z',
catalog_timestamp='2013-08-01T09:57:00.000Z',
facts_timestamp='2013-08-01T09:57:00.000Z',)
assert node.name == 'node'
assert node.deactivated is False
assert node.expired is False
assert node.report_environment == 'development'
assert node.catalog_environment == 'development'
assert node.facts_environment == 'development'
assert node.report_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.facts_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert node.catalog_timestamp == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert str(node) == str('node')
assert unicode(node) == unicode('node')
assert repr(node) == str('<Node: node>')
def test_deactivated(self):
node = Node('_', 'node',
deactivated='2013-08-01T09:57:00.000Z',)
assert node.name == 'node'
assert node.deactivated == \
json_to_datetime('2013-08-01T09:57:00.000Z')
assert str(node) == str('node')
assert unicode(node) == unicode('node')
assert repr(node) == str('<Node: node>')
def test_expired(self):
node = Node('_', 'node',
expired='2013-08-01T09:57:00.000Z',)
assert node.name == 'node'
assert node.expired == json_to_datetime('2013-08-01T09:57:00.000Z')
assert str(node) == str('node')
assert unicode(node) == unicode('node')
assert repr(node) == str('<Node: node>')
def test_with_latest_report_hash(self):
node = Node('_', 'node',
latest_report_hash='hash#1')
assert node.name == 'node'
assert node.latest_report_hash == 'hash#1'
def test_with_cached_catalog_status(self):
node1 = Node('_', 'node', cached_catalog_status='explicitly_requested')
node2 = Node('_', 'node', cached_catalog_status='on_failure')
node3 = Node('_', 'node', cached_catalog_status='not_used')
assert node1.name == 'node'
assert node1.cached_catalog_status == 'explicitly_requested'
assert node2.name == 'node'
assert node2.cached_catalog_status == 'on_failure'
assert node3.name == 'node'
assert node3.cached_catalog_status == 'not_used'
class TestFact(object):
"""Test the Fact object."""
def test_fact(self):
fact = Fact('node', 'osfamily', 'Debian', 'production')
assert fact.node == 'node'
assert fact.name == 'osfamily'
assert fact.value == 'Debian'
assert fact.environment == 'production'
assert str(fact) == str('osfamily/node')
assert unicode(fact) == unicode('osfamily/node')
assert repr(fact) == str('Fact: osfamily/node')
class TestResource(object):
"Test the Resource object."""
def test_resource(self):
resource = Resource('node', '/etc/ssh/sshd_config', 'file',
['class', 'ssh'], False, '/ssh/manifests/init.pp',
15, 'production', parameters={
'ensure': 'present',
'owner': 'root',
'group': 'root',
'mode': '0600',
})
assert resource.node == 'node'
assert resource.name == '/etc/ssh/sshd_config'
assert resource.type_ == 'file'
assert resource.tags == ['class', 'ssh']
assert resource.exported is False
assert resource.sourcefile == '/ssh/manifests/init.pp'
assert resource.sourceline == 15
assert resource.environment == 'production'
assert resource.parameters['ensure'] == 'present'
assert resource.parameters['owner'] == 'root'
assert resource.parameters['group'] == 'root'
assert resource.parameters['mode'] == '0600'
assert str(resource) == str('file[/etc/ssh/sshd_config]')
assert unicode(resource) == unicode('file[/etc/ssh/sshd_config]')
assert repr(resource) == str(
'<Resource: file[/etc/ssh/sshd_config]>')
class TestReport(object):
"""Test the Report object."""
def test_report(self):
report = Report('_', 'node1.puppet.board', 'hash#',
'2013-08-01T09:57:00.000Z',
'2013-08-01T10:57:00.000Z',
'2013-08-01T10:58:00.000Z',
'1351535883', 3, '3.2.1',
'af9f16e3-75f6-4f90-acc6-f83d6524a6f3',
status='success')
assert report.node == 'node1.puppet.board'
assert report.hash_ == 'hash#'
assert report.start == json_to_datetime('2013-08-01T09:57:00.000Z')
assert report.end == json_to_datetime('2013-08-01T10:57:00.000Z')
assert report.received == json_to_datetime('2013-08-01T10:58:00.000Z')
assert report.version == '1351535883'
assert report.format_ == 3
assert report.agent_version == '3.2.1'
assert report.run_time == report.end - report.start
assert report.transaction == 'af9f16e3-75f6-4f90-acc6-f83d6524a6f3'
assert report.status == 'success'
assert str(report) == str('hash#')
assert unicode(report) == unicode('hash#')
assert repr(report) == str('Report: hash#')
def test_report_with_noop(self):
report = Report('_', 'node2.puppet.board', 'hash#',
'2015-08-31T21:07:00.000Z',
'2015-08-31T21:09:00.000Z',
'2015-08-31T21:10:00.000Z',
'1482347613', 4, '4.2.1',
'af9f16e3-75f6-4f90-acc6-f83d6524a6f3',
status='unchanged',
noop=True,
noop_pending=False)
assert report.node == 'node2.puppet.board'
assert report.hash_ == 'hash#'
assert report.start == json_to_datetime('2015-08-31T21:07:00.000Z')
assert report.end == json_to_datetime('2015-08-31T21:09:00.000Z')
assert report.received == json_to_datetime('2015-08-31T21:10:00.000Z')
assert report.version == '1482347613'
assert report.format_ == 4
assert report.agent_version == '4.2.1'
assert report.run_time == report.end - report.start
assert report.transaction == 'af9f16e3-75f6-4f90-acc6-f83d6524a6f3'
assert report.status == 'unchanged'
assert str(report) == str('hash#')
assert unicode(report) == unicode('hash#')
assert repr(report) == str('Report: hash#')
def test_report_with_failed_noop(self):
report = Report('_', 'node2.puppet.board', 'hash#',
'2015-08-31T21:07:00.000Z',
'2015-08-31T21:09:00.000Z',
'2015-08-31T21:10:00.000Z',
'1482347613', 4, '4.2.1',
'af9f16e3-75f6-4f90-acc6-f83d6524a6f3',
status='failed',
noop=True,
noop_pending=False)
assert report.node == 'node2.puppet.board'
assert report.hash_ == 'hash#'
assert report.start == json_to_datetime('2015-08-31T21:07:00.000Z')
assert report.end == json_to_datetime('2015-08-31T21:09:00.000Z')
assert report.received == json_to_datetime('2015-08-31T21:10:00.000Z')
assert report.version == '1482347613'
assert report.format_ == 4
assert report.agent_version == '4.2.1'
assert report.run_time == report.end - report.start
assert report.transaction == 'af9f16e3-75f6-4f90-acc6-f83d6524a6f3'
assert report.status == 'failed'
assert str(report) == str('hash#')
assert unicode(report) == unicode('hash#')
assert repr(report) == str('Report: hash#')
def test_report_with_pending_noop(self):
report = Report('_', 'node2.puppet.board', 'hash#',
'2015-08-31T21:07:00.000Z',
'2015-08-31T21:09:00.000Z',
'2015-08-31T21:10:00.000Z',
'1482347613', 4, '4.2.1',
'af9f16e3-75f6-4f90-acc6-f83d6524a6f3',
status='unchanged',
noop=True,
noop_pending=True)
assert report.node == 'node2.puppet.board'
assert report.hash_ == 'hash#'
assert report.start == json_to_datetime('2015-08-31T21:07:00.000Z')
assert report.end == json_to_datetime('2015-08-31T21:09:00.000Z')
assert report.received == json_to_datetime('2015-08-31T21:10:00.000Z')
assert report.version == '1482347613'
assert report.format_ == 4
assert report.agent_version == '4.2.1'
assert report.run_time == report.end - report.start
assert report.transaction == 'af9f16e3-75f6-4f90-acc6-f83d6524a6f3'
assert report.status == 'noop'
assert str(report) == str('hash#')
assert unicode(report) == unicode('hash#')
assert repr(report) == str('Report: hash#')
def test_report_with_cataloguuid_codeid(self):
report = Report('_', 'node2.puppet.board', 'hash#',
'2015-08-31T21:07:00.000Z',
'2015-08-31T21:09:00.000Z',
'2015-08-31T21:10:00.000Z',
'1482347613', 4, '4.2.1',
'af9f16e3-75f6-4f90-acc6-f83d6524a6f3',
code_id=None,
catalog_uuid="0b3a4943-a164-4cea-bbf0-91d0ee931326",
cached_catalog_status="not_used")
assert report.node == 'node2.puppet.board'
assert report.hash_ == 'hash#'
assert report.start == json_to_datetime('2015-08-31T21:07:00.000Z')
assert report.end == json_to_datetime('2015-08-31T21:09:00.000Z')
assert report.received == json_to_datetime('2015-08-31T21:10:00.000Z')
assert report.version == '1482347613'
assert report.format_ == 4
assert report.agent_version == '4.2.1'
assert report.run_time == report.end - report.start
assert report.transaction == 'af9f16e3-75f6-4f90-acc6-f83d6524a6f3'
assert report.catalog_uuid == "0b3a4943-a164-4cea-bbf0-91d0ee931326"
assert report.cached_catalog_status == "not_used"
assert str(report) == str('hash#')
assert unicode(report) == unicode('hash#')
assert repr(report) == str('Report: hash#')
def test_report_with_producer(self):
report = Report('_', "test.test.com", "hash#",
'2015-08-31T21:07:00.000Z',
'2015-08-31T21:09:00.000Z',
'2015-08-31T21:10:00.000Z',
'1482347613', 4, '4.2.1',
'af9f16e3-75f6-4f90-acc6-f83d6524a6f3',
producer="puppet01.test.com")
assert report.node == "test.test.com"
assert report.hash_ == 'hash#'
assert report.start == json_to_datetime('2015-08-31T21:07:00.000Z')
assert report.end == json_to_datetime('2015-08-31T21:09:00.000Z')
assert report.received == json_to_datetime('2015-08-31T21:10:00.000Z')
assert report.version == '1482347613'
assert report.format_ == 4
assert report.agent_version == '4.2.1'
assert report.run_time == report.end - report.start
assert report.producer == "puppet01.test.com"
assert str(report) == str('hash#')
assert unicode(report) == unicode('hash#')
assert repr(report) == str('Report: hash#')
class TestEvent(object):
"""Test the Event object."""
def test_event(self):
event = Event('node', 'failure', '2013-08-01T10:57:00.000Z',
'hash#', '/etc/ssh/sshd_config', 'ensure',
'Nothing to say', 'present', 'absent', 'file',
'Ssh::Server',
['Stage[main]', 'Ssh::Server',
'File[/etc/ssh/sshd_config]'],
'/etc/puppet/modules/ssh/manifests/server.pp', 80)
assert event.node == 'node'
assert event.status == 'failure'
assert event.failed is True
assert event.timestamp == json_to_datetime('2013-08-01T10:57:00.000Z')
assert event.hash_ == 'hash#'
assert event.item['title'] == '/etc/ssh/sshd_config'
assert event.item['type'] == 'file'
assert event.item['property'] == 'ensure'
assert event.item['message'] == 'Nothing to say'
assert event.item['old'] == 'absent'
assert event.item['new'] == 'present'
assert str(event) == str('file[/etc/ssh/sshd_config]/hash#')
assert unicode(event) == unicode('file[/etc/ssh/sshd_config]/hash#')
assert repr(event) == str('Event: file[/etc/ssh/sshd_config]/hash#')
def test_event_failed(self):
event = Event('node', 'success', '2013-08-01T10:57:00.000Z',
'hash#', '/etc/ssh/sshd_config', 'ensure',
'Nothing to say', 'present', 'absent', 'file',
'Ssh::Server',
['Stage[main]', 'Ssh::Server',
'File[/etc/ssh/sshd_config]'],
'/etc/puppet/modules/ssh/manifests/server.pp', 80)
assert event.status == 'success'
assert event.failed is False
class TestCatalog(object):
"""Test the Catalog object."""
def test_catalog(self):
catalog = Catalog('node', [], [], 'unique', None)
assert catalog.node == 'node'
assert catalog.version == 'unique'
assert catalog.transaction_uuid is None
assert catalog.resources == {}
assert catalog.edges == []
assert str(catalog) == str('node/None')
assert unicode(catalog) == unicode('node/None')
assert repr(catalog) == str(
'<Catalog: node/None>')
def test_catalog_codeid(self):
catalog = Catalog('node', [], [], 'unique', None,
code_id='somecodeid')
assert catalog.node == 'node'
assert catalog.version == 'unique'
assert catalog.transaction_uuid is None
assert catalog.resources == {}
assert catalog.edges == []
assert str(catalog) == str('node/None')
assert unicode(catalog) == unicode('node/None')
assert repr(catalog) == str(
'<Catalog: node/None>')
assert catalog.code_id == 'somecodeid'
def test_catalog_uuid(self):
catalog = Catalog('node', [], [], 'unique', None,
                          catalog_uuid='universallyuniqueidentifier')
assert catalog.node == 'node'
assert catalog.version == 'unique'
assert catalog.transaction_uuid is None
assert catalog.resources == {}
assert catalog.edges == []
assert str(catalog) == str('node/None')
assert unicode(catalog) == unicode('node/None')
assert repr(catalog) == str(
'<Catalog: node/None>')
assert catalog.catalog_uuid == 'universallyuniqueidentifier'
def test_catalog_producer(self):
catalog = Catalog('node', [], [], 'unique', None,
producer="puppet01.test.com")
assert catalog.node == 'node'
assert catalog.version == 'unique'
assert catalog.transaction_uuid is None
assert catalog.resources == {}
assert catalog.edges == []
assert catalog.producer == 'puppet01.test.com'
assert str(catalog) == str('node/None')
assert unicode(catalog) == unicode('node/None')
assert repr(catalog) == str(
'<Catalog: node/None>')
class TestEdge(object):
"""Test the Edge object."""
def test_edge(self):
resource_a = Resource('node', '/etc/ssh/sshd_config', 'file',
['class', 'ssh'], False,
'/ssh/manifests/init.pp', 15, 'production',
parameters={})
resource_b = Resource('node', 'sshd', 'service',
['class', 'ssh'], False,
'/ssh/manifests/init.pp', 30, 'production',
parameters={})
edge = Edge(resource_a, resource_b, 'notify')
assert edge.source == resource_a
assert edge.target == resource_b
assert edge.relationship == 'notify'
assert str(edge) == str(
'file[/etc/ssh/sshd_config] - notify - service[sshd]')
assert unicode(edge) == unicode(
'file[/etc/ssh/sshd_config] - notify - service[sshd]')
assert repr(edge) == str(
'<Edge: file[/etc/ssh/sshd_config] - notify - service[sshd]>')
class TestInventory(object):
def test_inventory(self):
inv = Inventory(node="test1.test.com",
environment="production",
time='2016-08-18T21:00:00.000Z',
facts={
"hostname": "test1.test.com",
"domain": "test.com",
"puppetversion": "4.6.0"
},
trusted={
"authenticated": "remote",
"domain": "test.com",
"certname": "test1.test.com",
"extensions": {},
"hostname": "test1"
})
assert inv.node == "test1.test.com"
assert inv.environment == "production"
assert inv.time == json_to_datetime('2016-08-18T21:00:00.000Z')
assert inv.facts == {
"hostname": "test1.test.com",
"domain": "test.com",
"puppetversion": "4.6.0"
}
assert inv.trusted == {
"authenticated": "remote",
"domain": "test.com",
"certname": "test1.test.com",
"extensions": {},
"hostname": "test1"
}
assert str(inv) == str("test1.test.com")
assert unicode(inv) == unicode("test1.test.com")
assert repr(inv) == str("<Inventory: test1.test.com>")
| 43.740356
| 79
| 0.556019
| 3,218
| 29,481
| 4.964264
| 0.058421
| 0.039437
| 0.035556
| 0.053709
| 0.826228
| 0.806385
| 0.770391
| 0.756682
| 0.741596
| 0.736213
| 0
| 0.112856
| 0.308707
| 29,481
| 673
| 80
| 43.805349
| 0.671001
| 0.005529
| 0
| 0.698835
| 0
| 0
| 0.229352
| 0.120049
| 0
| 0
| 0
| 0
| 0.512479
| 1
| 0.049917
| false
| 0
| 0.004992
| 0
| 0.06822
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
b90b662911d5ca8ec157b652df0762a20a5ab63c
| 539
|
py
|
Python
|
tests/test_app/models.py
|
Instawork/django-admin-fast-search
|
d3ba7b0fa2e92e9564069bbb703420f35dae2c7e
|
[
"MIT"
] | 2
|
2022-01-26T22:16:38.000Z
|
2022-01-31T10:07:06.000Z
|
tests/test_app/models.py
|
Instawork/django-admin-fast-search
|
d3ba7b0fa2e92e9564069bbb703420f35dae2c7e
|
[
"MIT"
] | null | null | null |
tests/test_app/models.py
|
Instawork/django-admin-fast-search
|
d3ba7b0fa2e92e9564069bbb703420f35dae2c7e
|
[
"MIT"
] | null | null | null |
from django.db import models
# Put your test models here
class TestModel1(models.Model):
name = models.CharField(max_length=255)
email = models.EmailField()
phonenumber = models.CharField(max_length=20)
class TestModel2(models.Model):
name = models.CharField(max_length=255)
email = models.EmailField()
phonenumber = models.CharField(max_length=20)
class TestModel3(models.Model):
name = models.CharField(max_length=255)
email = models.EmailField()
phonenumber = models.CharField(max_length=20)
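These three models exist only to exercise the admin; for context, a registration for one of them using only stock django.contrib.admin might look like the sketch below (the repo's own fast-search admin class is not shown here, and the relative import is an assumption):
# Illustrative sketch with stock Django admin API only.
from django.contrib import admin
from .models import TestModel1  # assumed relative import from an admin.py

@admin.register(TestModel1)
class TestModel1Admin(admin.ModelAdmin):
    list_display = ("name", "email", "phonenumber")   # columns in the change list
    search_fields = ("name", "email", "phonenumber")  # fields scanned by the search box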
| 23.434783
| 49
| 0.734694
| 67
| 539
| 5.820896
| 0.343284
| 0.230769
| 0.276923
| 0.369231
| 0.794872
| 0.794872
| 0.794872
| 0.794872
| 0.794872
| 0.794872
| 0
| 0.039823
| 0.16141
| 539
| 22
| 50
| 24.5
| 0.823009
| 0.046382
| 0
| 0.692308
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.076923
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 9
|
5d2745d633086d3fe846e4cc02529eb38a539ca5
| 157
|
py
|
Python
|
bioimageio/core/prediction_pipeline/_model_adapters/__init__.py
|
bioimage-io/python-core
|
78fa57fd0af968b57c0df8f294faf3a8b502909e
|
[
"MIT"
] | 2
|
2021-10-01T19:20:48.000Z
|
2021-11-29T15:44:27.000Z
|
bioimageio/core/prediction_pipeline/_model_adapters/__init__.py
|
bioimage-io/python-core
|
78fa57fd0af968b57c0df8f294faf3a8b502909e
|
[
"MIT"
] | 98
|
2021-09-22T08:44:40.000Z
|
2022-03-30T17:21:55.000Z
|
bioimageio/core/prediction_pipeline/_model_adapters/__init__.py
|
bioimage-io/python-core
|
78fa57fd0af968b57c0df8f294faf3a8b502909e
|
[
"MIT"
] | 2
|
2019-12-05T14:57:35.000Z
|
2021-05-18T12:33:00.000Z
|
from ._model_adapter import ModelAdapter, create_model_adapter, get_weight_formats
__all__ = ["ModelAdapter", "create_model_adapter", "get_weight_formats"]
| 39.25
| 82
| 0.834395
| 19
| 157
| 6.157895
| 0.526316
| 0.307692
| 0.393162
| 0.512821
| 0.786325
| 0.786325
| 0.786325
| 0
| 0
| 0
| 0
| 0
| 0.076433
| 157
| 3
| 83
| 52.333333
| 0.806897
| 0
| 0
| 0
| 0
| 0
| 0.318471
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 8
|
5d28295e3a0c09d45c0327b2a52c19e125397bc8
| 9,063
|
py
|
Python
|
tests/helpers/test_entityfilter.py
|
andersop91/core
|
0e0ef0aa17073609eae7c974cf4c73306b7c414b
|
[
"Apache-2.0"
] | 22,481
|
2020-03-02T13:09:59.000Z
|
2022-03-31T23:34:28.000Z
|
tests/helpers/test_entityfilter.py
|
andersop91/core
|
0e0ef0aa17073609eae7c974cf4c73306b7c414b
|
[
"Apache-2.0"
] | 31,101
|
2020-03-02T13:00:16.000Z
|
2022-03-31T23:57:36.000Z
|
tests/helpers/test_entityfilter.py
|
andersop91/core
|
0e0ef0aa17073609eae7c974cf4c73306b7c414b
|
[
"Apache-2.0"
] | 11,411
|
2020-03-02T14:19:20.000Z
|
2022-03-31T22:46:07.000Z
|
"""The tests for the EntityFilter component."""
from homeassistant.helpers.entityfilter import (
FILTER_SCHEMA,
INCLUDE_EXCLUDE_FILTER_SCHEMA,
generate_filter,
)
def test_no_filters_case_1():
"""If include and exclude not included, pass everything."""
incl_dom = {}
incl_ent = {}
excl_dom = {}
excl_ent = {}
testfilter = generate_filter(incl_dom, incl_ent, excl_dom, excl_ent)
for value in ("sensor.test", "sun.sun", "light.test"):
assert testfilter(value)
def test_includes_only_case_2():
"""If include specified, only pass if specified (Case 2)."""
incl_dom = {"light", "sensor"}
incl_ent = {"binary_sensor.working"}
excl_dom = {}
excl_ent = {}
testfilter = generate_filter(incl_dom, incl_ent, excl_dom, excl_ent)
assert testfilter("sensor.test")
assert testfilter("light.test")
assert testfilter("binary_sensor.working")
assert testfilter("binary_sensor.notworking") is False
assert testfilter("sun.sun") is False
def test_includes_only_with_glob_case_2():
"""If include specified, only pass if specified (Case 2)."""
incl_dom = {"light", "sensor"}
incl_glob = {"cover.*_window"}
incl_ent = {"binary_sensor.working"}
excl_dom = {}
excl_glob = {}
excl_ent = {}
testfilter = generate_filter(
incl_dom, incl_ent, excl_dom, excl_ent, incl_glob, excl_glob
)
assert testfilter("sensor.test")
assert testfilter("light.test")
assert testfilter("cover.bedroom_window")
assert testfilter("binary_sensor.working")
assert testfilter("binary_sensor.notworking") is False
assert testfilter("sun.sun") is False
assert testfilter("cover.garage_door") is False
def test_excludes_only_case_3():
"""If exclude specified, pass all but specified (Case 3)."""
incl_dom = {}
incl_ent = {}
excl_dom = {"light", "sensor"}
excl_ent = {"binary_sensor.working"}
testfilter = generate_filter(incl_dom, incl_ent, excl_dom, excl_ent)
assert testfilter("sensor.test") is False
assert testfilter("light.test") is False
assert testfilter("binary_sensor.working") is False
assert testfilter("binary_sensor.another")
assert testfilter("sun.sun") is True
def test_excludes_only_with_glob_case_3():
"""If exclude specified, pass all but specified (Case 3)."""
incl_dom = {}
incl_glob = {}
incl_ent = {}
excl_dom = {"light", "sensor"}
excl_glob = {"cover.*_window"}
excl_ent = {"binary_sensor.working"}
testfilter = generate_filter(
incl_dom, incl_ent, excl_dom, excl_ent, incl_glob, excl_glob
)
assert testfilter("sensor.test") is False
assert testfilter("light.test") is False
assert testfilter("cover.bedroom_window") is False
assert testfilter("binary_sensor.working") is False
assert testfilter("binary_sensor.another")
assert testfilter("sun.sun") is True
assert testfilter("cover.garage_door")
def test_with_include_domain_case4a():
"""Test case 4a - include and exclude specified, with included domain."""
incl_dom = {"light", "sensor"}
incl_ent = {"binary_sensor.working"}
excl_dom = {}
excl_ent = {"light.ignoreme", "sensor.notworking"}
testfilter = generate_filter(incl_dom, incl_ent, excl_dom, excl_ent)
assert testfilter("sensor.test")
assert testfilter("sensor.notworking") is False
assert testfilter("light.test")
assert testfilter("light.ignoreme") is False
assert testfilter("binary_sensor.working")
assert testfilter("binary_sensor.another") is False
assert testfilter("sun.sun") is False
def test_with_include_glob_case4a():
"""Test case 4a - include and exclude specified, with included glob."""
incl_dom = {}
incl_glob = {"light.*", "sensor.*"}
incl_ent = {"binary_sensor.working"}
excl_dom = {}
excl_glob = {}
excl_ent = {"light.ignoreme", "sensor.notworking"}
testfilter = generate_filter(
incl_dom, incl_ent, excl_dom, excl_ent, incl_glob, excl_glob
)
assert testfilter("sensor.test")
assert testfilter("sensor.notworking") is False
assert testfilter("light.test")
assert testfilter("light.ignoreme") is False
assert testfilter("binary_sensor.working")
assert testfilter("binary_sensor.another") is False
assert testfilter("sun.sun") is False
def test_with_include_domain_glob_filtering_case4a():
"""Test case 4a - include and exclude specified, both have domains and globs."""
incl_dom = {"light"}
incl_glob = {"*working"}
incl_ent = {}
excl_dom = {"binary_sensor"}
excl_glob = {"*notworking"}
excl_ent = {"light.ignoreme"}
testfilter = generate_filter(
incl_dom, incl_ent, excl_dom, excl_ent, incl_glob, excl_glob
)
assert testfilter("sensor.working")
assert testfilter("sensor.notworking") is False
assert testfilter("light.test")
assert testfilter("light.notworking") is False
assert testfilter("light.ignoreme") is False
assert testfilter("binary_sensor.not_working") is False
assert testfilter("binary_sensor.another") is False
assert testfilter("sun.sun") is False
def test_exclude_domain_case4b():
"""Test case 4b - include and exclude specified, with excluded domain."""
incl_dom = {}
incl_ent = {"binary_sensor.working"}
excl_dom = {"binary_sensor"}
excl_ent = {"light.ignoreme", "sensor.notworking"}
testfilter = generate_filter(incl_dom, incl_ent, excl_dom, excl_ent)
assert testfilter("sensor.test")
assert testfilter("sensor.notworking") is False
assert testfilter("light.test")
assert testfilter("light.ignoreme") is False
assert testfilter("binary_sensor.working")
assert testfilter("binary_sensor.another") is False
assert testfilter("sun.sun") is True
def test_exclude_glob_case4b():
"""Test case 4b - include and exclude specified, with excluded glob."""
incl_dom = {}
incl_glob = {}
incl_ent = {"binary_sensor.working"}
excl_dom = {}
excl_glob = {"binary_sensor.*"}
excl_ent = {"light.ignoreme", "sensor.notworking"}
testfilter = generate_filter(
incl_dom, incl_ent, excl_dom, excl_ent, incl_glob, excl_glob
)
assert testfilter("sensor.test")
assert testfilter("sensor.notworking") is False
assert testfilter("light.test")
assert testfilter("light.ignoreme") is False
assert testfilter("binary_sensor.working")
assert testfilter("binary_sensor.another") is False
assert testfilter("sun.sun") is True
def test_no_domain_case4c():
"""Test case 4c - include and exclude specified, with no domains."""
incl_dom = {}
incl_ent = {"binary_sensor.working"}
excl_dom = {}
excl_ent = {"light.ignoreme", "sensor.notworking"}
testfilter = generate_filter(incl_dom, incl_ent, excl_dom, excl_ent)
assert testfilter("sensor.test") is False
assert testfilter("sensor.notworking") is False
assert testfilter("light.test") is False
assert testfilter("light.ignoreme") is False
assert testfilter("binary_sensor.working")
assert testfilter("binary_sensor.another") is False
assert testfilter("sun.sun") is False
def test_filter_schema_empty():
"""Test filter schema."""
conf = {}
filt = FILTER_SCHEMA(conf)
conf.update(
{
"include_domains": [],
"include_entities": [],
"exclude_domains": [],
"exclude_entities": [],
"include_entity_globs": [],
"exclude_entity_globs": [],
}
)
assert filt.config == conf
assert filt.empty_filter
def test_filter_schema():
"""Test filter schema."""
conf = {
"include_domains": ["light"],
"include_entities": ["switch.kitchen"],
"exclude_domains": ["cover"],
"exclude_entities": ["light.kitchen"],
}
filt = FILTER_SCHEMA(conf)
conf.update({"include_entity_globs": [], "exclude_entity_globs": []})
assert filt.config == conf
assert not filt.empty_filter
def test_filter_schema_with_globs():
"""Test filter schema with glob options."""
conf = {
"include_domains": ["light"],
"include_entity_globs": ["sensor.kitchen_*"],
"include_entities": ["switch.kitchen"],
"exclude_domains": ["cover"],
"exclude_entity_globs": ["sensor.weather_*"],
"exclude_entities": ["light.kitchen"],
}
filt = FILTER_SCHEMA(conf)
assert filt.config == conf
assert not filt.empty_filter
def test_filter_schema_include_exclude():
"""Test the include exclude filter schema."""
conf = {
"include": {
"domains": ["light"],
"entity_globs": ["sensor.kitchen_*"],
"entities": ["switch.kitchen"],
},
"exclude": {
"domains": ["cover"],
"entity_globs": ["sensor.weather_*"],
"entities": ["light.kitchen"],
},
}
filt = INCLUDE_EXCLUDE_FILTER_SCHEMA(conf)
assert filt.config == conf
assert not filt.empty_filter
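The tests above pin down four include/exclude cases; the sketch below restates those semantics for domains and exact entity ids only (globs omitted for brevity), as a standalone function rather than Home Assistant's actual implementation:
# Compact restatement of cases 1-4 from the tests above -- an
# illustrative sketch, not homeassistant.helpers.entityfilter itself.
def simple_generate_filter(incl_dom, incl_ent, excl_dom, excl_ent):
    def entity_filter(entity_id):
        domain = entity_id.split(".")[0]
        have_include = bool(incl_dom or incl_ent)
        have_exclude = bool(excl_dom or excl_ent)
        if not have_include and not have_exclude:  # case 1: pass everything
            return True
        if have_include and not have_exclude:      # case 2: includes only
            return entity_id in incl_ent or domain in incl_dom
        if have_exclude and not have_include:      # case 3: excludes only
            return entity_id not in excl_ent and domain not in excl_dom
        if incl_dom:                               # case 4a: included domains
            return entity_id in incl_ent or (
                domain in incl_dom and entity_id not in excl_ent)
        if excl_dom:                               # case 4b: excluded domains
            return entity_id in incl_ent or (
                domain not in excl_dom and entity_id not in excl_ent)
        return entity_id in incl_ent               # case 4c: no domains
    return entity_filter

assert simple_generate_filter({}, set(), {}, set())("sun.sun")
assert not simple_generate_filter({"light"}, set(), {}, set())("sun.sun")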
| 33.319853
| 84
| 0.669425
| 1,084
| 9,063
| 5.354244
| 0.077491
| 0.187457
| 0.071675
| 0.126809
| 0.857857
| 0.812715
| 0.788766
| 0.766196
| 0.722088
| 0.706926
| 0
| 0.002909
| 0.203465
| 9,063
| 271
| 85
| 33.442804
| 0.80108
| 0.092574
| 0
| 0.668224
| 1
| 0
| 0.256294
| 0.076016
| 0
| 0
| 0
| 0
| 0.35514
| 1
| 0.070093
| false
| 0
| 0.004673
| 0
| 0.074766
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
5d31737f57a96d6f3464017fdff456892459b783
| 16,706
|
py
|
Python
|
pyalp/protocol.py
|
BaptisteLefebvre/pyalp
|
05cb8ff9e66f95ed9c70a8ab8a91c78794f7350a
|
[
"MIT"
] | 1
|
2020-11-09T09:23:11.000Z
|
2020-11-09T09:23:11.000Z
|
pyalp/protocol.py
|
BaptisteLefebvre/pyalp
|
05cb8ff9e66f95ed9c70a8ab8a91c78794f7350a
|
[
"MIT"
] | null | null | null |
pyalp/protocol.py
|
BaptisteLefebvre/pyalp
|
05cb8ff9e66f95ed9c70a8ab8a91c78794f7350a
|
[
"MIT"
] | 1
|
2020-11-09T09:23:19.000Z
|
2020-11-09T09:23:19.000Z
|
import os
import pprint
import time
import pyalp as alp
import pyalp.utils
class Protocol(object):
"""TODO add doc...
TODO complete...
"""
def __init__(self):
pass
def project(self, device):
raise NotImplementedError()
class White(Protocol):
"""TODO add doc...
TODO complete...
"""
def __init__(self, rate=50.0, nb_repetitions=None, infinite_loop=False):
Protocol.__init__(self)
self.rate = rate # Hz (frame rate)
self.picture_time = int(1.0e6 / self.rate) # us (time between the start of two consecutive pictures)
self.nb_repetitions = nb_repetitions
self.infinite_loop = infinite_loop
def project(self, device):
device.control_projection(inversion=True)
sequence = alp.sequence.White()
# Set up sequence
device.allocate(sequence)
if self.nb_repetitions is not None and not self.infinite_loop:
device.control_repetitions(sequence, self.nb_repetitions)
# TODO check if timing management is correct...
device.timing(sequence)
device.put(sequence) # TODO check why in Vialux's example put takes place before timing (no control)
if __debug__:
settings = sequence.inquire_settings(device)
print("White sequence's settings:")
pprint.pprint(settings)
# Start sequence
device.start(sequence, infinite_loop=self.infinite_loop)
# Wait sequence end
device.wait(infinite_loop=self.infinite_loop)
# Clean sequence
device.free(sequence)
return
class Black(Protocol):
"""TODO add doc...
TODO complete...
"""
def __init__(self, rate=50.0, nb_repetitions=None, infinite_loop=False):
Protocol.__init__(self)
self.rate = rate # Hz (frame rate)
self.picture_time = int(1.0e6 / self.rate) # us (time between the start of two consecutive pictures)
self.nb_repetitions = nb_repetitions
self.infinite_loop = infinite_loop
def project(self, device):
device.control_projection(inversion=True)
sequence = alp.sequence.Black()
# Set up sequence
device.allocate(sequence)
if self.nb_repetitions is not None and not self.infinite_loop:
device.control_repetitions(sequence, self.nb_repetitions)
# TODO check if timing management is correct...
device.timing(sequence)
device.put(sequence) # TODO check why in Vialux's example put takes place before timing (no control)
if __debug__:
settings = sequence.inquire_settings(device)
print("Black sequence's settings:")
pprint.pprint(settings)
# Start sequence
device.start(sequence, infinite_loop=self.infinite_loop)
# Wait sequence end
device.wait(infinite_loop=self.infinite_loop)
# Clean sequence
device.free(sequence)
return
class BlackWhite(Protocol):
"""TODO add doc...
TODO complete...
"""
def __init__(self, rate=50.0, nb_repetitions=None, infinite_loop=False):
Protocol.__init__(self)
self.rate = rate # Hz (frame rate)
self.picture_time = int(1.0e6 / self.rate) # us (time between the start of two consecutive pictures)
self.nb_repetitions = nb_repetitions
self.infinite_loop = infinite_loop
def project(self, device):
device.control_projection(inversion=True)
sequence = alp.sequence.BlackWhite()
# Set up sequence
device.allocate(sequence)
if self.nb_repetitions is not None and not self.infinite_loop:
device.control_repetitions(sequence, self.nb_repetitions)
# TODO manage timing...
# device.timing(sequence)
device.put(sequence) # TODO check why in Vialux's example put takes place before timing (no control)
# Start sequence
device.start(sequence, infinite_loop=self.infinite_loop)
# Wait sequence end
device.wait(infinite_loop=self.infinite_loop)
# Clean sequence
device.free(sequence)
return
class Checkerboard(Protocol):
"""TODO add doc...
TODO complete...
"""
def __init__(self, rate=30.0, square_size=20, checkerboard_size=5, nb_repetitions=10, interactive=True):
Protocol.__init__(self)
if interactive: # prompt input parameters
print("# Checkerboard stimulus")
rate = alp.utils.input("Enter the frame rate [Hz] (e.g. {}): ".format(rate), float)
prompt = "Number of pixels to make one side of a single check (e.g. {}): ".format(square_size)
square_size = alp.utils.input(prompt, int)
prompt = "Number of checks to make one side of the checkerboard (e.g. {}): ".format(checkerboard_size)
checkerboard_size = alp.utils.input(prompt, int)
nb_repetitions = alp.utils.input("Enter the number of repetitions (e.g. {}): ".format(nb_repetitions), int)
self.rate = rate # Hz
self.picture_time = int(1.0e6 / self.rate) # us
self.square_size = square_size # px
self.nb_repetitions = nb_repetitions
self.checkerboard_size = checkerboard_size * self.square_size # px
@staticmethod
def wait(device, sleep_duration=30.0e-3):
"""TODO add doc..."""
queue_info = device.inquire_projection('progress')
while queue_info.nWaitingSequences == 1:
queue_info = device.inquire_projection('progress')
time.sleep(sleep_duration)
device.control_projection(reset_queue=True)
return
def project(self, device):
"""Project checkerboard protocol"""
device.control_projection(inversion=False, queue_mode=True)
# TODO check if queue mode toggle should come after allocations...
sequence_1 = alp.sequence.Checkerboard(seed=42)
sequence_2 = alp.sequence.Checkerboard(seed=None)
# Setup first sequence
device.allocate(sequence_1)
# TODO manage control...
# device.control(sequence_1)
# TODO manage timing...
# device.timing(sequence_1)
# Setup second sequence
device.allocate(sequence_2)
# TODO manage control...
# device.control(sequence_2)
# TODO manage timing...
# device.timing(sequence_2)
# Start first sequence
print("Start sequence 1")
device.put(sequence_1)
device.start(sequence_1)
# Start second sequence
print("Start sequence 2")
device.put(sequence_2)
device.start(sequence_2)
# For each repetition
print("Start infernal loop")
for rep in range(0, self.nb_repetitions):
# Wait end of first sequence
self.wait(device)
# Manage first sequence
device.free(sequence_1)
sequence_1 = alp.sequence.Checkerboard(seed=42)
device.allocate(sequence_1)
# TODO manage timing...
# device.timing(sequence_1)
device.put(sequence_1)
device.start(sequence_1)
# Wait end of second sequence
self.wait(device)
# Manage second sequence
device.free(sequence_2)
sequence_2 = alp.sequence.Checkerboard(seed=None)
device.allocate(sequence_2)
# TODO manage timing...
# device.timing(sequence_2)
device.put(sequence_2)
device.start(sequence_2)
# Wait end of first sequence
self.wait(device)
# Clean first sequence
device.free(sequence_1)
# Wait end of second sequence
device.wait()
# Clean second sequence
device.free(sequence_2)
return
class FullField(Protocol):
"""TODO add doc...
TODO complete...
"""
def __init__(self, footprint_array, rate=500.0, nb_repetitions=10, infinite_loop=False):
Protocol.__init__(self)
self.rate = rate # Hz
self.picture_time = int(1.0e6 / self.rate) # us
self.infinite_loop = infinite_loop
self.nb_repetitions = nb_repetitions
self.footprint_array = footprint_array
def project(self, device):
device.control_projection(inversion=True)
sequence = alp.sequence.FullField(self.footprint_array)
# Set up sequence
device.allocate(sequence)
if self.nb_repetitions is not None and not self.infinite_loop:
device.control_repetitions(sequence, self.nb_repetitions)
# TODO manage timing...
device.timing(sequence)
device.put(sequence) # TODO check why in Vialux's example put takes place before timing (no control)
# Start sequence
device.start(sequence, infinite_loop=self.infinite_loop)
# Wait sequence end
device.wait(infinite_loop=self.infinite_loop)
# Clean sequence
device.free(sequence)
return
class MovingBar(Protocol):
"""TODO add doc...
TODO complete...
"""
def __init__(self, w, l, x, y, theta, v, rate, n_repetitions=10):
Protocol.__init__(self)
self.w = w # arb. unit (i.e. width)
self.l = l # arb. unit (i.e. length)
self.x = x # arb. unit (i.e. x-coordinate)
self.y = y # arb. unit (i.e. y-coordinate)
self.theta = theta # rad (i.e. direction & orientation)
self.v = v # arb.unit / s (i.e. velocity)
self.rate = rate # Hz
self.picture_time = int(1.0e6 / self.rate) # us
self.n_repetitions = n_repetitions
self.square_size = 30 # px
self.checkerboard_size = 5 * self.square_size # px
def project(self, device):
"""Project moving bar protocol"""
# return
raise NotImplementedError()
class Film(Protocol):
"""TODO add docstring...
TODO complete...
"""
binvec_pathname = os.path.join("E:", "BINVECS") # path to the BINVEC directory
def __init__(self, user_id=0, bin_id=0, vec_id=0, rate=40.0, nb_lut_frames=200, interactive=True):
Protocol.__init__(self)
if interactive:
print("# Film protocol")
# Print all the user directories.
user_dirnames = os.listdir(self.binvec_pathname)
for user_dirname_id, user_dirname in enumerate(user_dirnames):
print(" {}. {}".format(user_dirname_id, user_dirname))
# Prompt user identifier.
user_id = alp.utils.input("Enter the user number (e.g. {}): ".format(user_id), int)
user_dirname = user_dirnames[user_id]
user_pathname = os.path.join(self.binvec_pathname, user_dirname)
# Print all the BIN files.
bin_pathname = os.path.join(user_pathname, "Bin")
bin_filenames = [
name for name in os.listdir(bin_pathname) if os.path.isfile(os.path.join(bin_pathname, name))
]
for bin_filename_id, bin_filename in enumerate(bin_filenames):
print(" {}. {}".format(bin_filename_id, bin_filename))
# Prompt BIN filename identifier.
bin_id = alp.utils.input("Enter the .bin file number (e.g. {}): ".format(bin_id), int)
bin_filename = bin_filenames[bin_id]
bin_pathname = os.path.join(bin_pathname, bin_filename)
print(".bin pathname: {}".format(bin_pathname))
# Print all the VEC files.
vec_pathname = os.path.join(user_pathname, "Vec")
vec_filenames = [
name for name in os.listdir(vec_pathname) if os.path.isfile(os.path.join(vec_pathname, name))
]
for vec_filename_id, vec_filename in enumerate(vec_filenames):
print(" {}. {}".format(vec_filename_id, vec_filename))
# Prompt VEC filename identifier.
vec_id = alp.utils.input("Enter the .vec file number (e.g. {}): ".format(vec_id), int)
vec_filename = vec_filenames[vec_id]
vec_pathname = os.path.join(vec_pathname, vec_filename)
print(".vec pathname: {}".format(vec_pathname))
# Prompt the frame rate.
rate = alp.utils.input("Enter the frame rate [Hz] (e.g. {}): ".format(rate), float)
# Prompt the advanced features.
advanced = alp.utils.input("Advanced features (y/n): ", lambda arg: arg == "y")
if advanced:
# Prompt the number of frames in the look up table.
prompt = "Number of frames in the look up table (e.g. {}): ".format(nb_lut_frames)
nb_lut_frames = alp.utils.input(prompt, int)
self.user_id = user_id
self.bin_id = bin_id
self.vec_id = vec_id
user_dirnames = os.listdir(self.binvec_pathname)
user_dirname = user_dirnames[user_id]
user_pathname = os.path.join(self.binvec_pathname, user_dirname)
bin_pathname = os.path.join(user_pathname, "Bin")
bin_filenames = [name for name in os.listdir(bin_pathname) if os.path.isfile(os.path.join(bin_pathname, name))]
bin_filename = bin_filenames[bin_id]
self.bin_pathname = os.path.join(bin_pathname, bin_filename)
vec_pathname = os.path.join(user_pathname, "Vec")
vec_filenames = [name for name in os.listdir(vec_pathname) if os.path.isfile(os.path.join(vec_pathname, name))]
vec_filename = vec_filenames[vec_id]
self.vec_pathname = os.path.join(vec_pathname, vec_filename)
self.rate = rate
self.nb_lut_frames = nb_lut_frames
def get_sequence(self, sequence_id):
raise NotImplementedError()
@staticmethod
def wait(device, sleep_duration=30.0e-3):
"""TODO add doc..."""
queue_info = device.inquire_projection('progress')
while queue_info.nWaitingSequences == 1:
queue_info = device.inquire_projection('progress')
time.sleep(sleep_duration)
device.control_projection(reset_queue=True)
def project(self, device):
"""Project film protocol"""
device.control_projection(inversion=False, queue_mode=True)
# TODO check if queue mode toggle should come after allocations...
sequence_1 = alp.sequence.Checkerboard(seed=42)
sequence_2 = alp.sequence.Checkerboard(seed=None)
# Setup first sequence
device.allocate(sequence_1)
# TODO manage control...
# device.control(sequence_1)
# TODO manage timing...
# device.timing(sequence_1)
# Setup second sequence
device.allocate(sequence_2)
# TODO manage control...
# device.control(sequence_2)
# TODO manage timing...
# device.timing(sequence_2)
# Start first sequence
print("Start sequence 1")
device.put(sequence_1)
device.start(sequence_1)
# Start second sequence
print("Start sequence 2")
device.put(sequence_2)
device.start(sequence_2)
# For each repetition
print("Start infernal loop")
for rep in range(0, self.nb_repetitions):
# Wait end of first sequence
self.wait(device)
# Manage first sequence
device.free(sequence_1)
sequence_1 = alp.sequence.Checkerboard(seed=42)
device.allocate(sequence_1)
# TODO manage timing...
# device.timing(sequence_1)
device.put(sequence_1)
device.start(sequence_1)
# Wait end of second sequence
self.wait(device)
# Manage second sequence
device.free(sequence_2)
sequence_2 = alp.sequence.Checkerboard(seed=None)
device.allocate(sequence_2)
# TODO manage timing...
# device.timing(sequence_2)
device.put(sequence_2)
device.start(sequence_2)
# Wait end of first sequence
self.wait(device)
# Clean first sequence
device.free(sequence_1)
# Wait end of second sequence
device.wait()
# Clean second sequence
device.free(sequence_2)
return
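Every protocol above funnels the same allocate/put/start/wait/free lifecycle through a single project(device) call; a hypothetical driver is sketched below, where only the White class defined above is real and the device handle is assumed to come from elsewhere in pyalp (acquisition not shown):
# Hypothetical usage sketch: how a `device` object is obtained from
# pyalp is an assumption here, not shown in this module.
def run_white(device, repetitions=100):
    protocol = White(rate=50.0, nb_repetitions=repetitions)
    protocol.project(device)  # allocate -> put -> start -> wait -> free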
| 37.882086
| 120
| 0.605112
| 1,966
| 16,706
| 4.964395
| 0.102747
| 0.039344
| 0.02623
| 0.031967
| 0.816598
| 0.779918
| 0.74457
| 0.712807
| 0.703279
| 0.677254
| 0
| 0.011195
| 0.299533
| 16,706
| 440
| 121
| 37.968182
| 0.822851
| 0.190949
| 0
| 0.682171
| 0
| 0
| 0.057107
| 0
| 0
| 0
| 0
| 0.038636
| 0
| 1
| 0.073643
| false
| 0.003876
| 0.01938
| 0
| 0.155039
| 0.081395
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
537236d03a2ed73be8ea0b666dab5a8a0c255e8f
| 36,371
|
py
|
Python
|
angr/procedures/definitions/win32_authz.py
|
r4b3rt/angr
|
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
|
[
"BSD-2-Clause"
] | null | null | null |
angr/procedures/definitions/win32_authz.py
|
r4b3rt/angr
|
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
|
[
"BSD-2-Clause"
] | null | null | null |
angr/procedures/definitions/win32_authz.py
|
r4b3rt/angr
|
c133cfd4f83ffea2a1d9e064241e9459eaabc55f
|
[
"BSD-2-Clause"
] | null | null | null |
# pylint:disable=line-too-long
import logging
from ...sim_type import SimTypeFunction, SimTypeShort, SimTypeInt, SimTypeLong, SimTypeLongLong, SimTypeDouble, SimTypeFloat, SimTypePointer, SimTypeChar, SimStruct, SimTypeFixedSizeArray, SimTypeBottom, SimUnion, SimTypeBool
from ...calling_conventions import SimCCStdcall, SimCCMicrosoftAMD64
from .. import SIM_PROCEDURES as P
from . import SimLibrary
_l = logging.getLogger(name=__name__)
lib = SimLibrary()
lib.set_default_cc('X86', SimCCStdcall)
lib.set_default_cc('AMD64', SimCCMicrosoftAMD64)
lib.set_library_names("authz.dll")
prototypes = \
{
#
'AuthzAccessCheck': SimTypeFunction([SimTypeInt(signed=False, label="AUTHZ_ACCESS_CHECK_FLAGS"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"DesiredAccess": SimTypeInt(signed=False, label="UInt32"), "PrincipalSelfSid": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "ObjectTypeList": SimTypePointer(SimTypeBottom(label="OBJECT_TYPE_LIST"), offset=0), "ObjectTypeListLength": SimTypeInt(signed=False, label="UInt32"), "OptionalArguments": SimTypePointer(SimTypeBottom(label="Void"), offset=0)}, name="AUTHZ_ACCESS_REQUEST", pack=False, align=None), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"Revision": SimTypeChar(label="Byte"), "Sbz1": SimTypeChar(label="Byte"), "Control": SimTypeShort(signed=False, label="UInt16"), "Owner": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "Group": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "Sacl": SimTypePointer(SimStruct({"AclRevision": SimTypeChar(label="Byte"), "Sbz1": SimTypeChar(label="Byte"), "AclSize": SimTypeShort(signed=False, label="UInt16"), "AceCount": SimTypeShort(signed=False, label="UInt16"), "Sbz2": SimTypeShort(signed=False, label="UInt16")}, name="ACL", pack=False, align=None), offset=0), "Dacl": SimTypePointer(SimStruct({"AclRevision": SimTypeChar(label="Byte"), "Sbz1": SimTypeChar(label="Byte"), "AclSize": SimTypeShort(signed=False, label="UInt16"), "AceCount": SimTypeShort(signed=False, label="UInt16"), "Sbz2": SimTypeShort(signed=False, label="UInt16")}, name="ACL", pack=False, align=None), offset=0)}, name="SECURITY_DESCRIPTOR", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimStruct({"Revision": SimTypeChar(label="Byte"), "Sbz1": SimTypeChar(label="Byte"), "Control": SimTypeShort(signed=False, label="UInt16"), "Owner": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "Group": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "Sacl": SimTypePointer(SimStruct({"AclRevision": SimTypeChar(label="Byte"), "Sbz1": SimTypeChar(label="Byte"), "AclSize": SimTypeShort(signed=False, label="UInt16"), "AceCount": SimTypeShort(signed=False, label="UInt16"), "Sbz2": SimTypeShort(signed=False, label="UInt16")}, name="ACL", pack=False, align=None), offset=0), "Dacl": SimTypePointer(SimStruct({"AclRevision": SimTypeChar(label="Byte"), "Sbz1": SimTypeChar(label="Byte"), "AclSize": SimTypeShort(signed=False, label="UInt16"), "AceCount": SimTypeShort(signed=False, label="UInt16"), "Sbz2": SimTypeShort(signed=False, label="UInt16")}, name="ACL", pack=False, align=None), offset=0)}, name="SECURITY_DESCRIPTOR", pack=False, align=None), offset=0), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"ResultListLength": SimTypeInt(signed=False, label="UInt32"), "GrantedAccessMask": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), "SaclEvaluationResults": SimTypePointer(SimTypeInt(signed=False, label="AUTHZ_GENERATE_RESULTS"), offset=0), "Error": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)}, name="AUTHZ_ACCESS_REPLY", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["Flags", "hAuthzClientContext", "pRequest", "hAuditEvent", 
"pSecurityDescriptor", "OptionalSecurityDescriptorArray", "OptionalSecurityDescriptorCount", "pReply", "phAccessCheckResults"]),
#
'AuthzCachedAccessCheck': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"DesiredAccess": SimTypeInt(signed=False, label="UInt32"), "PrincipalSelfSid": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "ObjectTypeList": SimTypePointer(SimTypeBottom(label="OBJECT_TYPE_LIST"), offset=0), "ObjectTypeListLength": SimTypeInt(signed=False, label="UInt32"), "OptionalArguments": SimTypePointer(SimTypeBottom(label="Void"), offset=0)}, name="AUTHZ_ACCESS_REQUEST", pack=False, align=None), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"ResultListLength": SimTypeInt(signed=False, label="UInt32"), "GrantedAccessMask": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), "SaclEvaluationResults": SimTypePointer(SimTypeInt(signed=False, label="AUTHZ_GENERATE_RESULTS"), offset=0), "Error": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)}, name="AUTHZ_ACCESS_REPLY", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["Flags", "hAccessCheckResults", "pRequest", "hAuditEvent", "pReply"]),
#
'AuthzOpenObjectAudit': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"DesiredAccess": SimTypeInt(signed=False, label="UInt32"), "PrincipalSelfSid": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "ObjectTypeList": SimTypePointer(SimTypeBottom(label="OBJECT_TYPE_LIST"), offset=0), "ObjectTypeListLength": SimTypeInt(signed=False, label="UInt32"), "OptionalArguments": SimTypePointer(SimTypeBottom(label="Void"), offset=0)}, name="AUTHZ_ACCESS_REQUEST", pack=False, align=None), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"Revision": SimTypeChar(label="Byte"), "Sbz1": SimTypeChar(label="Byte"), "Control": SimTypeShort(signed=False, label="UInt16"), "Owner": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "Group": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "Sacl": SimTypePointer(SimStruct({"AclRevision": SimTypeChar(label="Byte"), "Sbz1": SimTypeChar(label="Byte"), "AclSize": SimTypeShort(signed=False, label="UInt16"), "AceCount": SimTypeShort(signed=False, label="UInt16"), "Sbz2": SimTypeShort(signed=False, label="UInt16")}, name="ACL", pack=False, align=None), offset=0), "Dacl": SimTypePointer(SimStruct({"AclRevision": SimTypeChar(label="Byte"), "Sbz1": SimTypeChar(label="Byte"), "AclSize": SimTypeShort(signed=False, label="UInt16"), "AceCount": SimTypeShort(signed=False, label="UInt16"), "Sbz2": SimTypeShort(signed=False, label="UInt16")}, name="ACL", pack=False, align=None), offset=0)}, name="SECURITY_DESCRIPTOR", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimStruct({"Revision": SimTypeChar(label="Byte"), "Sbz1": SimTypeChar(label="Byte"), "Control": SimTypeShort(signed=False, label="UInt16"), "Owner": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "Group": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "Sacl": SimTypePointer(SimStruct({"AclRevision": SimTypeChar(label="Byte"), "Sbz1": SimTypeChar(label="Byte"), "AclSize": SimTypeShort(signed=False, label="UInt16"), "AceCount": SimTypeShort(signed=False, label="UInt16"), "Sbz2": SimTypeShort(signed=False, label="UInt16")}, name="ACL", pack=False, align=None), offset=0), "Dacl": SimTypePointer(SimStruct({"AclRevision": SimTypeChar(label="Byte"), "Sbz1": SimTypeChar(label="Byte"), "AclSize": SimTypeShort(signed=False, label="UInt16"), "AceCount": SimTypeShort(signed=False, label="UInt16"), "Sbz2": SimTypeShort(signed=False, label="UInt16")}, name="ACL", pack=False, align=None), offset=0)}, name="SECURITY_DESCRIPTOR", pack=False, align=None), offset=0), label="LPArray", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"ResultListLength": SimTypeInt(signed=False, label="UInt32"), "GrantedAccessMask": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), "SaclEvaluationResults": SimTypePointer(SimTypeInt(signed=False, label="AUTHZ_GENERATE_RESULTS"), offset=0), "Error": SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)}, name="AUTHZ_ACCESS_REPLY", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["Flags", "hAuthzClientContext", "pRequest", "hAuditEvent", "pSecurityDescriptor", "OptionalSecurityDescriptorArray", "OptionalSecurityDescriptorCount", "pReply"]),
#
'AuthzFreeHandle': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hAccessCheckResults"]),
#
'AuthzInitializeResourceManager': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeFunction([SimTypeBottom(label="AUTHZ_CLIENT_CONTEXT_HANDLE"), SimTypePointer(SimTypeBottom(label="ACE_HEADER"), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hAuthzClientContext", "pAce", "pArgs", "pbAceApplicable"]), offset=0), SimTypePointer(SimTypeFunction([SimTypeBottom(label="AUTHZ_CLIENT_CONTEXT_HANDLE"), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypePointer(SimTypeBottom(label="SID_AND_ATTRIBUTES"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypePointer(SimTypeBottom(label="SID_AND_ATTRIBUTES"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hAuthzClientContext", "Args", "pSidAttrArray", "pSidCount", "pRestrictedSidAttrArray", "pRestrictedSidCount"]), offset=0), SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeBottom(label="SID_AND_ATTRIBUTES"), offset=0)], SimTypeBottom(label="Void"), arg_names=["pSidAttrArray"]), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["Flags", "pfnDynamicAccessCheck", "pfnComputeDynamicGroups", "pfnFreeDynamicGroups", "szResourceManagerName", "phAuthzResourceManager"]),
#
'AuthzInitializeResourceManagerEx': SimTypeFunction([SimTypeInt(signed=False, label="AUTHZ_RESOURCE_MANAGER_FLAGS"), SimTypePointer(SimStruct({"version": SimTypeShort(signed=False, label="UInt16"), "szResourceManagerName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "pfnDynamicAccessCheck": SimTypePointer(SimTypeFunction([SimTypeBottom(label="AUTHZ_CLIENT_CONTEXT_HANDLE"), SimTypePointer(SimTypeBottom(label="ACE_HEADER"), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hAuthzClientContext", "pAce", "pArgs", "pbAceApplicable"]), offset=0), "pfnComputeDynamicGroups": SimTypePointer(SimTypeFunction([SimTypeBottom(label="AUTHZ_CLIENT_CONTEXT_HANDLE"), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypePointer(SimTypeBottom(label="SID_AND_ATTRIBUTES"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypePointer(SimTypeBottom(label="SID_AND_ATTRIBUTES"), offset=0), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hAuthzClientContext", "Args", "pSidAttrArray", "pSidCount", "pRestrictedSidAttrArray", "pRestrictedSidCount"]), offset=0), "pfnFreeDynamicGroups": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeBottom(label="SID_AND_ATTRIBUTES"), offset=0)], SimTypeBottom(label="Void"), arg_names=["pSidAttrArray"]), offset=0), "pfnGetCentralAccessPolicy": SimTypePointer(SimTypeFunction([SimTypeBottom(label="AUTHZ_CLIENT_CONTEXT_HANDLE"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0), SimTypePointer(SimTypePointer(SimTypeBottom(label="Void"), offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hAuthzClientContext", "capid", "pArgs", "pCentralAccessPolicyApplicable", "ppCentralAccessPolicy"]), offset=0), "pfnFreeCentralAccessPolicy": SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeBottom(label="Void"), arg_names=["pCentralAccessPolicy"]), offset=0)}, name="AUTHZ_INIT_INFO", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["Flags", "pAuthzInitInfo", "phAuthzResourceManager"]),
#
'AuthzInitializeRemoteResourceManager': SimTypeFunction([SimTypePointer(SimStruct({"version": SimTypeShort(signed=False, label="UInt16"), "ObjectUuid": SimTypePointer(SimTypeChar(label="Char"), offset=0), "ProtSeq": SimTypePointer(SimTypeChar(label="Char"), offset=0), "NetworkAddr": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Endpoint": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Options": SimTypePointer(SimTypeChar(label="Char"), offset=0), "ServerSpn": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="AUTHZ_RPC_INIT_INFO_CLIENT", pack=False, align=None), offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["pRpcInitInfo", "phAuthzResourceManager"]),
#
'AuthzFreeResourceManager': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hAuthzResourceManager"]),
#
'AuthzInitializeContextFromToken': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimUnion({"Anonymous": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_Anonymous_e__Struct", pack=False, align=None), "u": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_u_e__Struct", pack=False, align=None), "QuadPart": SimTypeLongLong(signed=True, label="Int64")}, name="<anon>", label="None"), offset=0), SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="LUID", pack=False, align=None), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["Flags", "TokenHandle", "hAuthzResourceManager", "pExpirationTime", "Identifier", "DynamicGroupArgs", "phAuthzClientContext"]),
#
'AuthzInitializeContextFromSid': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimUnion({"Anonymous": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_Anonymous_e__Struct", pack=False, align=None), "u": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_u_e__Struct", pack=False, align=None), "QuadPart": SimTypeLongLong(signed=True, label="Int64")}, name="<anon>", label="None"), offset=0), SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="LUID", pack=False, align=None), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["Flags", "UserSid", "hAuthzResourceManager", "pExpirationTime", "Identifier", "DynamicGroupArgs", "phAuthzClientContext"]),
#
'AuthzInitializeContextFromAuthzContext': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimUnion({"Anonymous": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_Anonymous_e__Struct", pack=False, align=None), "u": SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="_u_e__Struct", pack=False, align=None), "QuadPart": SimTypeLongLong(signed=True, label="Int64")}, name="<anon>", label="None"), offset=0), SimStruct({"LowPart": SimTypeInt(signed=False, label="UInt32"), "HighPart": SimTypeInt(signed=True, label="Int32")}, name="LUID", pack=False, align=None), SimTypePointer(SimTypeBottom(label="Void"), offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["Flags", "hAuthzClientContext", "pExpirationTime", "Identifier", "DynamicGroupArgs", "phNewAuthzClientContext"]),
#
'AuthzInitializeCompoundContext': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["UserContext", "DeviceContext", "phCompoundContext"]),
#
'AuthzAddSidsToContext': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"Sid": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "Attributes": SimTypeInt(signed=False, label="UInt32")}, name="SID_AND_ATTRIBUTES", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"Sid": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "Attributes": SimTypeInt(signed=False, label="UInt32")}, name="SID_AND_ATTRIBUTES", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hAuthzClientContext", "Sids", "SidCount", "RestrictedSids", "RestrictedSidCount", "phNewAuthzClientContext"]),
#
'AuthzModifySecurityAttributes': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=False, label="AUTHZ_SECURITY_ATTRIBUTE_OPERATION"), offset=0), SimTypePointer(SimStruct({"Version": SimTypeShort(signed=False, label="UInt16"), "Reserved": SimTypeShort(signed=False, label="UInt16"), "AttributeCount": SimTypeInt(signed=False, label="UInt32"), "Attribute": SimUnion({"pAttributeV1": SimTypePointer(SimStruct({"pName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "ValueType": SimTypeShort(signed=False, label="UInt16"), "Reserved": SimTypeShort(signed=False, label="UInt16"), "Flags": SimTypeInt(signed=False, label="AUTHZ_SECURITY_ATTRIBUTE_FLAGS"), "ValueCount": SimTypeInt(signed=False, label="UInt32"), "Values": SimUnion({"pInt64": SimTypePointer(SimTypeLongLong(signed=True, label="Int64"), offset=0), "pUint64": SimTypePointer(SimTypeLongLong(signed=False, label="UInt64"), offset=0), "ppString": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0), "pFqbn": SimTypePointer(SimStruct({"Version": SimTypeLongLong(signed=False, label="UInt64"), "pName": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="AUTHZ_SECURITY_ATTRIBUTE_FQBN_VALUE", pack=False, align=None), offset=0), "pOctetString": SimTypePointer(SimStruct({"pValue": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "ValueLength": SimTypeInt(signed=False, label="UInt32")}, name="AUTHZ_SECURITY_ATTRIBUTE_OCTET_STRING_VALUE", pack=False, align=None), offset=0)}, name="<anon>", label="None")}, name="AUTHZ_SECURITY_ATTRIBUTE_V1", pack=False, align=None), offset=0)}, name="<anon>", label="None")}, name="AUTHZ_SECURITY_ATTRIBUTES_INFORMATION", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hAuthzClientContext", "pOperations", "pAttributes"]),
#
'AuthzModifyClaims': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="AUTHZ_CONTEXT_INFORMATION_CLASS"), SimTypePointer(SimTypeInt(signed=False, label="AUTHZ_SECURITY_ATTRIBUTE_OPERATION"), offset=0), SimTypePointer(SimStruct({"Version": SimTypeShort(signed=False, label="UInt16"), "Reserved": SimTypeShort(signed=False, label="UInt16"), "AttributeCount": SimTypeInt(signed=False, label="UInt32"), "Attribute": SimUnion({"pAttributeV1": SimTypePointer(SimStruct({"pName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "ValueType": SimTypeShort(signed=False, label="UInt16"), "Reserved": SimTypeShort(signed=False, label="UInt16"), "Flags": SimTypeInt(signed=False, label="AUTHZ_SECURITY_ATTRIBUTE_FLAGS"), "ValueCount": SimTypeInt(signed=False, label="UInt32"), "Values": SimUnion({"pInt64": SimTypePointer(SimTypeLongLong(signed=True, label="Int64"), offset=0), "pUint64": SimTypePointer(SimTypeLongLong(signed=False, label="UInt64"), offset=0), "ppString": SimTypePointer(SimTypePointer(SimTypeChar(label="Char"), offset=0), offset=0), "pFqbn": SimTypePointer(SimStruct({"Version": SimTypeLongLong(signed=False, label="UInt64"), "pName": SimTypePointer(SimTypeChar(label="Char"), offset=0)}, name="AUTHZ_SECURITY_ATTRIBUTE_FQBN_VALUE", pack=False, align=None), offset=0), "pOctetString": SimTypePointer(SimStruct({"pValue": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "ValueLength": SimTypeInt(signed=False, label="UInt32")}, name="AUTHZ_SECURITY_ATTRIBUTE_OCTET_STRING_VALUE", pack=False, align=None), offset=0)}, name="<anon>", label="None")}, name="AUTHZ_SECURITY_ATTRIBUTE_V1", pack=False, align=None), offset=0)}, name="<anon>", label="None")}, name="AUTHZ_SECURITY_ATTRIBUTES_INFORMATION", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hAuthzClientContext", "ClaimClass", "pClaimOperations", "pClaims"]),
#
'AuthzModifySids': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="AUTHZ_CONTEXT_INFORMATION_CLASS"), SimTypePointer(SimTypeInt(signed=False, label="AUTHZ_SID_OPERATION"), offset=0), SimTypePointer(SimStruct({"GroupCount": SimTypeInt(signed=False, label="UInt32"), "Groups": SimTypePointer(SimStruct({"Sid": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "Attributes": SimTypeInt(signed=False, label="UInt32")}, name="SID_AND_ATTRIBUTES", pack=False, align=None), offset=0)}, name="TOKEN_GROUPS", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hAuthzClientContext", "SidClass", "pSidOperations", "pSids"]),
#
'AuthzSetAppContainerInformation': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"Sid": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "Attributes": SimTypeInt(signed=False, label="UInt32")}, name="SID_AND_ATTRIBUTES", pack=False, align=None), label="LPArray", offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hAuthzClientContext", "pAppContainerSid", "CapabilityCount", "pCapabilitySids"]),
#
'AuthzGetInformationFromContext': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="AUTHZ_CONTEXT_INFORMATION_CLASS"), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hAuthzClientContext", "InfoClass", "BufferSize", "pSizeRequired", "Buffer"]),
#
'AuthzFreeContext': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hAuthzClientContext"]),
#
'AuthzInitializeObjectAccessAuditEvent': SimTypeFunction([SimTypeInt(signed=False, label="AUTHZ_INITIALIZE_OBJECT_ACCESS_AUDIT_EVENT_FLAGS"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["Flags", "hAuditEventType", "szOperationType", "szObjectType", "szObjectName", "szAdditionalInfo", "phAuditEvent", "dwAdditionalParameterCount"]),
#
'AuthzInitializeObjectAccessAuditEvent2': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["Flags", "hAuditEventType", "szOperationType", "szObjectType", "szObjectName", "szAdditionalInfo", "szAdditionalInfo2", "phAuditEvent", "dwAdditionalParameterCount"]),
#
'AuthzFreeAuditEvent': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hAuditEvent"]),
#
'AuthzEvaluateSacl': SimTypeFunction([SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"DesiredAccess": SimTypeInt(signed=False, label="UInt32"), "PrincipalSelfSid": SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), "ObjectTypeList": SimTypePointer(SimTypeBottom(label="OBJECT_TYPE_LIST"), offset=0), "ObjectTypeListLength": SimTypeInt(signed=False, label="UInt32"), "OptionalArguments": SimTypePointer(SimTypeBottom(label="Void"), offset=0)}, name="AUTHZ_ACCESS_REQUEST", pack=False, align=None), offset=0), SimTypePointer(SimStruct({"AclRevision": SimTypeChar(label="Byte"), "Sbz1": SimTypeChar(label="Byte"), "AclSize": SimTypeShort(signed=False, label="UInt16"), "AceCount": SimTypeShort(signed=False, label="UInt16"), "Sbz2": SimTypeShort(signed=False, label="UInt16")}, name="ACL", pack=False, align=None), offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypeInt(signed=True, label="Int32"), SimTypePointer(SimTypeInt(signed=True, label="Int32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["AuthzClientContext", "pRequest", "Sacl", "GrantedAccess", "AccessGranted", "pbGenerateAudit"]),
#
'AuthzInstallSecurityEventSource': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"dwFlags": SimTypeInt(signed=False, label="UInt32"), "szEventSourceName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "szEventMessageFile": SimTypePointer(SimTypeChar(label="Char"), offset=0), "szEventSourceXmlSchemaFile": SimTypePointer(SimTypeChar(label="Char"), offset=0), "szEventAccessStringsFile": SimTypePointer(SimTypeChar(label="Char"), offset=0), "szExecutableImagePath": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Anonymous": SimUnion({"pReserved": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "pProviderGuid": SimTypePointer(SimTypeBottom(label="Guid"), offset=0)}, name="<anon>", label="None"), "dwObjectTypeNameCount": SimTypeInt(signed=False, label="UInt32"), "ObjectTypeNames": SimTypePointer(SimStruct({"szObjectTypeName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "dwOffset": SimTypeInt(signed=False, label="UInt32")}, name="AUTHZ_REGISTRATION_OBJECT_TYPE_NAME_OFFSET", pack=False, align=None), offset=0)}, name="AUTHZ_SOURCE_SCHEMA_REGISTRATION", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["dwFlags", "pRegistration"]),
#
'AuthzUninstallSecurityEventSource': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["dwFlags", "szEventSourceName"]),
#
'AuthzEnumerateSecurityEventSources': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimStruct({"dwFlags": SimTypeInt(signed=False, label="UInt32"), "szEventSourceName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "szEventMessageFile": SimTypePointer(SimTypeChar(label="Char"), offset=0), "szEventSourceXmlSchemaFile": SimTypePointer(SimTypeChar(label="Char"), offset=0), "szEventAccessStringsFile": SimTypePointer(SimTypeChar(label="Char"), offset=0), "szExecutableImagePath": SimTypePointer(SimTypeChar(label="Char"), offset=0), "Anonymous": SimUnion({"pReserved": SimTypePointer(SimTypeBottom(label="Void"), offset=0), "pProviderGuid": SimTypePointer(SimTypeBottom(label="Guid"), offset=0)}, name="<anon>", label="None"), "dwObjectTypeNameCount": SimTypeInt(signed=False, label="UInt32"), "ObjectTypeNames": SimTypePointer(SimStruct({"szObjectTypeName": SimTypePointer(SimTypeChar(label="Char"), offset=0), "dwOffset": SimTypeInt(signed=False, label="UInt32")}, name="AUTHZ_REGISTRATION_OBJECT_TYPE_NAME_OFFSET", pack=False, align=None), offset=0)}, name="AUTHZ_SOURCE_SCHEMA_REGISTRATION", pack=False, align=None), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0), SimTypePointer(SimTypeInt(signed=False, label="UInt32"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["dwFlags", "Buffer", "pdwCount", "pdwLength"]),
#
'AuthzRegisterSecurityEventSource': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeChar(label="Char"), offset=0), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["dwFlags", "szEventSourceName", "phEventProvider"]),
#
'AuthzUnregisterSecurityEventSource': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["dwFlags", "phEventProvider"]),
#
'AuthzReportSecurityEvent': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32")], SimTypeInt(signed=True, label="Int32"), arg_names=["dwFlags", "hEventProvider", "dwAuditId", "pUserSid", "dwCount"]),
#
'AuthzReportSecurityEventFromParams': SimTypeFunction([SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypeInt(signed=False, label="UInt32"), SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0), SimTypePointer(SimStruct({"Length": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Count": SimTypeShort(signed=False, label="UInt16"), "Parameters": SimTypePointer(SimStruct({"Type": SimTypeInt(signed=False, label="AUDIT_PARAM_TYPE"), "Length": SimTypeInt(signed=False, label="UInt32"), "Flags": SimTypeInt(signed=False, label="UInt32"), "Anonymous1": SimUnion({"Data0": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "String": SimTypePointer(SimTypeChar(label="Char"), offset=0), "u": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "psid": SimTypePointer(SimTypeBottom(label="SID"), offset=0), "pguid": SimTypePointer(SimTypeBottom(label="Guid"), offset=0), "LogonId_LowPart": SimTypeInt(signed=False, label="UInt32"), "pObjectTypes": SimTypePointer(SimStruct({"Count": SimTypeShort(signed=False, label="UInt16"), "Flags": SimTypeShort(signed=False, label="UInt16"), "pObjectTypes": SimTypePointer(SimStruct({"ObjectType": SimTypeBottom(label="Guid"), "Flags": SimTypeShort(signed=False, label="UInt16"), "Level": SimTypeShort(signed=False, label="UInt16"), "AccessMask": SimTypeInt(signed=False, label="UInt32")}, name="AUDIT_OBJECT_TYPE", pack=False, align=None), offset=0)}, name="AUDIT_OBJECT_TYPES", pack=False, align=None), offset=0), "pIpAddress": SimTypePointer(SimStruct({"pIpAddress": SimTypeFixedSizeArray(SimTypeChar(label="Byte"), 128)}, name="AUDIT_IP_ADDRESS", pack=False, align=None), offset=0)}, name="<anon>", label="None"), "Anonymous2": SimUnion({"Data1": SimTypePointer(SimTypeInt(signed=False, label="UInt"), label="UIntPtr", offset=0), "LogonId_HighPart": SimTypeInt(signed=True, label="Int32")}, name="<anon>", label="None")}, name="AUDIT_PARAM", pack=False, align=None), offset=0)}, name="AUDIT_PARAMS", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["dwFlags", "hEventProvider", "dwAuditId", "pUserSid", "pParams"]),
#
'AuthzRegisterCapChangeNotification': SimTypeFunction([SimTypePointer(SimTypePointer(SimStruct({"unused": SimTypeInt(signed=True, label="Int32")}, name="AUTHZ_CAP_CHANGE_SUBSCRIPTION_HANDLE__", pack=False, align=None), offset=0), offset=0), SimTypePointer(SimTypeFunction([SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=False, label="UInt32"), arg_names=["lpThreadParameter"]), offset=0), SimTypePointer(SimTypeBottom(label="Void"), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["phCapChangeSubscription", "pfnCapChangeCallback", "pCallbackContext"]),
#
'AuthzUnregisterCapChangeNotification': SimTypeFunction([SimTypePointer(SimStruct({"unused": SimTypeInt(signed=True, label="Int32")}, name="AUTHZ_CAP_CHANGE_SUBSCRIPTION_HANDLE__", pack=False, align=None), offset=0)], SimTypeInt(signed=True, label="Int32"), arg_names=["hCapChangeSubscription"]),
#
'AuthzFreeCentralAccessPolicyCache': SimTypeFunction([], SimTypeInt(signed=True, label="Int32")),
}
lib.set_prototypes(prototypes)
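# --- illustrative sketch, not part of the generated table above ---
# How one of these prototype objects can be inspected, assuming angr's
# sim_type API is available; the SimTypeFunction below mirrors the
# 'AuthzFreeContext' entry defined earlier.
from angr.sim_type import SimTypeFunction, SimTypePointer, SimTypeInt

proto = SimTypeFunction(
    [SimTypePointer(SimTypeInt(signed=True, label="Int"), label="IntPtr", offset=0)],
    SimTypeInt(signed=True, label="Int32"),
    arg_names=["hAuthzClientContext"],
)
print(proto.arg_names)  # ['hAuthzClientContext']
print(proto.returnty)   # the Int32 return type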
| 413.306818
| 3,688
| 0.75398
| 3,842
| 36,371
| 7.069755
| 0.08303
| 0.063655
| 0.088948
| 0.106767
| 0.876224
| 0.86459
| 0.835653
| 0.829725
| 0.8136
| 0.809292
| 0
| 0.020076
| 0.063237
| 36,371
| 87
| 3,689
| 418.057471
| 0.777142
| 0.00077
| 0
| 0
| 0
| 0
| 0.263294
| 0.076085
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.106383
| 0
| 0.106383
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
5379805aa7023b59d8f0f3df3e851b27e3179499
| 245
|
py
|
Python
|
tests/test_filter_plugins.py
|
Temelio/ansible-role-jenkins
|
9506d9a007c47c674a7d3f80d71ecc83ff8cc004
|
[
"MIT"
] | null | null | null |
tests/test_filter_plugins.py
|
Temelio/ansible-role-jenkins
|
9506d9a007c47c674a7d3f80d71ecc83ff8cc004
|
[
"MIT"
] | null | null | null |
tests/test_filter_plugins.py
|
Temelio/ansible-role-jenkins
|
9506d9a007c47c674a7d3f80d71ecc83ff8cc004
|
[
"MIT"
] | 1
|
2022-02-21T09:32:38.000Z
|
2022-02-21T09:32:38.000Z
|
import pytest
from ansible import errors
#==============================================================================
# Tests
#==============================================================================
def test_fake():
assert True
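# --- hedged sketch, not from the repo above ---
# What a non-placeholder filter-plugin test could look like; `to_groovy` is a
# hypothetical filter written here for illustration, while pytest and
# ansible.errors are the real imports the file already uses.
import pytest
from ansible import errors

def to_groovy(value):
    if not isinstance(value, dict):
        raise errors.AnsibleFilterError('to_groovy expects a dict')
    return str(value)

def test_to_groovy_rejects_non_dict():
    with pytest.raises(errors.AnsibleFilterError):
        to_groovy([1, 2, 3])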
| 22.272727
| 79
| 0.253061
| 12
| 245
| 5.083333
| 0.916667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.085714
| 245
| 10
| 80
| 24.5
| 0.272321
| 0.661224
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0.25
| 1
| 0.25
| true
| 0
| 0.5
| 0
| 0.75
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
53b68815c260c64259305c5021671aa9f1094165
| 147
|
py
|
Python
|
end2you/models/__init__.py
|
VincentKaras/end2you
|
2e49bc825ddf98dd49215ec2317247985de9070e
|
[
"BSD-3-Clause"
] | 85
|
2018-01-18T20:11:25.000Z
|
2022-03-29T19:51:22.000Z
|
end2you/models/__init__.py
|
VincentKaras/end2you
|
2e49bc825ddf98dd49215ec2317247985de9070e
|
[
"BSD-3-Clause"
] | 7
|
2018-02-27T22:43:37.000Z
|
2021-11-22T20:57:49.000Z
|
end2you/models/__init__.py
|
VincentKaras/end2you
|
2e49bc825ddf98dd49215ec2317247985de9070e
|
[
"BSD-3-Clause"
] | 18
|
2018-02-20T15:05:59.000Z
|
2022-02-25T15:25:57.000Z
|
import end2you.models.audio
import end2you.models.visual
import end2you.models.multimodal
import end2you.models.rnn
from .model_provider import *
| 21
| 32
| 0.843537
| 20
| 147
| 6.15
| 0.5
| 0.422764
| 0.617886
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.029851
| 0.088435
| 147
| 6
| 33
| 24.5
| 0.88806
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
53e87edc7b0efdadf9d2f12e78b605b46c081658
| 440
|
py
|
Python
|
modeldeployer/exceptions.py
|
AhmetGurbuzz/model-deployer
|
384910c2bf116375349d36a7fd129b0e9474c61c
|
[
"Apache-2.0"
] | null | null | null |
modeldeployer/exceptions.py
|
AhmetGurbuzz/model-deployer
|
384910c2bf116375349d36a7fd129b0e9474c61c
|
[
"Apache-2.0"
] | null | null | null |
modeldeployer/exceptions.py
|
AhmetGurbuzz/model-deployer
|
384910c2bf116375349d36a7fd129b0e9474c61c
|
[
"Apache-2.0"
] | null | null | null |
class IsNotList(Exception):
def __init__(self, message="object type is not List"):
self.message = message
super().__init__(self.message)
def __str__(self):
return f'{self.message}'
class IsNotFunction(Exception):
def __init__(self, message="object type is not Function"):
self.message = message
super().__init__(self.message)
def __str__(self):
return f'{self.message}'
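# --- usage sketch (relies on the IsNotList class defined above) ---
# Raising and catching the exception from a hypothetical validation helper:
def require_list(obj):
    if not isinstance(obj, list):
        raise IsNotList(f"expected List, got {type(obj).__name__}")
    return obj

try:
    require_list("not-a-list")
except IsNotList as exc:
    print(exc)  # expected List, got str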
| 23.157895
| 62
| 0.645455
| 52
| 440
| 5
| 0.346154
| 0.338462
| 0.230769
| 0.153846
| 0.830769
| 0.830769
| 0.830769
| 0.830769
| 0.830769
| 0.507692
| 0
| 0
| 0.236364
| 440
| 18
| 63
| 24.444444
| 0.77381
| 0
| 0
| 0.666667
| 0
| 0
| 0.178082
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.333333
| false
| 0
| 0
| 0.166667
| 0.666667
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 10
|
53f3ac5e856aa651b5d4256b3002721c17891bd8
| 113
|
py
|
Python
|
mmdet/version.py
|
qjy981010/CMaskTrack-RCNN
|
4fe9766e8e8141be896e72d5e92898b7647c4205
|
[
"Apache-2.0"
] | 54
|
2021-11-05T02:15:15.000Z
|
2022-03-23T13:40:43.000Z
|
mmdet/version.py
|
qjy981010/CMaskTrack-RCNN
|
4fe9766e8e8141be896e72d5e92898b7647c4205
|
[
"Apache-2.0"
] | 3
|
2021-11-12T04:43:23.000Z
|
2022-03-22T14:48:17.000Z
|
mmdet/version.py
|
qjy981010/CMaskTrack-RCNN
|
4fe9766e8e8141be896e72d5e92898b7647c4205
|
[
"Apache-2.0"
] | 6
|
2021-11-09T02:26:38.000Z
|
2022-03-05T01:38:30.000Z
|
# GENERATED VERSION FILE
# TIME: Wed Nov 4 17:09:23 2020
__version__ = '0.5.6+ecfb7d4'
short_version = '0.5.6'
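# --- illustrative consumer of the constants above (not in the generated file) ---
# short_version can be parsed into a comparable tuple for version gates:
version_info = tuple(int(x) for x in short_version.split('.'))  # (0, 5, 6)
assert version_info >= (0, 5, 0)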
| 18.833333
| 32
| 0.690265
| 21
| 113
| 3.47619
| 0.761905
| 0.219178
| 0.246575
| 0.273973
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.202128
| 0.168142
| 113
| 5
| 33
| 22.6
| 0.574468
| 0.469027
| 0
| 0
| 1
| 0
| 0.315789
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
54d321ac4dc066fd0ae01e97e71b4554fd3151e7
| 124
|
py
|
Python
|
protobuf_serialization/tests/utils.py
|
trusttoken/protobuf-serialization-py
|
da7748ad94429f7816ae23c8d5fcc0aca0203adf
|
[
"MIT"
] | 1
|
2020-03-15T09:55:03.000Z
|
2020-03-15T09:55:03.000Z
|
protobuf_serialization/tests/utils.py
|
alvinchow86/protobuf-serialization-py
|
af856b7b872317917274f74bb69418d19dafc3fa
|
[
"MIT"
] | null | null | null |
protobuf_serialization/tests/utils.py
|
alvinchow86/protobuf-serialization-py
|
af856b7b872317917274f74bb69418d19dafc3fa
|
[
"MIT"
] | null | null | null |
from datetime import datetime
from dateutil.tz import UTC
def utcnow():
return datetime.utcnow().replace(tzinfo=UTC)
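# --- alternative sketch, not from the repo above ---
# datetime.utcnow() is deprecated as of Python 3.12; an equivalent helper
# using only the standard library, which should behave the same:
from datetime import timezone

def utcnow_stdlib():
    return datetime.now(timezone.utc)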
| 15.5
| 48
| 0.758065
| 17
| 124
| 5.529412
| 0.647059
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.153226
| 124
| 7
| 49
| 17.714286
| 0.895238
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.25
| true
| 0
| 0.5
| 0.25
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 1
| 1
| 1
| 0
|
0
| 7
|
54d71374d63d5227edebe98a0af6a5135051f952
| 11,429
|
py
|
Python
|
rabbitmq/tests/test_rabbitmq.py
|
brentm5/integrations-core
|
5cac8788c95d8820435ef9c5d32d6a5463cf491d
|
[
"BSD-3-Clause"
] | 4
|
2021-06-21T19:21:49.000Z
|
2021-06-23T21:21:55.000Z
|
rabbitmq/tests/test_rabbitmq.py
|
brentm5/integrations-core
|
5cac8788c95d8820435ef9c5d32d6a5463cf491d
|
[
"BSD-3-Clause"
] | null | null | null |
rabbitmq/tests/test_rabbitmq.py
|
brentm5/integrations-core
|
5cac8788c95d8820435ef9c5d32d6a5463cf491d
|
[
"BSD-3-Clause"
] | 1
|
2021-06-21T19:21:51.000Z
|
2021-06-21T19:21:51.000Z
|
# (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import pika
import logging
import pytest
from contextlib import closing
from datadog_checks.rabbitmq import RabbitMQ
from . import common, metrics
log = logging.getLogger(__file__)
@pytest.mark.usefixtures('dd_environment')
def test_rabbitmq(aggregator, check):
check.check(common.CONFIG)
# Node attributes
for mname in metrics.COMMON_METRICS:
aggregator.assert_metric_has_tag_prefix(mname, 'rabbitmq_node', count=1)
# debug aid: log every metric the aggregator collected
for m, v in aggregator._metrics.items():
log.warning("{} {}".format(m, v))
aggregator.assert_metric('rabbitmq.node.partitions', value=0, count=1)
aggregator.assert_metric('rabbitmq.connections',
tags=['rabbitmq_vhost:/', "tag1:1", "tag2"],
value=0, count=1)
aggregator.assert_metric('rabbitmq.connections',
tags=['rabbitmq_vhost:myvhost', "tag1:1", "tag2"],
value=0, count=1)
aggregator.assert_metric('rabbitmq.connections',
tags=['rabbitmq_vhost:myothervhost', "tag1:1", "tag2"],
value=0, count=1)
# Queue attributes, should be only one queue fetched
for mname in metrics.Q_METRICS:
aggregator.assert_metric_has_tag(mname, 'rabbitmq_queue:test1', count=1)
# Exchange attributes, should be only one exchange fetched
for mname in metrics.E_METRICS:
aggregator.assert_metric_has_tag(mname, 'rabbitmq_exchange:test1', count=1)
# Overview attributes
for mname in metrics.OVERVIEW_METRICS_TOTALS:
aggregator.assert_metric_has_tag(mname, 'rabbitmq_cluster:rabbitmqtest', count=1)
for mname in metrics.OVERVIEW_METRICS_MESSAGES:
# Not all message metrics are always present, so we assert with at_least=0
aggregator.assert_metric_has_tag(mname, 'rabbitmq_cluster:rabbitmqtest', at_least=0)
aggregator.assert_service_check('rabbitmq.aliveness',
tags=['vhost:/', "tag1:1", "tag2"],
status=RabbitMQ.OK)
aggregator.assert_service_check('rabbitmq.aliveness',
tags=['vhost:myvhost', "tag1:1", "tag2"],
status=RabbitMQ.OK)
aggregator.assert_service_check('rabbitmq.aliveness',
tags=['vhost:myothervhost', "tag1:1", "tag2"],
status=RabbitMQ.OK)
aggregator.assert_service_check('rabbitmq.status', tags=["tag1:1", "tag2"], status=RabbitMQ.OK)
aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures('dd_environment')
def test_regex(aggregator, check):
check.check(common.CONFIG_REGEX)
# Node attributes
for mname in metrics.COMMON_METRICS:
aggregator.assert_metric_has_tag_prefix(mname, 'rabbitmq_node', count=1)
aggregator.assert_metric('rabbitmq.connections', tags=['rabbitmq_vhost:/'], value=0, count=1)
aggregator.assert_metric('rabbitmq.connections', tags=['rabbitmq_vhost:myvhost'], value=0, count=1)
aggregator.assert_metric('rabbitmq.connections', tags=['rabbitmq_vhost:myothervhost'], value=0, count=1)
# Exchange attributes
for mname in metrics.E_METRICS:
aggregator.assert_metric_has_tag(mname, 'rabbitmq_exchange:test1', count=1)
aggregator.assert_metric_has_tag(mname, 'rabbitmq_exchange:test5', count=1)
aggregator.assert_metric_has_tag(mname, 'rabbitmq_exchange:tralala', count=0)
# Queue attributes
for mname in metrics.Q_METRICS:
aggregator.assert_metric_has_tag(mname, 'rabbitmq_queue:test1', count=3)
aggregator.assert_metric_has_tag(mname, 'rabbitmq_queue:test5', count=3)
aggregator.assert_metric_has_tag(mname, 'rabbitmq_queue:tralala', count=0)
# Overview attributes
for mname in metrics.OVERVIEW_METRICS_TOTALS:
aggregator.assert_metric_has_tag(mname, 'rabbitmq_cluster:rabbitmqtest', count=1)
for mname in metrics.OVERVIEW_METRICS_MESSAGES:
# Not all message metrics are always present, so we assert with at_least=0
aggregator.assert_metric_has_tag(mname, 'rabbitmq_cluster:rabbitmqtest', at_least=0)
aggregator.assert_service_check('rabbitmq.aliveness', tags=['vhost:/'], status=RabbitMQ.OK)
aggregator.assert_service_check('rabbitmq.aliveness', tags=['vhost:myvhost'], status=RabbitMQ.OK)
aggregator.assert_service_check('rabbitmq.aliveness', tags=['vhost:myothervhost'], status=RabbitMQ.OK)
aggregator.assert_service_check('rabbitmq.status', status=RabbitMQ.OK)
aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures('dd_environment')
def test_limit_vhosts(aggregator, check):
check.check(common.CONFIG_REGEX)
# Node attributes
for mname in metrics.COMMON_METRICS:
aggregator.assert_metric_has_tag_prefix(mname, 'rabbitmq_node', count=1)
aggregator.assert_metric('rabbitmq.connections', tags=['rabbitmq_vhost:/'], value=0, count=1)
aggregator.assert_metric('rabbitmq.connections', tags=['rabbitmq_vhost:myvhost'], value=0, count=1)
aggregator.assert_metric('rabbitmq.connections', tags=['rabbitmq_vhost:myothervhost'], value=0, count=1)
for mname in metrics.Q_METRICS:
aggregator.assert_metric_has_tag(mname, 'rabbitmq_queue:test1', count=3)
aggregator.assert_metric_has_tag(mname, 'rabbitmq_queue:test5', count=3)
aggregator.assert_metric_has_tag(mname, 'rabbitmq_queue:tralala', count=0)
for mname in metrics.E_METRICS:
aggregator.assert_metric(mname, count=2)
# Overview attributes
for mname in metrics.OVERVIEW_METRICS_TOTALS:
aggregator.assert_metric_has_tag(mname, 'rabbitmq_cluster:rabbitmqtest', count=1)
for mname in metrics.OVERVIEW_METRICS_MESSAGES:
# Not all message metrics are always present, so we assert with at_least=0
aggregator.assert_metric_has_tag(mname, 'rabbitmq_cluster:rabbitmqtest', at_least=0)
# Service checks
aggregator.assert_service_check('rabbitmq.aliveness', tags=['vhost:/'], status=RabbitMQ.OK)
aggregator.assert_service_check('rabbitmq.aliveness', tags=['vhost:myvhost'], status=RabbitMQ.OK)
aggregator.assert_service_check('rabbitmq.aliveness', tags=['vhost:myothervhost'], status=RabbitMQ.OK)
aggregator.assert_service_check('rabbitmq.status', status=RabbitMQ.OK)
aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures('dd_environment')
def test_family_tagging(aggregator, check):
check.check(common.CONFIG_WITH_FAMILY)
# Node attributes
for mname in metrics.COMMON_METRICS:
aggregator.assert_metric_has_tag_prefix(mname, 'rabbitmq_node', count=1)
aggregator.assert_metric('rabbitmq.connections', tags=['rabbitmq_vhost:/'], value=0, count=1)
aggregator.assert_metric('rabbitmq.connections', tags=['rabbitmq_vhost:myvhost'], value=0, count=1)
aggregator.assert_metric('rabbitmq.connections', tags=['rabbitmq_vhost:myothervhost'], value=0, count=1)
for mname in metrics.E_METRICS:
aggregator.assert_metric_has_tag(mname, 'rabbitmq_exchange_family:test', count=2)
for mname in metrics.Q_METRICS:
aggregator.assert_metric_has_tag(mname, 'rabbitmq_queue_family:test', count=6)
# Overview attributes
for mname in metrics.OVERVIEW_METRICS_TOTALS:
aggregator.assert_metric_has_tag(mname, 'rabbitmq_cluster:rabbitmqtest', count=1)
for mname in metrics.OVERVIEW_METRICS_MESSAGES:
# Not all message metrics are always present, so we assert with at_least=0
aggregator.assert_metric_has_tag(mname, 'rabbitmq_cluster:rabbitmqtest', at_least=0)
aggregator.assert_metric('rabbitmq.connections', tags=['rabbitmq_vhost:/'], value=0, count=1)
aggregator.assert_service_check('rabbitmq.aliveness', tags=['vhost:/'], status=RabbitMQ.OK)
aggregator.assert_service_check('rabbitmq.aliveness', tags=['vhost:myvhost'], status=RabbitMQ.OK)
aggregator.assert_service_check('rabbitmq.aliveness', tags=['vhost:myothervhost'], status=RabbitMQ.OK)
aggregator.assert_service_check('rabbitmq.status', status=RabbitMQ.OK)
aggregator.assert_all_metrics_covered()
@pytest.mark.usefixtures('dd_environment')
def test_connections(aggregator, check):
# with no open connections and no 'vhosts' list in the conf, each vhost reports zero connections
check.check(common.CONFIG)
aggregator.assert_metric('rabbitmq.connections',
tags=['rabbitmq_vhost:/', "tag1:1", "tag2"],
value=0, count=1)
aggregator.assert_metric('rabbitmq.connections',
tags=['rabbitmq_vhost:myvhost', "tag1:1", "tag2"],
value=0, count=1)
aggregator.assert_metric('rabbitmq.connections',
tags=['rabbitmq_vhost:myothervhost', "tag1:1", "tag2"],
value=0, count=1)
# with no open connections but a 'vhosts' list in the conf, one metric is produced per configured vhost
aggregator.reset()
check.check(common.CONFIG_TEST_VHOSTS)
aggregator.assert_metric('rabbitmq.connections', tags=['rabbitmq_vhost:test'], value=0, count=1)
aggregator.assert_metric('rabbitmq.connections', tags=['rabbitmq_vhost:test2'], value=0, count=1)
aggregator.assert_metric('rabbitmq.connections', count=2)
with closing(pika.BlockingConnection()), closing(pika.BlockingConnection()):
aggregator.reset()
check.check(common.CONFIG)
aggregator.assert_metric('rabbitmq.connections',
tags=['rabbitmq_vhost:/', "tag1:1", "tag2"],
value=2, count=1)
aggregator.assert_metric('rabbitmq.connections', count=3)
aggregator.assert_metric('rabbitmq.connections.state',
tags=['rabbitmq_conn_state:running', "tag1:1", "tag2"],
value=2, count=1)
aggregator.reset()
check.check(common.CONFIG_DEFAULT_VHOSTS)
aggregator.assert_metric('rabbitmq.connections',
tags=['rabbitmq_vhost:/'],
value=2, count=1)
aggregator.assert_metric('rabbitmq.connections',
tags=['rabbitmq_vhost:test'],
value=0, count=1)
aggregator.assert_metric('rabbitmq.connections', count=2)
aggregator.assert_metric('rabbitmq.connections.state',
tags=['rabbitmq_conn_state:running'],
value=0, count=0)
aggregator.reset()
check.check(common.CONFIG_TEST_VHOSTS)
aggregator.assert_metric('rabbitmq.connections',
tags=['rabbitmq_vhost:test'],
value=0, count=1)
aggregator.assert_metric('rabbitmq.connections',
tags=['rabbitmq_vhost:test2'],
value=0, count=1)
aggregator.assert_metric('rabbitmq.connections', count=2)
aggregator.assert_metric('rabbitmq.connections.state',
tags=['rabbitmq_conn_state:running'],
value=0, count=0)
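# --- hedged sketch, not the real fixture ---
# These tests read their config from the integration's tests/common.py; every
# key below is an assumption about CONFIG's shape for illustration only.
CONFIG_SKETCH = {
    'rabbitmq_api_url': 'http://localhost:15672/api/',
    'rabbitmq_user': 'guest',
    'rabbitmq_pass': 'guest',
    'tags': ['tag1:1', 'tag2'],
}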
| 48.84188
| 108
| 0.675737
| 1,314
| 11,429
| 5.668189
| 0.092085
| 0.165414
| 0.168367
| 0.124866
| 0.907223
| 0.890709
| 0.875134
| 0.869629
| 0.863588
| 0.840628
| 0
| 0.015441
| 0.212355
| 11,429
| 233
| 109
| 49.051502
| 0.811931
| 0.075335
| 0
| 0.787879
| 0
| 0
| 0.226932
| 0.080891
| 0
| 0
| 0
| 0
| 0.466667
| 1
| 0.030303
| false
| 0
| 0.042424
| 0
| 0.072727
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
54dbbea24c4b8c66e10cd68635453e3898ec804a
| 256
|
py
|
Python
|
test.py
|
xinhangchen/test
|
d625c3abc93c8469f29e06c80a25d886cffdc92c
|
[
"MIT"
] | 4
|
2018-02-06T03:15:36.000Z
|
2018-02-06T03:16:32.000Z
|
test.py
|
xinhangchen/test
|
d625c3abc93c8469f29e06c80a25d886cffdc92c
|
[
"MIT"
] | 1
|
2018-07-29T10:46:04.000Z
|
2018-07-29T10:46:04.000Z
|
test.py
|
xinhangchen/test
|
d625c3abc93c8469f29e06c80a25d886cffdc92c
|
[
"MIT"
] | 1
|
2018-07-29T10:08:29.000Z
|
2018-07-29T10:08:29.000Z
|
nun = 1
#var 1
# I insert new .
# kang kang kang zhen suai
# yumujun yumujun yumujun
# yumujun yumujun yumujun
# yumujun yumujun yumujun
# yumujun yumujun yumujun
# yumujun yumujun yumujun
# yumujun yumujun yumujun
# yumujun yumujun yumujun
| 9.481481
| 26
| 0.722656
| 33
| 256
| 5.606061
| 0.30303
| 1.513514
| 2.156757
| 2.724324
| 0.794595
| 0.794595
| 0.794595
| 0.794595
| 0.794595
| 0.794595
| 0
| 0.010204
| 0.234375
| 256
| 26
| 27
| 9.846154
| 0.933673
| 0.734375
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0
| null | null | 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
0756033dc7c817b205b07d9d8619f9b9d47b3376
| 3,403
|
py
|
Python
|
tests/test_folder.py
|
ariel415el/GPDM
|
50e0a3c3897eb5bbcec81c44a5668d230cdfd26c
|
[
"Apache-2.0"
] | 18
|
2021-11-16T19:09:09.000Z
|
2022-03-31T23:29:39.000Z
|
tests/test_folder.py
|
ariel415el/GPDM
|
50e0a3c3897eb5bbcec81c44a5668d230cdfd26c
|
[
"Apache-2.0"
] | 1
|
2022-03-30T16:36:36.000Z
|
2022-03-30T16:42:22.000Z
|
tests/test_folder.py
|
ariel415el/GPDM
|
50e0a3c3897eb5bbcec81c44a5668d230cdfd26c
|
[
"Apache-2.0"
] | 2
|
2022-03-30T15:17:09.000Z
|
2022-03-31T23:29:45.000Z
|
import os
import numpy as np
from tests.SIFID.sifid_score import calculate_sifid_given_paths
from tests.compute_diversity import compute_images_diversity
def test(reference_dir, synthetic_root):
all_synthetic_dirs = [os.path.join(synthetic_root, x) for x in os.listdir(synthetic_root)]
diversity = compute_images_diversity(reference_dir, all_synthetic_dirs)
scores = []
for synthetic_dir in all_synthetic_dirs:
sfid = calculate_sifid_given_paths(reference_dir, synthetic_dir, 1, False, 64)
scores.append(sfid)
# print(f"{os.path.basename}: SFID: {np.mean(scores):.3f} +- {np.std(scores):.3f}, Diversity: {diversity:.3f}")
print(f"{os.path.basename(synthetic_root).ljust(30)}: SFID: {np.mean(scores):.3f}, Diversity: {diversity:.3f}")
if __name__ == '__main__':
np.set_printoptions(suppress=True)
reference_dir = '/home/ariel/university/GPDM/images/SIGD16'
test(reference_dir, '/home/ariel/university/Efficient-GPNN/scripts/outputs/reshuffle/SIGD16_alpha=1')
test(reference_dir, '/home/ariel/university/Efficient-GPNN/scripts/outputs/reshuffle/SIGD16_alpha=0.005')
test(reference_dir, '/home/ariel/university/Efficient-GPNN/scripts/outputs/reshuffle/SIGD16_faissIVF-50')
test(reference_dir, '/home/ariel/university/Efficient-GPNN/scripts/outputs/reshuffle/SIGD16_target_alpha=1')
test(reference_dir, '/home/ariel/university/Efficient-GPNN/scripts/outputs/reshuffle/SIGD16_target_alpha=0.005')
test(reference_dir, '/home/ariel/university/Efficient-GPNN/scripts/outputs/reshuffle/SIGD16_target_faissIVF-50')
test(reference_dir, '/home/ariel/university/GPDM/scripts/outputs/reshuffle/SIGD16_28-noise')
test(reference_dir, '/home/ariel/university/GPDM/scripts/outputs/reshuffle/SIGD16_64_300')
sfid = calculate_sifid_given_paths(reference_dir, '/home/ariel/university/GPDM/tests/downloaded_results/jpeg_100/SIGD16_GPNN', 1, False, 64)
print(sfid)
sfid = calculate_sifid_given_paths(reference_dir, '/home/ariel/university/GPDM/tests/downloaded_results/jpeg_100/SIGD16_SINGAN', 1, False, 64)
print(sfid)
reference_dir = '/home/ariel/university/GPDM/images/Places50'
test(reference_dir, '/home/ariel/university/Efficient-GPNN/scripts/outputs/reshuffle/Places50_alpha=1')
test(reference_dir, '/home/ariel/university/Efficient-GPNN/scripts/outputs/reshuffle/Places50_alpha=0.005')
test(reference_dir, '/home/ariel/university/Efficient-GPNN/scripts/outputs/reshuffle/Places50_faissIVF-50')
test(reference_dir, '/home/ariel/university/Efficient-GPNN/scripts/outputs/reshuffle/Places50_target_alpha=1')
test(reference_dir, '/home/ariel/university/Efficient-GPNN/scripts/outputs/reshuffle/Places50_target_alpha=0.005')
test(reference_dir, '/home/ariel/university/Efficient-GPNN/scripts/outputs/reshuffle/Places50_target_faissIVF-50')
test(reference_dir, '/home/ariel/university/GPDM/scripts/outputs/reshuffle/Places50_28-noise')
test(reference_dir, '/home/ariel/university/GPDM/scripts/outputs/reshuffle/Places50_64_300')
sfid = calculate_sifid_given_paths(reference_dir, '/home/ariel/university/GPDM/tests/downloaded_results/jpeg_100/Places50_GPNN_high_var', 1, False, 64)
print(sfid)
sfid = calculate_sifid_given_paths(reference_dir, '/home/ariel/university/GPDM/tests/downloaded_results/jpeg_100/Places50_SINGAN_high_var', 1, False, 64)
print(sfid)
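# --- refactor sketch, equivalent to the repeated calls above ---
# Both the SIGD16 and Places50 blocks could be driven from a list; `test` and
# `reference_dir` are the names already defined in this script:
root = '/home/ariel/university/Efficient-GPNN/scripts/outputs/reshuffle'
for variant in ('Places50_alpha=1', 'Places50_alpha=0.005', 'Places50_faissIVF-50'):
    test(reference_dir, f'{root}/{variant}')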
| 59.701754
| 157
| 0.784014
| 462
| 3,403
| 5.536797
| 0.166667
| 0.117279
| 0.137608
| 0.18061
| 0.81939
| 0.764269
| 0.764269
| 0.704457
| 0.704457
| 0.704457
| 0
| 0.038635
| 0.087276
| 3,403
| 57
| 158
| 59.701754
| 0.784932
| 0.032031
| 0
| 0.1
| 0
| 0.125
| 0.549347
| 0.536593
| 0
| 0
| 0
| 0
| 0
| 1
| 0.025
| false
| 0
| 0.1
| 0
| 0.125
| 0.15
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
07662750f18099ecc05939fedb5ee3e7c95a054b
| 16,458
|
py
|
Python
|
tests/test_unimib_loader.py
|
1K5KKKKKei/sensorutils
|
675edd0f5a8396eb347635ef8219b1d038b19ae8
|
[
"MIT"
] | 8
|
2021-06-12T13:49:26.000Z
|
2022-02-28T16:05:53.000Z
|
tests/test_unimib_loader.py
|
1K5KKKKKei/sensorutils
|
675edd0f5a8396eb347635ef8219b1d038b19ae8
|
[
"MIT"
] | 15
|
2021-06-12T16:30:30.000Z
|
2021-12-11T13:50:01.000Z
|
tests/test_unimib_loader.py
|
1K5KKKKKei/sensorutils
|
675edd0f5a8396eb347635ef8219b1d038b19ae8
|
[
"MIT"
] | 5
|
2021-06-22T03:51:48.000Z
|
2021-07-12T08:08:22.000Z
|
import sys
import unittest
import numpy as np
import pandas as pd
import itertools
from pathlib import Path
sys.path.append('../src/')
from sensorutils.datasets.unimib import UniMib, load, load_raw, reformat
class UniMibTest(unittest.TestCase):
path = None
@classmethod
def setUpClass(cls) -> None:
if cls.path is None:
raise RuntimeError('dataset path is not specified')
def setUp(self):
self.loader = UniMib(self.path)
def tearDown(self):
pass
def test_load_fn(self):
def _check_common(self, data, meta, dtype):
# compare between data and meta
self.assertEqual(len(data), len(meta))
# meta
## type check
self.assertIsInstance(meta, pd.DataFrame)
## shape and column check
if dtype in ['full', 'adl', 'fall']:
self.assertSetEqual(set(meta.columns), set(['activity', 'subject', 'trial_id']))
elif dtype == 'raw':
self.assertSetEqual(set(meta.columns), set(['activity', 'subject', 'trial_id', 'gender', 'age', 'height', 'weight']))
else:
self.fail(f'Unexpected case, dtype: {dtype}')
## data type check
# flags_dtype = [dt == np.dtype(np.int8) or dt == np.dtype(np.int16) or dt == np.dtype(np.int32) or dt == np.dtype(np.int64) for dt in meta.dtypes]
flags_dtype = [dt == np.dtype(np.int8) for dt in meta.dtypes]
self.assertTrue(all(flags_dtype))
## data check
if dtype == 'full':
self.assertSetEqual(set(np.unique(meta['activity'])), set(range(1, 17+1)))
self.assertSetEqual(set(np.unique(meta['subject'])), set(range(1, 30+1)))
self.assertSetEqual(set(np.unique(meta['trial_id'])), set(range(1, 6+1)))
elif dtype == 'adl':
self.assertSetEqual(set(np.unique(meta['activity'])), set(range(1, 9+1)))
self.assertSetEqual(set(np.unique(meta['subject'])), set(range(1, 30+1)))
self.assertSetEqual(set(np.unique(meta['trial_id'])), set(range(1, 2+1)))
elif dtype == 'fall':
self.assertSetEqual(set(np.unique(meta['activity'])), set(range(1, 8+1)))
self.assertSetEqual(set(np.unique(meta['subject'])), set(range(1, 30+1)))
self.assertSetEqual(set(np.unique(meta['trial_id'])), set(range(1, 6+1)))
elif dtype == 'raw':
self.assertSetEqual(set(np.unique(meta['activity'])), set(range(1, 17+1)))
self.assertSetEqual(set(np.unique(meta['subject'])), set(range(1, 30+1)))
self.assertSetEqual(set(np.unique(meta['trial_id'])), set(range(1, 6+1)))
else:
self.fail(f'Unexpected case, dtype: {dtype}')
# data
## type check
self.assertIsInstance(data, list)
self.assertTrue(all(isinstance(d, pd.DataFrame) for d in data))
## shape check
flgs_shape = [len(d.shape) == 2 for d in data]
self.assertTrue(all(flgs_shape))
flgs_shape_ax1 = [d.shape[1] == 3 for d in data]
self.assertTrue(all(flgs_shape_ax1))
self.assertTrue(all(
[set(d.columns) == set(['x', 'y', 'z']) for d in data]
))
data_types = ['full', 'adl', 'fall']
for dtype in data_types:
with self.subTest(f'data type: {dtype}'):
data, meta = load(self.path, data_type=dtype)
_check_common(self, data, meta, dtype)
# data
## data check
sizes_seg = set(len(d) for d in data)
self.assertEqual(len(sizes_seg), 1)
with self.subTest('data type: raw'):
data, meta = load(self.path, data_type='raw')
_check_common(self, data, meta, 'raw')
def test_load_raw_fn(self):
def _check_common(self, data, meta, dtype):
# compare between data and meta
self.assertEqual(len(data), len(meta))
# meta
## type check
self.assertIsInstance(meta, pd.DataFrame)
if dtype in ['full', 'adl', 'fall']:
## meta - shape and column check
self.assertSetEqual(set(meta.columns), set(['activity', 'subject', 'trial_id']))
## data - type check
self.assertIsInstance(data, np.ndarray)
self.assertTrue(all(isinstance(d, np.ndarray) for d in data))
elif dtype == 'raw':
## meta - shape and column check
self.assertSetEqual(set(meta.columns), set(['activity', 'subject', 'trial_id', 'gender', 'age', 'height', 'weight']))
## data - type check
self.assertIsInstance(data, list)
self.assertTrue(all(isinstance(d, np.ndarray) for d in data))
else:
self.fail(f'Unexpected case, dtype: {dtype}')
## data type check
# flags_dtype = [dt == np.dtype(np.int8) or dt == np.dtype(np.int16) or dt == np.dtype(np.int32) or dt == np.dtype(np.int64) for dt in meta.dtypes]
flags_dtype = [dt == np.dtype(np.int8) for dt in meta.dtypes]
self.assertTrue(all(flags_dtype))
## data check
if dtype == 'full':
self.assertSetEqual(set(np.unique(meta['activity'])), set(range(1, 17+1)))
self.assertSetEqual(set(np.unique(meta['subject'])), set(range(1, 30+1)))
self.assertSetEqual(set(np.unique(meta['trial_id'])), set(range(1, 6+1)))
elif dtype == 'adl':
self.assertSetEqual(set(np.unique(meta['activity'])), set(range(1, 9+1)))
self.assertSetEqual(set(np.unique(meta['subject'])), set(range(1, 30+1)))
self.assertSetEqual(set(np.unique(meta['trial_id'])), set(range(1, 2+1)))
elif dtype == 'fall':
self.assertSetEqual(set(np.unique(meta['activity'])), set(range(1, 8+1)))
self.assertSetEqual(set(np.unique(meta['subject'])), set(range(1, 30+1)))
self.assertSetEqual(set(np.unique(meta['trial_id'])), set(range(1, 6+1)))
elif dtype == 'raw':
self.assertSetEqual(set(np.unique(meta['activity'])), set(range(1, 17+1)))
self.assertSetEqual(set(np.unique(meta['subject'])), set(range(1, 30+1)))
self.assertSetEqual(set(np.unique(meta['trial_id'])), set(range(1, 6+1)))
else:
self.fail(f'Unexpected case, dtype: {dtype}')
# data
## type check
if dtype in ['full', 'adl', 'fall']:
self.assertIsInstance(data, np.ndarray)
elif dtype == 'raw':
self.assertIsInstance(data, list)
self.assertTrue(all(isinstance(d, np.ndarray) for d in data))
else:
self.fail(f'Unexpected case, dtype: {dtype}')
## shape check
flgs_shape = [len(d.shape) == 2 for d in data]
self.assertTrue(all(flgs_shape))
flgs_shape_ax1 = [d.shape[0] == 3 for d in data] # different point
self.assertTrue(all(flgs_shape_ax1))
data_types = ['full', 'adl', 'fall']
for dtype in data_types:
with self.subTest(f'data type: {dtype}'):
raw = load_raw(self.path, data_type=dtype)
## raw - type check
self.assertIsInstance(raw, tuple)
self.assertEqual(len(raw), 2)
data, meta = raw
_check_common(self, data, meta, dtype)
# data
## data check
sizes_seg = set(len(d) for d in data)
self.assertEqual(len(sizes_seg), 1)
with self.subTest('data type: raw'):
raw = load_raw(self.path, data_type='raw')
## raw - type check
self.assertIsInstance(raw, tuple)
self.assertEqual(len(raw), 2)
data, meta = raw
_check_common(self, data, meta, 'raw')
def test_reformat_fn(self):
def _check_common(self, data, meta, dtype):
# compare between data and meta
self.assertEqual(len(data), len(meta))
# meta
## type check
self.assertIsInstance(meta, pd.DataFrame)
## shape and column check
if dtype in ['full', 'adl', 'fall']:
self.assertSetEqual(set(meta.columns), set(['activity', 'subject', 'trial_id']))
elif dtype == 'raw':
self.assertSetEqual(set(meta.columns), set(['activity', 'subject', 'trial_id', 'gender', 'age', 'height', 'weight']))
else:
self.fail(f'Unexpected case, dtype: {dtype}')
## data type check
# flags_dtype = [dt == np.dtype(np.int8) or dt == np.dtype(np.int16) or dt == np.dtype(np.int32) or dt == np.dtype(np.int64) for dt in meta.dtypes]
flags_dtype = [dt == np.dtype(np.int8) for dt in meta.dtypes]
self.assertTrue(all(flags_dtype))
## data check
if dtype == 'full':
self.assertSetEqual(set(np.unique(meta['activity'])), set(range(1, 17+1)))
self.assertSetEqual(set(np.unique(meta['subject'])), set(range(1, 30+1)))
self.assertSetEqual(set(np.unique(meta['trial_id'])), set(range(1, 6+1)))
elif dtype == 'adl':
self.assertSetEqual(set(np.unique(meta['activity'])), set(range(1, 9+1)))
self.assertSetEqual(set(np.unique(meta['subject'])), set(range(1, 30+1)))
self.assertSetEqual(set(np.unique(meta['trial_id'])), set(range(1, 2+1)))
elif dtype == 'fall':
self.assertSetEqual(set(np.unique(meta['activity'])), set(range(1, 8+1)))
self.assertSetEqual(set(np.unique(meta['subject'])), set(range(1, 30+1)))
self.assertSetEqual(set(np.unique(meta['trial_id'])), set(range(1, 6+1)))
elif dtype == 'raw':
self.assertSetEqual(set(np.unique(meta['activity'])), set(range(1, 17+1)))
self.assertSetEqual(set(np.unique(meta['subject'])), set(range(1, 30+1)))
self.assertSetEqual(set(np.unique(meta['trial_id'])), set(range(1, 6+1)))
else:
self.fail(f'Unexpected case, dtype: {dtype}')
# data
## type check
self.assertIsInstance(data, list)
self.assertTrue(all(isinstance(d, pd.DataFrame) for d in data))
## shape check
flgs_shape = [len(d.shape) == 2 for d in data]
self.assertTrue(all(flgs_shape))
flgs_shape_ax1 = [d.shape[1] == 3 for d in data] # different point
self.assertTrue(all(flgs_shape_ax1))
self.assertTrue(all(
[set(d.columns) == set(['x', 'y', 'z']) for d in data]
))
data_types = ['full', 'adl', 'fall']
for dtype in data_types:
with self.subTest(f'data type: {dtype}'):
raw = load_raw(self.path, data_type=dtype)
data, meta = reformat(raw)
_check_common(self, data, meta, dtype)
## data - data type check
sizes_seg = set(len(d) for d in data)
self.assertEqual(len(sizes_seg), 1)
with self.subTest('data type: raw'):
raw = load_raw(self.path, data_type='raw')
data, meta = reformat(raw)
_check_common(self, data, meta, 'raw')
def test_unimib_load_method_framing(self):
data_types = ['full', 'adl', 'fall']
for dtype in data_types:
with self.subTest(f'data type: {dtype}'):
x, y = self.loader.load(data_type=dtype, subjects=None)
## compare between x and y
self.assertEqual(len(x), len(y))
## type check
self.assertIsInstance(x, np.ndarray)
self.assertIsInstance(y, np.ndarray)
## data type check
self.assertEqual(x.dtype, np.dtype(np.float64))
self.assertEqual(y.dtype, np.dtype(np.int8))
## shape check
self.assertEqual(len(x.shape), 3)
self.assertTupleEqual(x.shape[1:], (3, 151))
self.assertEqual(len(y.shape), 2)
self.assertEqual(y.shape[1], 2)
## data check
if dtype == 'full':
self.assertSetEqual(set(np.unique(y[:, 0])), set(range(1, 17+1))) # activity
elif dtype == 'adl':
self.assertSetEqual(set(np.unique(y[:, 0])), set(range(1, 9+1))) # activity
elif dtype == 'fall':
self.assertSetEqual(set(np.unique(y[:, 0])), set(range(1, 8+1))) # activity
self.assertSetEqual(set(np.unique(y[:, 1])), set(range(1, 30+1))) # subject
for stride, ws in itertools.product([64, 128, 256, 512], [64, 128, 256, 512]):
with self.subTest(f'window size: {ws}, data type: raw'):
x, y = self.loader.load(data_type='raw', window_size=ws, stride=stride, ftrim_sec=2, btrim_sec=2, subjects=None)
## compare between x and y
self.assertEqual(len(x), len(y))
## type check
self.assertIsInstance(x, np.ndarray)
self.assertIsInstance(y, np.ndarray)
## data type check
self.assertEqual(x.dtype, np.dtype(np.float64))
self.assertEqual(y.dtype, np.dtype(np.int8))
## shape check
self.assertEqual(len(x.shape), 3)
self.assertTupleEqual(x.shape[1:], (3, ws))
self.assertEqual(len(y.shape), 2)
self.assertEqual(y.shape[1], 2)
## data check
### if window_size is large, some labels may not be covered
### if ftrim_sec (btrim_sec) is large, the fall labels may no longer be included
if ws == 64 and stride == 64:
self.assertSetEqual(set(np.unique(y[:, 0])), set(range(1, 17+1))) # activity
self.assertSetEqual(set(np.unique(y[:, 1])), set(range(1, 30+1))) # subject
def test_unimib_load_method_filed_subjects(self):
data_types = ['full', 'adl', 'fall']
patterns = [
[3, 30, 14, 11, 28, 26, 23, 12, 18, 24, 6, 17, 9],
[14, 7, 26, 9, 6, 21, 30, 13, 12, 3, 27, 22, 5, 11, 28, 17],
[22, 27, 13, 29, 24, 28, 30],
[11, 7, 30, 13, 8, 20, 27, 29, 23, 12, 5, 3, 2, 25, 19, 1, 26, 10, 6, 18, 17],
[14, 13, 8, 16, 6, 26, 29, 18, 23, 9, 7, 17, 5, 2, 28, 30, 20, 21, 15, 27, 22, 4],
[7, 15, 16, 8, 2, 10, 21, 6, 22, 13, 1, 24, 5, 20, 23, 11, 18, 19, 28, 17],
[19, 25, 12, 4, 11, 24, 6, 16, 13, 18, 27, 3, 10, 28, 21, 8, 14, 7, 5, 1, 9, 2, 20, 29, 23],
[15, 14, 18, 10, 23, 2, 8, 21, 3, 26, 20, 17, 22, 24, 5, 9, 27, 1, 12, 6, 29, 7, 13, 11, 28],
[7, 9, 5, 21, 20, 16, 17, 1, 18, 3, 2, 6, 22, 28, 14, 12, 11, 25, 23, 13, 19, 15, 29, 24, 26, 27, 10, 8, 30, 4],
[15, 7, 10, 1, 21, 19, 11, 29, 20, 12, 14, 16],
]
for i, subjects in enumerate(patterns):
for dtype in data_types:
with self.subTest(f'pattern {i}, data type: {dtype}'):
_, y = self.loader.load(data_type=dtype, window_size=256, stride=256, ftrim_sec=5, btrim_sec=5, subjects=subjects)
## data check
self.assertSetEqual(set(np.unique(y[:, 1])), set(subjects))
with self.subTest(f'pattern {i}, data type: raw'):
_, y = self.loader.load(data_type=dtype, window_size=256, stride=256, ftrim_sec=5, btrim_sec=5, subjects=subjects)
## data check
self.assertSetEqual(set(np.unique(y[:, 1])), set(subjects))
if __name__ == '__main__':
args = sys.argv
if len(args) != 2:
sys.stderr.write('Usage: {} <dataset path>'.format(args[0]))
sys.exit(1)
ds_path = Path(args[1])
UniMibTest.path = ds_path
unittest.main(verbosity=2, argv=args[0:1])
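# --- refactor sketch: the three test methods above repeat _check_common verbatim ---
# The shared range checks could live in one module-level helper; the bounds are
# the ones asserted above, and np is numpy as imported at the top of the file.
META_RANGES = {
    'full': (17, 30, 6),
    'adl': (9, 30, 2),
    'fall': (8, 30, 6),
    'raw': (17, 30, 6),
}

def check_meta_ranges(tc, meta, dtype):
    for col, hi in zip(('activity', 'subject', 'trial_id'), META_RANGES[dtype]):
        tc.assertSetEqual(set(np.unique(meta[col])), set(range(1, hi + 1)))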
| 45.090411
| 159
| 0.52716
| 2,111
| 16,458
| 4.046424
| 0.082899
| 0.105362
| 0.122922
| 0.118473
| 0.861859
| 0.851908
| 0.841957
| 0.829197
| 0.817841
| 0.795832
| 0
| 0.053412
| 0.319723
| 16,458
| 364
| 160
| 45.214286
| 0.709539
| 0.079232
| 0
| 0.763265
| 0
| 0
| 0.076949
| 0
| 0
| 0
| 0
| 0
| 0.420408
| 1
| 0.044898
| false
| 0.004082
| 0.028571
| 0
| 0.081633
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
ab0c284158a660c93c77983f2a66010635d071e2
| 2,514
|
py
|
Python
|
ConditionalStatement/SkiTrip.py
|
Rohitm619/Softuni-Python-Basic
|
03c9d0b44f5652c99db3b0e42014dd5af50205a2
|
[
"MIT"
] | 1
|
2020-09-22T13:25:34.000Z
|
2020-09-22T13:25:34.000Z
|
ConditionalStatement/SkiTrip.py
|
Rohitm619/Softuni-Python-Basic
|
03c9d0b44f5652c99db3b0e42014dd5af50205a2
|
[
"MIT"
] | null | null | null |
ConditionalStatement/SkiTrip.py
|
Rohitm619/Softuni-Python-Basic
|
03c9d0b44f5652c99db3b0e42014dd5af50205a2
|
[
"MIT"
] | 1
|
2020-10-17T09:27:46.000Z
|
2020-10-17T09:27:46.000Z
|
days_rest = int(input())
kind_of_room = input()
grade = input()
nights = days_rest - 1
if kind_of_room == "room for one person":
price = nights * 18.00
if grade == "positive":
last_price = price + price * 25 / 100
print(f"{last_price:.2f}")
elif grade == "negative":
last_price = price - price * 10 / 100
print(f"{last_price:.2f}")
elif kind_of_room == "apartment":
price = nights * 25.00
if nights <= 10:
prom_price = price - price * 30 / 100
if grade == "positive":
last_price = prom_price + prom_price * 25 / 100
print(f"{last_price:.2f}")
elif grade == "negative":
last_price = prom_price - prom_price * 10 / 100
print(f"{last_price:.2f}")
elif 10 <= nights <= 15:
prom_price = price - price * 45 / 100
if grade == "positive":
last_price = prom_price + prom_price * 25 / 100
print(f"{last_price:.2f}")
elif grade == "negative":
last_price = prom_price - prom_price * 10 / 100
print(f"{last_price:.2f}")
elif nights > 15:
prom_price = price - price * 50 / 100
if grade == "positive":
last_price = prom_price + prom_price * 25 / 100
print(f"{last_price:.2f}")
elif grade == "negative":
last_price = prom_price - prom_price * 10 / 100
print(f"{last_price:.2f}")
elif kind_of_room == "president apartment":
price = nights * 35.00
if nights <= 10:
prom_price = price - price * 10 / 100
if grade == "positive":
last_price = prom_price + prom_price * 25 / 100
print(f"{last_price:.2f}")
elif grade == "negative":
last_price = prom_price - prom_price * 10 / 100
print(f"{last_price:.2f}")
elif 10 <= nights <= 15:
prom_price = price - price * 15 / 100
if grade == "positive":
last_price = prom_price + prom_price * 25 / 100
print(f"{last_price:.2f}")
elif grade == "negative":
last_price = prom_price - prom_price * 10 / 100
print(f"{last_price:.2f}")
elif nights > 15:
prom_price = price - price * 20 / 100
if grade == "positive":
last_price = prom_price + prom_price * 25 / 100
print(f"{last_price:.2f}")
elif grade == "negative":
last_price = prom_price - prom_price * 10 / 100
print(f"{last_price:.2f}")
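# --- equivalent table-driven sketch (rates copied from the branches above) ---
# The if/elif ladder repeats the same arithmetic; this collapses it and should
# match the original output for the "positive"/"negative" grades it handles.
RATE = {"room for one person": 18.00, "apartment": 25.00, "president apartment": 35.00}
PROMO = {
    "apartment": [(10, 0.30), (15, 0.45), (float("inf"), 0.50)],
    "president apartment": [(10, 0.10), (15, 0.15), (float("inf"), 0.20)],
}

def ski_price(days, room, grade):
    nights = days - 1
    price = nights * RATE[room]
    for limit, discount in PROMO.get(room, []):
        if nights <= limit:
            price *= 1 - discount
            break
    if grade == "positive":
        price *= 1.25
    elif grade == "negative":
        price *= 0.90
    return price

# e.g. print(f"{ski_price(14, 'apartment', 'positive'):.2f}")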
| 37.522388
| 59
| 0.541766
| 321
| 2,514
| 4.031153
| 0.11215
| 0.208655
| 0.25966
| 0.140649
| 0.870943
| 0.846986
| 0.846986
| 0.846986
| 0.799073
| 0.799073
| 0
| 0.085068
| 0.331344
| 2,514
| 66
| 60
| 38.090909
| 0.684711
| 0
| 0
| 0.71875
| 0
| 0
| 0.152347
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.21875
| 0
| 0
| 0
| null | 1
| 1
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
ab29cca6b8583b244a74c234579ca137fb7a4d3a
| 136
|
py
|
Python
|
src/dclick/__init__.py
|
AnOtterGithubUser/double_click
|
8f369d1e55046f56c61dff05e6b98ddcca371ef3
|
[
"BSD-3-Clause"
] | 3
|
2020-07-06T14:54:42.000Z
|
2020-10-19T17:03:18.000Z
|
src/dclick/__init__.py
|
AnOtterGithubUser/double_click
|
8f369d1e55046f56c61dff05e6b98ddcca371ef3
|
[
"BSD-3-Clause"
] | 2
|
2020-11-25T15:47:00.000Z
|
2020-11-26T13:49:54.000Z
|
src/dclick/__init__.py
|
AnOtterGithubUser/double_click
|
8f369d1e55046f56c61dff05e6b98ddcca371ef3
|
[
"BSD-3-Clause"
] | null | null | null |
# Enables importing command_with_config directly from dclick without the extra layers
from dclick.decorators import command_with_config
| 45.333333
| 85
| 0.867647
| 20
| 136
| 5.7
| 0.7
| 0.22807
| 0.298246
| 0.403509
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.117647
| 136
| 2
| 86
| 68
| 0.95
| 0.610294
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
ab3a40baab7522354e21bd9f549d3026c7a42995
| 25,737
|
py
|
Python
|
setup.py
|
likewatchk/python-pcl
|
2a66797719f1b5af7d6a0d0893f697b3786db461
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
likewatchk/python-pcl
|
2a66797719f1b5af7d6a0d0893f697b3786db461
|
[
"BSD-3-Clause"
] | null | null | null |
setup.py
|
likewatchk/python-pcl
|
2a66797719f1b5af7d6a0d0893f697b3786db461
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from __future__ import print_function
from collections import defaultdict
from Cython.Distutils import build_ext
from distutils.core import setup
from distutils.extension import Extension
# from Cython.Build import cythonize # MacOS NG
from setuptools import setup, find_packages, Extension
import subprocess
import numpy
import sys
import platform
import os
import time
import shutil
from ctypes.util import find_library
setup_requires = []
install_requires = [
'filelock',
'mock',
'nose',
# RuntimeWarning: numpy.dtype size changed, may indicate binary incompatibility
# https://github.com/scikit-image/scikit-image/issues/3655
# 'numpy>=1.15.1,!=1.50.0',
# numpy.ufunc size changed, may indicate binary incompatibility.
'numpy>=1.16.1,!=1.16.2',
'Cython>=0.26.0',
]
def pkgconfig(flag):
# Equivalent in Python 2.7 (but not 2.6):
# subprocess.check_output(['pkg-config', flag] + pcl_libs).split()
p = subprocess.Popen(['pkg-config', flag] +
pcl_libs, stdout=subprocess.PIPE)
stdout, _ = p.communicate()
# Assume no evil spaces in filenames; unsure how pkg-config would
# handle those, anyway.
# decode() is required in Python 3. TODO: how do we know the encoding?
return stdout.decode().split()
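# --- standalone sketch of the check_output variant mentioned in the comment above ---
# Not part of this setup.py; `subprocess` is already imported at the top, and
# the package name at the end is only an example.
def pkgconfig_flags(flag, packages):
    out = subprocess.check_output(['pkg-config', flag] + list(packages))
    return out.decode().split()
# e.g. pkgconfig_flags('--cflags-only-I', ['pcl_common-1.8'])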
if platform.system() != "Windows":
# Not 'Windows'
if sys.platform == 'darwin':
os.environ['ARCHFLAGS'] = ''
# Try to find PCL. XXX we should only do this when trying to build or install.
PCL_SUPPORTED = ["-1.9", "-1.8", "-1.7", "-1.6", ""] # in order of preference
for pcl_version in PCL_SUPPORTED:
if subprocess.call(['pkg-config', 'pcl_common%s' % pcl_version]) == 0:
break
else:
print("%s: error: cannot find PCL, tried" %
sys.argv[0], file=sys.stderr)
for version in PCL_SUPPORTED:
print(' pkg-config pcl_common%s' % version, file=sys.stderr)
sys.exit(1)
# Find build/link options for PCL using pkg-config.
# version 1.6
# pcl_libs = ["common", "features", "filters", "io", "kdtree", "octree",
# "registration", "sample_consensus", "search", "segmentation",
# "surface", "tracking", "visualization"]
# version 1.7
if pcl_version == '-1.7':
pcl_libs = ["common", "features", "filters", "geometry",
"io", "kdtree", "keypoints", "octree", "outofcore", "people",
"recognition", "registration", "sample_consensus", "search",
"segmentation", "surface", "tracking", "visualization"]
else:
# version 1.8
pcl_libs = ["2d", "common", "features", "filters", "geometry",
"io", "kdtree", "keypoints", "ml", "octree", "outofcore", "people",
"recognition", "registration", "sample_consensus", "search",
"segmentation", "stereo", "surface", "tracking", "visualization"]
pcl_libs = ["pcl_%s%s" % (lib, pcl_version) for lib in pcl_libs]
ext_args = defaultdict(list)
ext_args['include_dirs'].append(numpy.get_include())
for flag in pkgconfig('--cflags-only-I'):
ext_args['include_dirs'].append(flag[2:])
# OpenNI?
# "-I/usr/include/openni"
# "-I/usr/include/openni"
# /usr/include/ni
ext_args['include_dirs'].append('/usr/include/ni')
# ext_args['library_dirs'].append()
# ext_args['libraries'].append()
# OpenNI2
ext_args['include_dirs'].append('/usr/include/openni2')
# VTK use
if sys.platform == 'darwin':
# pcl 1.8.1(MacOSX)
# if pcl_version == '-1.8':
# vtk_version = '8.0'
# ext_args['include_dirs'].append('/usr/local/include/vtk-' + vtk_version)
# ext_args['library_dirs'].append('/usr/local/lib')
# ext_args['include_dirs'].append('/usr/local/Cellar/vtk/8.0.1/include')
# ext_args['library_dirs'].append('/usr/local/Cellar/vtk/8.0.1/lib')
if pcl_version == '-1.9':
# pcl 1.9.1
# build install?
# vtk_version = '8.1'
# vtk_include_dir = os.path.join('/usr/local' ,'include/vtk-8.1')
# vtk_library_dir = os.path.join('/usr/local', 'lib')
# homebrew(MacOSX homebrew)
# (pcl 1.9.1_3)
# vtk_version = '8.1.2_3'
# vtk_include_dir = os.path.join('/usr/local/Cellar/vtk', vtk_version ,'include/vtk-8.2')
# 2019/05/08 check(pcl 1.9.1_4)
vtk_version = '8.2.0'
vtk_include_dir = os.path.join('/usr/local/Cellar/vtk', vtk_version ,'include/vtk-8.2')
vtk_library_dir = os.path.join('/usr/local/Cellar/vtk', vtk_version, 'lib')
pass
else:
# pcl 1.7.0?(Ubuntu 14.04)
# vtk_version = '5.8'
# ext_args['include_dirs'].append('/usr/include/vtk-' + vtk_version)
# ext_args['library_dirs'].append('/usr/lib')
# pcl 1.7.2(Ubuntu 16.04)(xenial)
if pcl_version == '-1.7':
vtk_version = '6.2'
vtk_include_dir = os.path.join('/usr/include/vtk-' + vtk_version)
vtk_library_dir = os.path.join('/usr/lib')
elif pcl_version == '-1.8':
# pcl 1.8.0/1?(Ubuntu 18.04)(melodic)
vtk_version = '7.1'
# pcl 1.8.1?
# vtk_version = '8.0'
vtk_include_dir = os.path.join('/usr/local/include/vtk-' + vtk_version)
vtk_library_dir = os.path.join('/usr/local/lib')
elif pcl_version == '-1.9':
# pcl 1.9.1
# build install?
vtk_version = '8.1'
vtk_include_dir = os.path.join('/usr/include/vtk-' + vtk_version)
vtk_library_dir = os.path.join('/usr/lib')
else:
pass
# other
# pcl 1.9.1(Conda)
# vtk_version = '8.1'
# vtk_include_dir = os.path.join(os.environ["PREFIX"] ,'include/vtk-8.1')
# vtk_library_dir = os.path.join(os.environ["PREFIX"], 'lib')
ext_args['include_dirs'].append(vtk_include_dir)
ext_args['library_dirs'].append(vtk_library_dir)
if vtk_version == '7.1':
# apt package?(vtk use OpenGL?)
vtklibreleases = ['vtkalglib-' + vtk_version, 'vtkChartsCore-' + vtk_version, 'vtkCommonColor-' + vtk_version, 'vtkCommonComputationalGeometry-' + vtk_version, 'vtkCommonCore-' + vtk_version, 'vtkCommonDataModel-' + vtk_version, 'vtkCommonExecutionModel-' + vtk_version, 'vtkCommonMath-' + vtk_version, 'vtkCommonMisc-' + vtk_version, 'vtkCommonSystem-' + vtk_version, 'vtkCommonTransforms-' + vtk_version, 'vtkDICOMParser-' + vtk_version, 'vtkDomainsChemistry-' + vtk_version, 'vtkexoIIc-' + vtk_version, 'vtkexpat-' + vtk_version, 'vtkFiltersAMR-' + vtk_version, 'vtkFiltersCore-' + vtk_version, 'vtkFiltersExtraction-' + vtk_version, 'vtkFiltersFlowPaths-' + vtk_version, 'vtkFiltersGeneral-' + vtk_version, 'vtkFiltersGeneric-' + vtk_version, 'vtkFiltersGeometry-' + vtk_version, 'vtkFiltersHybrid-' + vtk_version, 'vtkFiltersHyperTree-' + vtk_version, 'vtkFiltersImaging-' + vtk_version, 'vtkFiltersModeling-' + vtk_version, 'vtkFiltersParallel-' + vtk_version, 'vtkFiltersParallelImaging-' + vtk_version, 'vtkFiltersProgrammable-' + vtk_version, 'vtkFiltersSelection-' + vtk_version, 'vtkFiltersSMP-' + vtk_version, 'vtkFiltersSources-' + vtk_version, 'vtkFiltersStatistics-' + vtk_version, 'vtkFiltersTexture-' + vtk_version, 'vtkFiltersVerdict-' + vtk_version, 'vtkfreetype-' + vtk_version, 'vtkGeovisCore-' + vtk_version, 'vtkgl2ps-' + vtk_version, 'vtkhdf5-' + vtk_version, 'vtkhdf5_hl-' + vtk_version, 'vtkImagingColor-' + vtk_version, 'vtkImagingCore-' + vtk_version, 'vtkImagingFourier-' + vtk_version, 'vtkImagingGeneral-' + vtk_version, 'vtkImagingHybrid-' + vtk_version, 'vtkImagingMath-' + vtk_version, 'vtkImagingMorphological-' + vtk_version, 'vtkImagingSources-' + vtk_version, 'vtkImagingStatistics-' + vtk_version, 'vtkImagingStencil-' + vtk_version, 'vtkInfovisCore-' + vtk_version, 'vtkInfovisLayout-' + vtk_version, 'vtkInteractionImage-' + vtk_version, 'vtkInteractionStyle-' + vtk_version, 'vtkInteractionWidgets-' + vtk_version, 'vtkIOAMR-' + vtk_version, 'vtkIOCore-' + vtk_version, 'vtkIOEnSight-' + vtk_version, 'vtkIOExodus-' + vtk_version, 'vtkIOExport-' + vtk_version, 'vtkIOGeometry-' + vtk_version, 'vtkIOImage-' + vtk_version, 'vtkIOImport-' + vtk_version, 'vtkIOInfovis-' + vtk_version, 'vtkIOLegacy-' + vtk_version, 'vtkIOLSDyna-' + vtk_version, 'vtkIOMINC-' + vtk_version, 'vtkIOMovie-' + vtk_version, 'vtkIONetCDF-' + vtk_version, 'vtkIOParallel-' + vtk_version, 'vtkIOParallelXML-' + vtk_version, 'vtkIOPLY-' + vtk_version, 'vtkIOSQL-' + vtk_version, 'vtkIOVideo-' + vtk_version, 'vtkIOXML-' + vtk_version, 'vtkIOXMLParser-' + vtk_version, 'vtkjpeg-' + vtk_version, 'vtkjsoncpp-' + vtk_version, 'vtklibxml2-' + vtk_version, 'vtkmetaio-' + vtk_version, 'vtkNetCDF-' + vtk_version, 'vtkoggtheora-' + vtk_version, 'vtkParallelCore-' + vtk_version, 'vtkpng-' + vtk_version, 'vtkproj4-' + vtk_version, 'vtkRenderingAnnotation-' + vtk_version, 'vtkRenderingContext2D-' + vtk_version, 'vtkRenderingCore-' + vtk_version, 'vtkRenderingFreeType-' + vtk_version, 'vtkRenderingImage-' + vtk_version, 'vtkRenderingLabel-' + vtk_version, 'vtkRenderingLOD-' + vtk_version, 'vtkRenderingVolume-' + vtk_version, 'vtksqlite-' + vtk_version, 'vtksys-' + vtk_version, 'vtktiff-' + vtk_version, 'vtkverdict-' + vtk_version, 'vtkViewsContext2D-' + vtk_version, 'vtkViewsCore-' + vtk_version, 'vtkViewsInfovis-' + vtk_version, 'vtkzlib-' + vtk_version]
elif vtk_version == '8.0':
# vtklibreleases = ['vtkalglib-' + vtk_version, 'vtkChartsCore-' + vtk_version, 'vtkCommonColor-' + vtk_version, 'vtkCommonComputationalGeometry-' + vtk_version, 'vtkCommonCore-' + vtk_version, 'vtkCommonDataModel-' + vtk_version, 'vtkCommonExecutionModel-' + vtk_version, 'vtkCommonMath-' + vtk_version, 'vtkCommonMisc-' + vtk_version, 'vtkCommonSystem-' + vtk_version, 'vtkCommonTransforms-' + vtk_version, 'vtkDICOMParser-' + vtk_version, 'vtkDomainsChemistry-' + vtk_version, 'vtkexoIIc-' + vtk_version, 'vtkFiltersAMR-' + vtk_version, 'vtkFiltersCore-' + vtk_version, 'vtkFiltersExtraction-' + vtk_version, 'vtkFiltersFlowPaths-' + vtk_version, 'vtkFiltersGeneral-' + vtk_version, 'vtkFiltersGeneric-' + vtk_version, 'vtkFiltersGeometry-' + vtk_version, 'vtkFiltersHybrid-' + vtk_version, 'vtkFiltersHyperTree-' + vtk_version, 'vtkFiltersImaging-' + vtk_version, 'vtkFiltersModeling-' + vtk_version, 'vtkFiltersParallel-' + vtk_version, 'vtkFiltersParallelImaging-' + vtk_version, 'vtkFiltersProgrammable-' + vtk_version, 'vtkFiltersSelection-' + vtk_version, 'vtkFiltersSMP-' + vtk_version, 'vtkFiltersSources-' + vtk_version, 'vtkFiltersStatistics-' + vtk_version, 'vtkFiltersTexture-' + vtk_version, 'vtkFiltersVerdict-' + vtk_version, 'vtkGeovisCore-' + vtk_version, 'vtkgl2ps-' + vtk_version, 'vtkhdf5-' + vtk_version, 'vtkhdf5_hl-' + vtk_version, 'vtkImagingColor-' + vtk_version, 'vtkImagingCore-' + vtk_version, 'vtkImagingFourier-' + vtk_version, 'vtkImagingGeneral-' + vtk_version, 'vtkImagingHybrid-' + vtk_version, 'vtkImagingMath-' + vtk_version, 'vtkImagingMorphological-' + vtk_version, 'vtkImagingSources-' + vtk_version, 'vtkImagingStatistics-' + vtk_version, 'vtkImagingStencil-' + vtk_version, 'vtkInfovisCore-' + vtk_version, 'vtkInfovisLayout-' + vtk_version, 'vtkInteractionImage-' + vtk_version, 'vtkInteractionStyle-' + vtk_version, 'vtkInteractionWidgets-' + vtk_version, 'vtkIOAMR-' + vtk_version, 'vtkIOCore-' + vtk_version, 'vtkIOEnSight-' + vtk_version, 'vtkIOExodus-' + vtk_version, 'vtkIOExport-' + vtk_version, 'vtkIOGeometry-' + vtk_version, 'vtkIOImage-' + vtk_version, 'vtkIOImport-' + vtk_version, 'vtkIOInfovis-' + vtk_version, 'vtkIOLegacy-' + vtk_version, 'vtkIOLSDyna-' + vtk_version, 'vtkIOMINC-' + vtk_version, 'vtkIOMovie-' + vtk_version, 'vtkIONetCDF-' + vtk_version, 'vtkIOParallel-' + vtk_version, 'vtkIOParallelXML-' + vtk_version, 'vtkIOPLY-' + vtk_version, 'vtkIOSQL-' + vtk_version, 'vtkIOVideo-' + vtk_version, 'vtkIOXML-' + vtk_version, 'vtkIOXMLParser-' + vtk_version, 'vtkjsoncpp-' + vtk_version, 'vtkmetaio-' + vtk_version, 'vtkNetCDF-' + vtk_version, 'vtkoggtheora-' + vtk_version, 'vtkParallelCore-' + vtk_version, 'vtkproj4-' + vtk_version, 'vtkRenderingAnnotation-' + vtk_version, 'vtkRenderingContext2D-' + vtk_version, 'vtkRenderingCore-' + vtk_version, 'vtkRenderingFreeType-' + vtk_version, 'vtkRenderingGL2PSOpenGL2-' + vtk_version, 'vtkRenderingImage-' + vtk_version, 'vtkRenderingLabel-' + vtk_version, 'vtkRenderingLOD-' + vtk_version, 'vtkRenderingOpenGL-' + vtk_version, 'vtkRenderingVolume-' + vtk_version, 'vtkRenderingVolumeOpenGL-' + vtk_version, 'vtksqlite-' + vtk_version, 'vtksys-' + vtk_version, 'vtktiff-' + vtk_version, 'vtkverdict-' + vtk_version, 'vtkViewsContext2D-' + vtk_version, 'vtkViewsCore-' + vtk_version, 'vtkViewsInfovis-' + vtk_version, 'vtkzlib-' + vtk_version]
# Library list for VTK 8.0 (apt package? does this VTK build use the OpenGL2 backend?)
vtklibreleases = ['vtkalglib-' + vtk_version, 'vtkChartsCore-' + vtk_version, 'vtkCommonColor-' + vtk_version, 'vtkCommonComputationalGeometry-' + vtk_version, 'vtkCommonCore-' + vtk_version, 'vtkCommonDataModel-' + vtk_version, 'vtkCommonExecutionModel-' + vtk_version, 'vtkCommonMath-' + vtk_version, 'vtkCommonMisc-' + vtk_version, 'vtkCommonSystem-' + vtk_version, 'vtkCommonTransforms-' + vtk_version, 'vtkDICOMParser-' + vtk_version, 'vtkDomainsChemistry-' + vtk_version, 'vtkDomainsChemistryOpenGL2-' + vtk_version, 'vtkexoIIc-' + vtk_version, 'vtkFiltersAMR-' + vtk_version, 'vtkFiltersCore-' + vtk_version, 'vtkFiltersExtraction-' + vtk_version, 'vtkFiltersFlowPaths-' + vtk_version, 'vtkFiltersGeneral-' + vtk_version, 'vtkFiltersGeneric-' + vtk_version, 'vtkFiltersGeometry-' + vtk_version, 'vtkFiltersHybrid-' + vtk_version, 'vtkFiltersHyperTree-' + vtk_version, 'vtkFiltersImaging-' + vtk_version, 'vtkFiltersModeling-' + vtk_version, 'vtkFiltersParallel-' + vtk_version, 'vtkFiltersParallelImaging-' + vtk_version, 'vtkFiltersPoints-' + vtk_version, 'vtkFiltersProgrammable-' + vtk_version, 'vtkFiltersPython-' + vtk_version, 'vtkFiltersSelection-' + vtk_version, 'vtkFiltersSMP-' + vtk_version, 'vtkFiltersSources-' + vtk_version, 'vtkFiltersStatistics-' + vtk_version, 'vtkFiltersTexture-' + vtk_version, 'vtkFiltersTopology-' + vtk_version, 'vtkFiltersVerdict-' + vtk_version, 'vtkGeovisCore-' + vtk_version, 'vtkgl2ps-' + vtk_version, 'vtkglew-' + vtk_version, 'vtkImagingColor-' + vtk_version, 'vtkImagingCore-' + vtk_version, 'vtkImagingFourier-' + vtk_version, 'vtkImagingGeneral-' + vtk_version, 'vtkImagingHybrid-' + vtk_version, 'vtkImagingMath-' + vtk_version, 'vtkImagingMorphological-' + vtk_version, 'vtkImagingSources-' + vtk_version, 'vtkImagingStatistics-' + vtk_version, 'vtkImagingStencil-' + vtk_version, 'vtkInfovisCore-' + vtk_version, 'vtkInfovisLayout-' + vtk_version, 'vtkInteractionImage-' + vtk_version, 'vtkInteractionStyle-' + vtk_version, 'vtkInteractionWidgets-' + vtk_version, 'vtkIOAMR-' + vtk_version, 'vtkIOCore-' + vtk_version, 'vtkIOEnSight-' + vtk_version, 'vtkIOExodus-' + vtk_version, 'vtkIOExport-' + vtk_version, 'vtkIOExportOpenGL2-' + vtk_version, 'vtkIOGeometry-' + vtk_version, 'vtkIOImage-' + vtk_version, 'vtkIOImport-' + vtk_version, 'vtkIOInfovis-' + vtk_version, 'vtkIOLegacy-' + vtk_version, 'vtkIOLSDyna-' + vtk_version, 'vtkIOMINC-' + vtk_version, 'vtkIOMovie-' + vtk_version, 'vtkIONetCDF-' + vtk_version, 'vtkIOParallel-' + vtk_version, 'vtkIOParallelXML-' + vtk_version, 'vtkIOPLY-' + vtk_version, 'vtkIOSQL-' + vtk_version, 'vtkIOTecplotTable-' + vtk_version, 'vtkIOVideo-' + vtk_version, 'vtkIOXML-' + vtk_version, 'vtkIOXMLParser-' + vtk_version, 'vtklibharu-' + vtk_version, 'vtkmetaio-' + vtk_version, 'vtkoggtheora-' + vtk_version, 'vtkParallelCore-' + vtk_version, 'vtkproj4-' + vtk_version, 'vtkPythonInterpreter-' + vtk_version, 'vtkRenderingAnnotation-' + vtk_version, 'vtkRenderingContext2D-' + vtk_version, 'vtkRenderingContextOpenGL2-' + vtk_version, 'vtkRenderingCore-' + vtk_version, 'vtkRenderingFreeType-' + vtk_version, 'vtkRenderingGL2PS-' + vtk_version, 'vtkRenderingImage-' + vtk_version, 'vtkRenderingLabel-' + vtk_version, 'vtkRenderingLOD-' + vtk_version, 'vtkRenderingMatplotlib-' + vtk_version, 'vtkRenderingOpenGL2-' + vtk_version, 'vtkRenderingVolume-' + vtk_version, 'vtkRenderingVolumeOpenGL2-' + vtk_version, 'vtksqlite-' + vtk_version, 'vtksys-' + vtk_version, 'vtkverdict-' + vtk_version, 'vtkViewsContext2D-' + vtk_version, 'vtkViewsCore-' + 
vtk_version, 'vtkViewsInfovis-' + vtk_version, 'vtkWrappingTools-' + vtk_version]
elif vtk_version == '8.1':
# Matches PCL 1.9.1.
# VTK 8.1 as installed via conda or built from source (also on macOS).
vtklibreleases = ['vtkalglib-' + vtk_version, 'vtkChartsCore-' + vtk_version, 'vtkCommonColor-' + vtk_version, 'vtkCommonComputationalGeometry-' + vtk_version, 'vtkCommonCore-' + vtk_version, 'vtkCommonDataModel-' + vtk_version, 'vtkCommonExecutionModel-' + vtk_version, 'vtkCommonMath-' + vtk_version, 'vtkCommonMisc-' + vtk_version, 'vtkCommonSystem-' + vtk_version, 'vtkCommonTransforms-' + vtk_version, 'vtkDICOMParser-' + vtk_version, 'vtkDomainsChemistry-' + vtk_version, 'vtkDomainsChemistryOpenGL2-' + vtk_version, 'vtkexoIIc-' + vtk_version, 'vtkFiltersAMR-' + vtk_version, 'vtkFiltersCore-' + vtk_version, 'vtkFiltersExtraction-' + vtk_version, 'vtkFiltersFlowPaths-' + vtk_version, 'vtkFiltersGeneral-' + vtk_version, 'vtkFiltersGeneric-' + vtk_version, 'vtkFiltersGeometry-' + vtk_version, 'vtkFiltersHybrid-' + vtk_version, 'vtkFiltersHyperTree-' + vtk_version, 'vtkFiltersImaging-' + vtk_version, 'vtkFiltersModeling-' + vtk_version, 'vtkFiltersParallel-' + vtk_version, 'vtkFiltersParallelImaging-' + vtk_version, 'vtkFiltersPoints-' + vtk_version, 'vtkFiltersProgrammable-' + vtk_version, 'vtkFiltersPython-' + vtk_version, 'vtkFiltersSelection-' + vtk_version, 'vtkFiltersSMP-' + vtk_version, 'vtkFiltersSources-' + vtk_version, 'vtkFiltersStatistics-' + vtk_version, 'vtkFiltersTexture-' + vtk_version, 'vtkFiltersTopology-' + vtk_version, 'vtkFiltersVerdict-' + vtk_version, 'vtkGeovisCore-' + vtk_version, 'vtkgl2ps-' + vtk_version, 'vtkglew-' + vtk_version, 'vtkImagingColor-' + vtk_version, 'vtkImagingCore-' + vtk_version, 'vtkImagingFourier-' + vtk_version, 'vtkImagingGeneral-' + vtk_version, 'vtkImagingHybrid-' + vtk_version, 'vtkImagingMath-' + vtk_version, 'vtkImagingMorphological-' + vtk_version, 'vtkImagingSources-' + vtk_version, 'vtkImagingStatistics-' + vtk_version, 'vtkImagingStencil-' + vtk_version, 'vtkInfovisCore-' + vtk_version, 'vtkInfovisLayout-' + vtk_version, 'vtkInteractionImage-' + vtk_version, 'vtkInteractionStyle-' + vtk_version, 'vtkInteractionWidgets-' + vtk_version, 'vtkIOAMR-' + vtk_version, 'vtkIOCore-' + vtk_version, 'vtkIOEnSight-' + vtk_version, 'vtkIOExodus-' + vtk_version, 'vtkIOExport-' + vtk_version, 'vtkIOExportOpenGL2-' + vtk_version, 'vtkIOGeometry-' + vtk_version, 'vtkIOImage-' + vtk_version, 'vtkIOImport-' + vtk_version, 'vtkIOInfovis-' + vtk_version, 'vtkIOLegacy-' + vtk_version, 'vtkIOLSDyna-' + vtk_version, 'vtkIOMINC-' + vtk_version, 'vtkIOMovie-' + vtk_version, 'vtkIONetCDF-' + vtk_version, 'vtkIOParallel-' + vtk_version, 'vtkIOParallelXML-' + vtk_version, 'vtkIOPLY-' + vtk_version, 'vtkIOSQL-' + vtk_version, 'vtkIOTecplotTable-' + vtk_version, 'vtkIOVideo-' + vtk_version, 'vtkIOXML-' + vtk_version, 'vtkIOXMLParser-' + vtk_version, 'vtklibharu-' + vtk_version, 'vtkmetaio-' + vtk_version, 'vtknetcdfcpp-' + vtk_version, 'vtkoggtheora-' + vtk_version, 'vtkParallelCore-' + vtk_version, 'vtkproj4-' + vtk_version, 'vtkPythonInterpreter-' + vtk_version, 'vtkRenderingAnnotation-' + vtk_version, 'vtkRenderingContext2D-' + vtk_version, 'vtkRenderingContextOpenGL2-' + vtk_version, 'vtkRenderingCore-' + vtk_version, 'vtkRenderingFreeType-' + vtk_version, 'vtkRenderingGL2PSOpenGL2-' + vtk_version, 'vtkRenderingImage-' + vtk_version, 'vtkRenderingLabel-' + vtk_version, 'vtkRenderingLOD-' + vtk_version, 'vtkRenderingMatplotlib-' + vtk_version, 'vtkRenderingOpenGL2-' + vtk_version, 'vtkRenderingVolume-' + vtk_version, 'vtkRenderingVolumeOpenGL2-' + vtk_version, 'vtksqlite-' + vtk_version, 'vtksys-' + vtk_version, 'vtkverdict-' + vtk_version, 'vtkViewsContext2D-' + 
vtk_version, 'vtkViewsCore-' + vtk_version, 'vtkViewsInfovis-' + vtk_version, 'vtkWrappingTools-' + vtk_version]
else:
vtklibreleases = []
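# For any other VTK version no component libraries are added; linking of
# the visualization extension may then fail at build time, so this branch
# is only a best-effort fallback.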
for librelease in vtklibreleases:
ext_args['libraries'].append(librelease)
for flag in pkgconfig('--cflags-only-other'):
if flag.startswith('-D'):
# pkg-config emits both '-DNAME=VALUE' and bare '-DNAME' defines;
# unpacking the split blindly would raise ValueError on the latter.
parts = flag[2:].split('=', 1)
value = parts[1] if len(parts) == 2 else None
ext_args['define_macros'].append((parts[0], value))
else:
ext_args['extra_compile_args'].append(flag)
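# Illustrative only (hypothetical flags): '-DEIGEN_MAX_ALIGN_BYTES=32'
# becomes ('EIGEN_MAX_ALIGN_BYTES', '32') in define_macros, a bare
# '-DBOOST_ALL_NO_LIB' becomes ('BOOST_ALL_NO_LIB', None), and anything
# else (e.g. '-pthread') is passed through as an extra compiler argument.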
# Which compiler / C++ standard library?
# https://github.com/strawlab/python-pcl/issues/129
# gcc links against libstdc++ by default; clang on macOS defaults to libc++.
# ext_args['extra_compile_args'].append("-stdlib=libstdc++")
# ext_args['extra_compile_args'].append("-stdlib=libc++")
if sys.platform == 'darwin':
# Assume clang, not gcc, on macOS.
# ext_args['extra_compile_args'].append("-stdlib=libstdc++")
# clang/libc++ needs a minimum deployment target (10.7? 10.9?);
# we set a minimum deployment target of OS X 10.9.
ext_args['extra_compile_args'].append("-stdlib=libc++")
ext_args['extra_compile_args'].append("-mmacosx-version-min=10.9")
ext_args['extra_link_args'].append("-stdlib=libc++")
ext_args['extra_link_args'].append("-mmacosx-version-min=10.9")
# C++11 is required; without it VTK headers fail to compile ('override' specifier errors).
ext_args['extra_compile_args'].append("-std=c++11")
# To use OpenMP on macOS
# (https://iscinumpy.gitlab.io/post/omp-on-high-sierra/),
# first install libomp:
# $ brew install libomp
# then uncomment the two lines below.
# ext_args['extra_compile_args'].extend(['-Xpreprocessor', '-fopenmp'])
# ext_args['extra_link_args'].extend(['-Xpreprocessor', '-fopenmp', '-lomp'])
pass
else:
ext_args['extra_compile_args'].append("-std=c++11")
ext_args['library_dirs'].append("/usr/lib/x86_64-linux-gnu/")
# To pin the C++ standard library explicitly:
# with gcc (libstdc++):
# ext_args['extra_compile_args'].append("-stdlib=libstdc++")
# ext_args['extra_link_args'].append("-stdlib=libstdc++")
# with clang (libc++):
# ext_args['extra_compile_args'].append("-stdlib=libc++")
# ext_args['extra_link_args'].append("-stdlib=libc++")
# To enable OpenMP on Linux, uncomment:
# ext_args['extra_compile_args'].append('-fopenmp')
# ext_args['extra_link_args'].append('-fopenmp')
pass
for flag in pkgconfig('--libs-only-l'):
if flag == "-lflann_cpp-gd":
print(
"skipping -lflann_cpp-gd (see https://github.com/strawlab/python-pcl/issues/29")
continue
ext_args['libraries'].append(flag[2:])
for flag in pkgconfig('--libs-only-L'):
ext_args['library_dirs'].append(flag[2:])
for flag in pkgconfig('--libs-only-other'):
ext_args['extra_link_args'].append(flag)
# The (disabled) grabber module would additionally need Boost libraries:
# ext_args['extra_link_args'].append('-lboost_system')
# on macOS, the multi-threaded variant instead:
# ext_args['extra_link_args'].append('-lboost_system_mt')
# ext_args['extra_link_args'].append('-lboost_bind')
# Fix compile error on Ubuntu 12.04 (e.g., Travis-CI).
ext_args['define_macros'].append(
("EIGEN_YES_I_KNOW_SPARSE_MODULE_IS_NOT_STABLE_YET", "1"))
if pcl_version == '-1.8':
module = [Extension("pcl._pcl", ["pcl/_pcl_180.pyx", "pcl/minipcl.cpp", "pcl/ProjectInliers.cpp"], language="c++", **ext_args),
Extension("pcl.pcl_visualization", ["pcl/pcl_visualization.pyx"], language="c++", **ext_args),
# Extension("pcl.pcl_grabber", ["pcl/pcl_grabber.pyx", "pcl/grabber_callback.cpp"], language="c++", **ext_args),
# debug
# gdb_debug=True,
]
elif pcl_version == '-1.9':
module = [Extension("pcl._pcl", ["pcl/_pcl_190.pyx", "pcl/minipcl.cpp", "pcl/ProjectInliers.cpp"], language="c++", **ext_args),
Extension("pcl.pcl_visualization", ["pcl/pcl_visualization.pyx"], language="c++", **ext_args),
# Extension("pcl.pcl_grabber", ["pcl/pcl_grabber.pyx", "pcl/grabber_callback.cpp"], language="c++", **ext_args),
# debug
# gdb_debug=True,
]
else:
print('Error: PCL not found. Is PCL installed and discoverable by pkg-config?')
sys.exit(1)
listDlls = []
data_files = None
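# (listDlls is presumably a leftover from the Windows build path, where
# bundled DLLs would be collected into data_files; it is unused here.)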
setup(name='python-pcl',
description='Python bindings for the Point Cloud Library (PCL), written in Cython.',
url='http://github.com/strawlab/python-pcl',
version='0.3.0rc1',
author='John Stowers',
author_email='john.stowers@gmail.com',
maintainer='Tooru Oonuma',
maintainer_email='t753github@gmail.com',
license='BSD',
packages=[
"pcl",
# "pcl.pcl_visualization",
],
zip_safe=False,
setup_requires=setup_requires,
install_requires=install_requires,
classifiers=[
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
],
tests_require=['mock', 'nose'],
ext_modules=module,
cmdclass={'build_ext': build_ext},
data_files=data_files
)
ab4e3a59abdd7e28139e3b3682c86743ec6f8a5c | 228,390 | py | Python | src/plugins/gbp/test/test_gbp.py | adwait1-G/vpp | 5f9f3c8de8d5d9f83a437661a98e5cc2453705f9 | ["Apache-2.0"] | null | null | null |
#!/usr/bin/env python3
from socket import AF_INET, AF_INET6
import unittest
from ipaddress import ip_address, IPv4Network, IPv6Network
from scapy.packet import Raw
from scapy.layers.l2 import Ether, ARP, Dot1Q
from scapy.layers.inet import IP, UDP, ICMP
from scapy.layers.inet6 import IPv6, ICMPv6ND_NS, ICMPv6NDOptSrcLLAddr, \
ICMPv6ND_NA, ICMPv6EchoRequest
from scapy.utils6 import in6_getnsma, in6_getnsmac
from scapy.layers.vxlan import VXLAN
from scapy.data import ETH_P_IP, ETH_P_IPV6, ETH_P_ARP
from scapy.utils import inet_pton, inet_ntop
from framework import VppTestCase, VppTestRunner
from vpp_object import VppObject
from vpp_interface import VppInterface
from vpp_ip_route import VppIpRoute, VppRoutePath, VppIpTable, \
VppIpInterfaceAddress, VppIpInterfaceBind, find_route, FibPathProto, \
FibPathType
from vpp_l2 import VppBridgeDomain, VppBridgeDomainPort, \
VppBridgeDomainArpEntry, VppL2FibEntry, find_bridge_domain_port, VppL2Vtr
from vpp_sub_interface import L2_VTR_OP, VppDot1QSubint
from vpp_ip import DpoProto, get_dpo_proto
from vpp_papi import VppEnum, MACAddress
from vpp_vxlan_gbp_tunnel import find_vxlan_gbp_tunnel, INDEX_INVALID, \
VppVxlanGbpTunnel
from vpp_neighbor import VppNeighbor
try:
text_type = unicode
except NameError:
text_type = str
NUM_PKTS = 67
def find_gbp_endpoint(test, sw_if_index=None, ip=None, mac=None,
tep=None, sclass=None):
if ip:
vip = ip
if mac:
vmac = MACAddress(mac)
eps = test.vapi.gbp_endpoint_dump()
for ep in eps:
if tep:
src = tep[0]
dst = tep[1]
if src != str(ep.endpoint.tun.src) or \
dst != str(ep.endpoint.tun.dst):
continue
if sw_if_index:
if ep.endpoint.sw_if_index != sw_if_index:
continue
if sclass:
if ep.endpoint.sclass != sclass:
continue
if ip:
for eip in ep.endpoint.ips:
if vip == str(eip):
return True
if mac:
if vmac == ep.endpoint.mac:
return True
return False
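# Illustrative usage (hypothetical values): match by interface and IP, or
# by TEP pair and MAC for endpoints learnt over a VXLAN-GBP tunnel:
# find_gbp_endpoint(self, sw_if_index=ep.itf.sw_if_index, ip="10.0.0.1")
# find_gbp_endpoint(self, tep=(tun_src, tun_dst), mac="00:00:11:11:11:01")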
def find_gbp_vxlan(test, vni):
ts = test.vapi.gbp_vxlan_tunnel_dump()
for t in ts:
if t.tunnel.vni == vni:
return True
return False
class VppGbpEndpoint(VppObject):
"""
GBP Endpoint
"""
@property
def mac(self):
return str(self.vmac)
@property
def ip4(self):
return self._ip4
@property
def fip4(self):
return self._fip4
@property
def ip6(self):
return self._ip6
@property
def fip6(self):
return self._fip6
@property
def ips(self):
return [self.ip4, self.ip6]
@property
def fips(self):
return [self.fip4, self.fip6]
def __init__(self, test, itf, epg, recirc, ip4, fip4, ip6, fip6,
flags=0,
tun_src="0.0.0.0",
tun_dst="0.0.0.0",
mac=True):
self._test = test
self.itf = itf
self.epg = epg
self.recirc = recirc
self._ip4 = ip4
self._fip4 = fip4
self._ip6 = ip6
self._fip6 = fip6
if mac:
self.vmac = MACAddress(self.itf.remote_mac)
else:
self.vmac = MACAddress("00:00:00:00:00:00")
self.flags = flags
self.tun_src = tun_src
self.tun_dst = tun_dst
def add_vpp_config(self):
res = self._test.vapi.gbp_endpoint_add(
self.itf.sw_if_index,
[self.ip4, self.ip6],
self.vmac.packed,
self.epg.sclass,
self.flags,
self.tun_src,
self.tun_dst)
self.handle = res.handle
self._test.registry.register(self, self._test.logger)
def remove_vpp_config(self):
self._test.vapi.gbp_endpoint_del(self.handle)
def object_id(self):
return "gbp-endpoint:[%d==%d:%s:%d]" % (self.handle,
self.itf.sw_if_index,
self.ip4,
self.epg.sclass)
def query_vpp_config(self):
return find_gbp_endpoint(self._test,
self.itf.sw_if_index,
self.ip4)
class VppGbpRecirc(VppObject):
"""
GBP Recirculation Interface
"""
def __init__(self, test, epg, recirc, is_ext=False):
self._test = test
self.recirc = recirc
self.epg = epg
self.is_ext = is_ext
def add_vpp_config(self):
self._test.vapi.gbp_recirc_add_del(
1,
self.recirc.sw_if_index,
self.epg.sclass,
self.is_ext)
self._test.registry.register(self, self._test.logger)
def remove_vpp_config(self):
self._test.vapi.gbp_recirc_add_del(
0,
self.recirc.sw_if_index,
self.epg.sclass,
self.is_ext)
def object_id(self):
return "gbp-recirc:[%d]" % (self.recirc.sw_if_index)
def query_vpp_config(self):
rs = self._test.vapi.gbp_recirc_dump()
for r in rs:
if r.recirc.sw_if_index == self.recirc.sw_if_index:
return True
return False
class VppGbpExtItf(VppObject):
"""
GBP External Interface
"""
def __init__(self, test, itf, bd, rd, anon=False):
self._test = test
self.itf = itf
self.bd = bd
self.rd = rd
self.flags = 1 if anon else 0
def add_vpp_config(self):
self._test.vapi.gbp_ext_itf_add_del(
1, self.itf.sw_if_index, self.bd.bd_id, self.rd.rd_id, self.flags)
self._test.registry.register(self, self._test.logger)
def remove_vpp_config(self):
self._test.vapi.gbp_ext_itf_add_del(
0, self.itf.sw_if_index, self.bd.bd_id, self.rd.rd_id, self.flags)
def object_id(self):
return "gbp-ext-itf:[%d]%s" % (self.itf.sw_if_index,
" [anon]" if self.flags else "")
def query_vpp_config(self):
rs = self._test.vapi.gbp_ext_itf_dump()
for r in rs:
if r.ext_itf.sw_if_index == self.itf.sw_if_index:
return True
return False
class VppGbpSubnet(VppObject):
"""
GBP Subnet
"""
def __init__(self, test, rd, address, address_len,
type, sw_if_index=None, sclass=None):
self._test = test
self.rd_id = rd.rd_id
a = ip_address(address)
if 4 == a.version:
self.prefix = IPv4Network("%s/%d" % (address, address_len),
strict=False)
else:
self.prefix = IPv6Network("%s/%d" % (address, address_len),
strict=False)
self.type = type
self.sw_if_index = sw_if_index
self.sclass = sclass
def add_vpp_config(self):
self._test.vapi.gbp_subnet_add_del(
1,
self.rd_id,
self.prefix,
self.type,
sw_if_index=self.sw_if_index if self.sw_if_index else 0xffffffff,
sclass=self.sclass if self.sclass else 0xffff)
self._test.registry.register(self, self._test.logger)
def remove_vpp_config(self):
self._test.vapi.gbp_subnet_add_del(
0,
self.rd_id,
self.prefix,
self.type)
def object_id(self):
return "gbp-subnet:[%d-%s]" % (self.rd_id, self.prefix)
def query_vpp_config(self):
ss = self._test.vapi.gbp_subnet_dump()
for s in ss:
if s.subnet.rd_id == self.rd_id and \
s.subnet.type == self.type and \
s.subnet.prefix == self.prefix:
return True
return False
class VppGbpEndpointRetention(object):
def __init__(self, remote_ep_timeout=0xffffffff):
self.remote_ep_timeout = remote_ep_timeout
def encode(self):
return {'remote_ep_timeout': self.remote_ep_timeout}
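# e.g. VppGbpEndpointRetention(2).encode() == {'remote_ep_timeout': 2},
# i.e. learnt remote endpoints age out after roughly 2 seconds; the
# default of 0xffffffff presumably means "never age out".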
class VppGbpEndpointGroup(VppObject):
"""
GBP Endpoint Group
"""
def __init__(self, test, vnid, sclass, rd, bd, uplink,
bvi, bvi_ip4, bvi_ip6=None,
retention=VppGbpEndpointRetention()):
self._test = test
self.uplink = uplink
self.bvi = bvi
self.bvi_ip4 = bvi_ip4
self.bvi_ip6 = bvi_ip6
self.vnid = vnid
self.bd = bd
self.rd = rd
self.sclass = sclass
if 0 == self.sclass:
self.sclass = 0xffff
self.retention = retention
def add_vpp_config(self):
self._test.vapi.gbp_endpoint_group_add(
self.vnid,
self.sclass,
self.bd.bd.bd_id,
self.rd.rd_id,
self.uplink.sw_if_index if self.uplink else INDEX_INVALID,
self.retention.encode())
self._test.registry.register(self, self._test.logger)
def remove_vpp_config(self):
self._test.vapi.gbp_endpoint_group_del(self.sclass)
def object_id(self):
return "gbp-endpoint-group:[%d]" % (self.vnid)
def query_vpp_config(self):
epgs = self._test.vapi.gbp_endpoint_group_dump()
for epg in epgs:
if epg.epg.vnid == self.vnid:
return True
return False
class VppGbpBridgeDomain(VppObject):
"""
GBP Bridge Domain
"""
def __init__(self, test, bd, rd, bvi, uu_fwd=None,
bm_flood=None, learn=True,
uu_drop=False, bm_drop=False,
ucast_arp=False):
self._test = test
self.bvi = bvi
self.uu_fwd = uu_fwd
self.bm_flood = bm_flood
self.bd = bd
self.rd = rd
e = VppEnum.vl_api_gbp_bridge_domain_flags_t
self.flags = e.GBP_BD_API_FLAG_NONE
if not learn:
self.flags |= e.GBP_BD_API_FLAG_DO_NOT_LEARN
if uu_drop:
self.flags |= e.GBP_BD_API_FLAG_UU_FWD_DROP
if bm_drop:
self.flags |= e.GBP_BD_API_FLAG_MCAST_DROP
if ucast_arp:
self.flags |= e.GBP_BD_API_FLAG_UCAST_ARP
def add_vpp_config(self):
self._test.vapi.gbp_bridge_domain_add(
self.bd.bd_id,
self.rd.rd_id,
self.flags,
self.bvi.sw_if_index,
self.uu_fwd.sw_if_index if self.uu_fwd else INDEX_INVALID,
self.bm_flood.sw_if_index if self.bm_flood else INDEX_INVALID)
self._test.registry.register(self, self._test.logger)
def remove_vpp_config(self):
self._test.vapi.gbp_bridge_domain_del(self.bd.bd_id)
def object_id(self):
return "gbp-bridge-domain:[%d]" % (self.bd.bd_id)
def query_vpp_config(self):
bds = self._test.vapi.gbp_bridge_domain_dump()
for bd in bds:
if bd.bd.bd_id == self.bd.bd_id:
return True
return False
class VppGbpRouteDomain(VppObject):
"""
GBP Route Domain
"""
def __init__(self, test, rd_id, scope, t4, t6, ip4_uu=None, ip6_uu=None):
self._test = test
self.rd_id = rd_id
self.scope = scope
self.t4 = t4
self.t6 = t6
self.ip4_uu = ip4_uu
self.ip6_uu = ip6_uu
def add_vpp_config(self):
self._test.vapi.gbp_route_domain_add(
self.rd_id,
self.scope,
self.t4.table_id,
self.t6.table_id,
self.ip4_uu.sw_if_index if self.ip4_uu else INDEX_INVALID,
self.ip6_uu.sw_if_index if self.ip6_uu else INDEX_INVALID)
self._test.registry.register(self, self._test.logger)
def remove_vpp_config(self):
self._test.vapi.gbp_route_domain_del(self.rd_id)
def object_id(self):
return "gbp-route-domain:[%d]" % (self.rd_id)
def query_vpp_config(self):
rds = self._test.vapi.gbp_route_domain_dump()
for rd in rds:
if rd.rd.rd_id == self.rd_id:
return True
return False
class VppGbpContractNextHop():
def __init__(self, mac, bd, ip, rd):
self.mac = mac
self.ip = ip
self.bd = bd
self.rd = rd
def encode(self):
return {'ip': self.ip,
'mac': self.mac.packed,
'bd_id': self.bd.bd.bd_id,
'rd_id': self.rd.rd_id}
class VppGbpContractRule():
def __init__(self, action, hash_mode, nhs=None):
self.action = action
self.hash_mode = hash_mode
self.nhs = [] if nhs is None else nhs
def encode(self):
nhs = []
for nh in self.nhs:
nhs.append(nh.encode())
while len(nhs) < 8:
nhs.append({})
return {'action': self.action,
'nh_set': {
'hash_mode': self.hash_mode,
'n_nhs': len(self.nhs),
'nhs': nhs}}
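# Note: the next-hop set is padded with empty dicts to 8 entries, the
# fixed array size the API presumably expects; n_nhs tells VPP how many
# of them are valid.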
def __repr__(self):
return '<VppGbpContractRule action=%s, hash_mode=%s>' % (
self.action, self.hash_mode)
class VppGbpContract(VppObject):
"""
GBP Contract
"""
def __init__(self, test, scope, sclass, dclass, acl_index,
rules, allowed_ethertypes):
self._test = test
if not isinstance(rules, list):
raise ValueError("'rules' must be a list.")
if not isinstance(allowed_ethertypes, list):
raise ValueError("'allowed_ethertypes' must be a list.")
self.scope = scope
self.acl_index = acl_index
self.sclass = sclass
self.dclass = dclass
self.rules = rules
self.allowed_ethertypes = allowed_ethertypes
while (len(self.allowed_ethertypes) < 16):
self.allowed_ethertypes.append(0)
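# As with contract rules, the ethertype list is padded (here to 16
# entries) to match what is presumably a fixed-size array in the API;
# zero entries should be inert.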
def add_vpp_config(self):
rules = []
for r in self.rules:
rules.append(r.encode())
r = self._test.vapi.gbp_contract_add_del(
is_add=1,
contract={
'acl_index': self.acl_index,
'scope': self.scope,
'sclass': self.sclass,
'dclass': self.dclass,
'n_rules': len(rules),
'rules': rules,
'n_ether_types': len(self.allowed_ethertypes),
'allowed_ethertypes': self.allowed_ethertypes})
self.stats_index = r.stats_index
self._test.registry.register(self, self._test.logger)
def remove_vpp_config(self):
self._test.vapi.gbp_contract_add_del(
is_add=0,
contract={
'acl_index': self.acl_index,
'scope': self.scope,
'sclass': self.sclass,
'dclass': self.dclass,
'n_rules': 0,
'rules': [],
'n_ether_types': len(self.allowed_ethertypes),
'allowed_ethertypes': self.allowed_ethertypes})
def object_id(self):
return "gbp-contract:[%d:%d:%d:%d]" % (self.scope,
self.sclass,
self.dclass,
self.acl_index)
def query_vpp_config(self):
cs = self._test.vapi.gbp_contract_dump()
for c in cs:
if c.contract.scope == self.scope \
and c.contract.sclass == self.sclass \
and c.contract.dclass == self.dclass:
return True
return False
def get_drop_stats(self):
c = self._test.statistics.get_counter("/net/gbp/contract/drop")
return c[0][self.stats_index]
def get_permit_stats(self):
c = self._test.statistics.get_counter("/net/gbp/contract/permit")
return c[0][self.stats_index]
class VppGbpVxlanTunnel(VppInterface):
"""
GBP VXLAN tunnel
"""
def __init__(self, test, vni, bd_rd_id, mode, src):
super(VppGbpVxlanTunnel, self).__init__(test)
self._test = test
self.vni = vni
self.bd_rd_id = bd_rd_id
self.mode = mode
self.src = src
def add_vpp_config(self):
r = self._test.vapi.gbp_vxlan_tunnel_add(
self.vni,
self.bd_rd_id,
self.mode,
self.src)
self.set_sw_if_index(r.sw_if_index)
self._test.registry.register(self, self._test.logger)
def remove_vpp_config(self):
self._test.vapi.gbp_vxlan_tunnel_del(self.vni)
def object_id(self):
return "gbp-vxlan:%d" % (self.sw_if_index)
def query_vpp_config(self):
return find_gbp_vxlan(self._test, self.vni)
class VppGbpAcl(VppObject):
"""
GBP Acl
"""
def __init__(self, test):
self._test = test
self.acl_index = 4294967295
def create_rule(self, is_ipv6=0, permit_deny=0, proto=-1,
s_prefix=0, s_ip=b'\x00\x00\x00\x00', sport_from=0,
sport_to=65535, d_prefix=0, d_ip=b'\x00\x00\x00\x00',
dport_from=0, dport_to=65535):
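# Port-range semantics depend on proto: for "any" (-1/0) the upper
# bounds are zeroed, for ICMP/ICMPv6 (1/58) the "port" fields carry
# type/code and are capped at 255, otherwise the caller's L4 port
# ranges are used as given.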
if proto == -1 or proto == 0:
sport_to = 0
dport_to = sport_to
elif proto == 1 or proto == 58:
sport_to = 255
dport_to = sport_to
rule = ({'is_permit': permit_deny, 'is_ipv6': is_ipv6, 'proto': proto,
'srcport_or_icmptype_first': sport_from,
'srcport_or_icmptype_last': sport_to,
'src_ip_prefix_len': s_prefix,
'src_ip_addr': s_ip,
'dstport_or_icmpcode_first': dport_from,
'dstport_or_icmpcode_last': dport_to,
'dst_ip_prefix_len': d_prefix,
'dst_ip_addr': d_ip})
return rule
def add_vpp_config(self, rules):
reply = self._test.vapi.acl_add_replace(acl_index=self.acl_index,
r=rules,
tag=b'GBPTest')
self.acl_index = reply.acl_index
return self.acl_index
def remove_vpp_config(self):
self._test.vapi.acl_del(self.acl_index)
def object_id(self):
return "gbp-acl:[%d]" % (self.acl_index)
def query_vpp_config(self):
cs = self._test.vapi.acl_dump()
for c in cs:
if c.acl_index == self.acl_index:
return True
return False
class TestGBP(VppTestCase):
""" GBP Test Case """
@property
def config_flags(self):
return VppEnum.vl_api_nat_config_flags_t
@classmethod
def setUpClass(cls):
super(TestGBP, cls).setUpClass()
@classmethod
def tearDownClass(cls):
super(TestGBP, cls).tearDownClass()
def setUp(self):
super(TestGBP, self).setUp()
self.create_pg_interfaces(range(9))
self.create_loopback_interfaces(8)
self.router_mac = MACAddress("00:11:22:33:44:55")
for i in self.pg_interfaces:
i.admin_up()
for i in self.lo_interfaces:
i.admin_up()
self.vlan_100 = VppDot1QSubint(self, self.pg0, 100)
self.vlan_100.admin_up()
self.vlan_101 = VppDot1QSubint(self, self.pg0, 101)
self.vlan_101.admin_up()
self.vlan_102 = VppDot1QSubint(self, self.pg0, 102)
self.vlan_102.admin_up()
def tearDown(self):
for i in self.pg_interfaces:
i.admin_down()
super(TestGBP, self).tearDown()
for i in self.lo_interfaces:
i.remove_vpp_config()
self.lo_interfaces = []
self.vlan_102.remove_vpp_config()
self.vlan_101.remove_vpp_config()
self.vlan_100.remove_vpp_config()
def send_and_expect_bridged(self, src, tx, dst):
rx = self.send_and_expect(src, tx, dst)
for r in rx:
self.assertEqual(r[Ether].src, tx[0][Ether].src)
self.assertEqual(r[Ether].dst, tx[0][Ether].dst)
self.assertEqual(r[IP].src, tx[0][IP].src)
self.assertEqual(r[IP].dst, tx[0][IP].dst)
return rx
def send_and_expect_bridged6(self, src, tx, dst):
rx = self.send_and_expect(src, tx, dst)
for r in rx:
self.assertEqual(r[Ether].src, tx[0][Ether].src)
self.assertEqual(r[Ether].dst, tx[0][Ether].dst)
self.assertEqual(r[IPv6].src, tx[0][IPv6].src)
self.assertEqual(r[IPv6].dst, tx[0][IPv6].dst)
return rx
def send_and_expect_routed(self, src, tx, dst, src_mac):
rx = self.send_and_expect(src, tx, dst)
for r in rx:
self.assertEqual(r[Ether].src, src_mac)
self.assertEqual(r[Ether].dst, dst.remote_mac)
self.assertEqual(r[IP].src, tx[0][IP].src)
self.assertEqual(r[IP].dst, tx[0][IP].dst)
return rx
def send_and_expect_routed6(self, src, tx, dst, src_mac):
rx = self.send_and_expect(src, tx, dst)
for r in rx:
self.assertEqual(r[Ether].src, src_mac)
self.assertEqual(r[Ether].dst, dst.remote_mac)
self.assertEqual(r[IPv6].src, tx[0][IPv6].src)
self.assertEqual(r[IPv6].dst, tx[0][IPv6].dst)
return rx
def send_and_expect_natted(self, src, tx, dst, src_ip):
rx = self.send_and_expect(src, tx, dst)
for r in rx:
self.assertEqual(r[Ether].src, tx[0][Ether].src)
self.assertEqual(r[Ether].dst, tx[0][Ether].dst)
self.assertEqual(r[IP].src, src_ip)
self.assertEqual(r[IP].dst, tx[0][IP].dst)
return rx
def send_and_expect_natted6(self, src, tx, dst, src_ip):
rx = self.send_and_expect(src, tx, dst)
for r in rx:
self.assertEqual(r[Ether].src, tx[0][Ether].src)
self.assertEqual(r[Ether].dst, tx[0][Ether].dst)
self.assertEqual(r[IPv6].src, src_ip)
self.assertEqual(r[IPv6].dst, tx[0][IPv6].dst)
return rx
def send_and_expect_unnatted(self, src, tx, dst, dst_ip):
rx = self.send_and_expect(src, tx, dst)
for r in rx:
self.assertEqual(r[Ether].src, tx[0][Ether].src)
self.assertEqual(r[Ether].dst, tx[0][Ether].dst)
self.assertEqual(r[IP].dst, dst_ip)
self.assertEqual(r[IP].src, tx[0][IP].src)
return rx
def send_and_expect_unnatted6(self, src, tx, dst, dst_ip):
rx = self.send_and_expect(src, tx, dst)
for r in rx:
self.assertEqual(r[Ether].src, tx[0][Ether].src)
self.assertEqual(r[Ether].dst, tx[0][Ether].dst)
self.assertEqual(r[IPv6].dst, dst_ip)
self.assertEqual(r[IPv6].src, tx[0][IPv6].src)
return rx
def send_and_expect_double_natted(self, src, tx, dst, src_ip, dst_ip):
rx = self.send_and_expect(src, tx, dst)
for r in rx:
self.assertEqual(r[Ether].src, str(self.router_mac))
self.assertEqual(r[Ether].dst, dst.remote_mac)
self.assertEqual(r[IP].dst, dst_ip)
self.assertEqual(r[IP].src, src_ip)
return rx
def send_and_expect_double_natted6(self, src, tx, dst, src_ip, dst_ip):
rx = self.send_and_expect(src, tx, dst)
for r in rx:
self.assertEqual(r[Ether].src, str(self.router_mac))
self.assertEqual(r[Ether].dst, dst.remote_mac)
self.assertEqual(r[IPv6].dst, dst_ip)
self.assertEqual(r[IPv6].src, src_ip)
return rx
def send_and_expect_no_arp(self, src, tx, dst):
self.pg_send(src, tx)
dst.get_capture(0, timeout=1)
dst.assert_nothing_captured(remark="")
timeout = 0.1
def send_and_expect_arp(self, src, tx, dst):
rx = self.send_and_expect(src, tx, dst)
for r in rx:
self.assertEqual(r[Ether].src, tx[0][Ether].src)
self.assertEqual(r[Ether].dst, tx[0][Ether].dst)
self.assertEqual(r[ARP].psrc, tx[0][ARP].psrc)
self.assertEqual(r[ARP].pdst, tx[0][ARP].pdst)
self.assertEqual(r[ARP].hwsrc, tx[0][ARP].hwsrc)
self.assertEqual(r[ARP].hwdst, tx[0][ARP].hwdst)
return rx
def test_gbp(self):
""" Group Based Policy """
ep_flags = VppEnum.vl_api_gbp_endpoint_flags_t
#
# Route Domains
#
gt4 = VppIpTable(self, 0)
gt4.add_vpp_config()
gt6 = VppIpTable(self, 0, is_ip6=True)
gt6.add_vpp_config()
nt4 = VppIpTable(self, 20)
nt4.add_vpp_config()
nt6 = VppIpTable(self, 20, is_ip6=True)
nt6.add_vpp_config()
rd0 = VppGbpRouteDomain(self, 0, 400, gt4, gt6, None, None)
rd20 = VppGbpRouteDomain(self, 20, 420, nt4, nt6, None, None)
rd0.add_vpp_config()
rd20.add_vpp_config()
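# Note the scopes: rd0 uses 400 and rd20 uses 420; the contracts
# created later are keyed on the route domain's scope (the first
# numeric argument to VppGbpContract).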
#
# Bridge Domains
#
bd1 = VppBridgeDomain(self, 1)
bd2 = VppBridgeDomain(self, 2)
bd20 = VppBridgeDomain(self, 20)
bd1.add_vpp_config()
bd2.add_vpp_config()
bd20.add_vpp_config()
gbd1 = VppGbpBridgeDomain(self, bd1, rd0, self.loop0)
gbd2 = VppGbpBridgeDomain(self, bd2, rd0, self.loop1)
gbd20 = VppGbpBridgeDomain(self, bd20, rd20, self.loop2)
gbd1.add_vpp_config()
gbd2.add_vpp_config()
gbd20.add_vpp_config()
#
# 3 EPGs, 2 of which share a BD.
# 2 NAT EPGs, one for floating-IP subnets, the other for internet
#
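# Positional arguments follow VppGbpEndpointGroup.__init__:
# (vnid, sclass, rd, bd, uplink, bvi, bvi_ip4, bvi_ip6)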
epgs = [VppGbpEndpointGroup(self, 220, 1220, rd0, gbd1,
self.pg4, self.loop0,
"10.0.0.128", "2001:10::128"),
VppGbpEndpointGroup(self, 221, 1221, rd0, gbd1,
self.pg5, self.loop0,
"10.0.1.128", "2001:10:1::128"),
VppGbpEndpointGroup(self, 222, 1222, rd0, gbd2,
self.pg6, self.loop1,
"10.0.2.128", "2001:10:2::128"),
VppGbpEndpointGroup(self, 333, 1333, rd20, gbd20,
self.pg7, self.loop2,
"11.0.0.128", "3001::128"),
VppGbpEndpointGroup(self, 444, 1444, rd20, gbd20,
self.pg8, self.loop2,
"11.0.0.129", "3001::129")]
recircs = [VppGbpRecirc(self, epgs[0], self.loop3),
VppGbpRecirc(self, epgs[1], self.loop4),
VppGbpRecirc(self, epgs[2], self.loop5),
VppGbpRecirc(self, epgs[3], self.loop6, is_ext=True),
VppGbpRecirc(self, epgs[4], self.loop7, is_ext=True)]
epg_nat = epgs[3]
recirc_nat = recircs[3]
#
# 4 end-points, 2 in the same subnet, 3 in the same BD
#
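# Positional arguments follow VppGbpEndpoint.__init__:
# (itf, epg, recirc, ip4, fip4, ip6, fip6)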
eps = [VppGbpEndpoint(self, self.pg0,
epgs[0], recircs[0],
"10.0.0.1", "11.0.0.1",
"2001:10::1", "3001::1"),
VppGbpEndpoint(self, self.pg1,
epgs[0], recircs[0],
"10.0.0.2", "11.0.0.2",
"2001:10::2", "3001::2"),
VppGbpEndpoint(self, self.pg2,
epgs[1], recircs[1],
"10.0.1.1", "11.0.0.3",
"2001:10:1::1", "3001::3"),
VppGbpEndpoint(self, self.pg3,
epgs[2], recircs[2],
"10.0.2.1", "11.0.0.4",
"2001:10:2::1", "3001::4")]
#
# Config related to each of the EPGs
#
for epg in epgs:
# IP config on the BVI interfaces
if epg != epgs[1] and epg != epgs[4]:
VppIpInterfaceBind(self, epg.bvi, epg.rd.t4).add_vpp_config()
VppIpInterfaceBind(self, epg.bvi, epg.rd.t6).add_vpp_config()
epg.bvi.set_mac(self.router_mac)
# The BVIs are NAT inside interfaces
flags = self.config_flags.NAT_IS_INSIDE
self.vapi.nat44_interface_add_del_feature(
sw_if_index=epg.bvi.sw_if_index,
flags=flags, is_add=1)
self.vapi.nat66_add_del_interface(
is_add=1, flags=flags,
sw_if_index=epg.bvi.sw_if_index)
if_ip4 = VppIpInterfaceAddress(self, epg.bvi, epg.bvi_ip4, 32)
if_ip6 = VppIpInterfaceAddress(self, epg.bvi, epg.bvi_ip6, 128)
if_ip4.add_vpp_config()
if_ip6.add_vpp_config()
# EPG uplink interfaces in the RD
VppIpInterfaceBind(self, epg.uplink, epg.rd.t4).add_vpp_config()
VppIpInterfaceBind(self, epg.uplink, epg.rd.t6).add_vpp_config()
# add the BD ARP termination entry for BVI IP
epg.bd_arp_ip4 = VppBridgeDomainArpEntry(self, epg.bd.bd,
str(self.router_mac),
epg.bvi_ip4)
epg.bd_arp_ip6 = VppBridgeDomainArpEntry(self, epg.bd.bd,
str(self.router_mac),
epg.bvi_ip6)
epg.bd_arp_ip4.add_vpp_config()
epg.bd_arp_ip6.add_vpp_config()
# EPG in VPP
epg.add_vpp_config()
for recirc in recircs:
# EPG's ingress recirculation interface maps to its RD
VppIpInterfaceBind(self, recirc.recirc,
recirc.epg.rd.t4).add_vpp_config()
VppIpInterfaceBind(self, recirc.recirc,
recirc.epg.rd.t6).add_vpp_config()
self.vapi.nat44_interface_add_del_feature(
sw_if_index=recirc.recirc.sw_if_index, is_add=1)
self.vapi.nat66_add_del_interface(
is_add=1,
sw_if_index=recirc.recirc.sw_if_index)
recirc.add_vpp_config()
for recirc in recircs:
self.assertTrue(find_bridge_domain_port(self,
recirc.epg.bd.bd.bd_id,
recirc.recirc.sw_if_index))
for ep in eps:
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
#
# routes to the endpoints. We need these since there are no
# adj-fibs, due to the fact that the BVI address has a /32 and
# the subnet is not attached.
#
for (ip, fip) in zip(ep.ips, ep.fips):
# Add static mappings for each EP from the 10/8 to 11/8 network
if ip_address(ip).version == 4:
flags = self.config_flags.NAT_IS_ADDR_ONLY
self.vapi.nat44_add_del_static_mapping(
is_add=1,
local_ip_address=ip,
external_ip_address=fip,
external_sw_if_index=0xFFFFFFFF,
vrf_id=0,
flags=flags)
else:
self.vapi.nat66_add_del_static_mapping(
local_ip_address=ip,
external_ip_address=fip,
vrf_id=0, is_add=1)
# VPP EP create ...
ep.add_vpp_config()
self.logger.info(self.vapi.cli("sh gbp endpoint"))
# ... results in a Gratuitous ARP/ND on the EPG's uplink
rx = ep.epg.uplink.get_capture(len(ep.ips), timeout=0.2)
for ii, ip in enumerate(ep.ips):
p = rx[ii]
if ip_address(ip).version == 6:
self.assertTrue(p.haslayer(ICMPv6ND_NA))
self.assertEqual(p[ICMPv6ND_NA].tgt, ip)
else:
self.assertTrue(p.haslayer(ARP))
self.assertEqual(p[ARP].psrc, ip)
self.assertEqual(p[ARP].pdst, ip)
# add the BD ARP termination entry for floating IP
for fip in ep.fips:
ba = VppBridgeDomainArpEntry(self, epg_nat.bd.bd, ep.mac,
fip)
ba.add_vpp_config()
# floating IPs route via EPG recirc
r = VppIpRoute(
self, fip, ip_address(fip).max_prefixlen,
[VppRoutePath(fip,
ep.recirc.recirc.sw_if_index,
type=FibPathType.FIB_PATH_TYPE_DVR,
proto=get_dpo_proto(fip))],
table_id=20)
r.add_vpp_config()
# L2 FIB entries in the NAT EPG BD to bridge the packets from
# the outside direct to the internal EPG
lf = VppL2FibEntry(self, epg_nat.bd.bd, ep.mac,
ep.recirc.recirc, bvi_mac=0)
lf.add_vpp_config()
#
# ARP packets for unknown IP are sent to the EPG uplink
#
pkt_arp = (Ether(dst="ff:ff:ff:ff:ff:ff",
src=self.pg0.remote_mac) /
ARP(op="who-has",
hwdst="ff:ff:ff:ff:ff:ff",
hwsrc=self.pg0.remote_mac,
pdst="10.0.0.88",
psrc="10.0.0.99"))
self.vapi.cli("clear trace")
self.pg0.add_stream(pkt_arp)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
rxd = epgs[0].uplink.get_capture(1)
#
# ARP/ND packets get a response
#
pkt_arp = (Ether(dst="ff:ff:ff:ff:ff:ff",
src=self.pg0.remote_mac) /
ARP(op="who-has",
hwdst="ff:ff:ff:ff:ff:ff",
hwsrc=self.pg0.remote_mac,
pdst=epgs[0].bvi_ip4,
psrc=eps[0].ip4))
self.send_and_expect(self.pg0, [pkt_arp], self.pg0)
nsma = in6_getnsma(inet_pton(AF_INET6, eps[0].ip6))
d = inet_ntop(AF_INET6, nsma)
pkt_nd = (Ether(dst=in6_getnsmac(nsma),
src=self.pg0.remote_mac) /
IPv6(dst=d, src=eps[0].ip6) /
ICMPv6ND_NS(tgt=epgs[0].bvi_ip6) /
ICMPv6NDOptSrcLLAddr(lladdr=self.pg0.remote_mac))
self.send_and_expect(self.pg0, [pkt_nd], self.pg0)
#
# broadcast packets are flooded
#
pkt_bcast = (Ether(dst="ff:ff:ff:ff:ff:ff",
src=self.pg0.remote_mac) /
IP(src=eps[0].ip4, dst="232.1.1.1") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.vapi.cli("clear trace")
self.pg0.add_stream(pkt_bcast)
self.pg_enable_capture(self.pg_interfaces)
self.pg_start()
rxd = eps[1].itf.get_capture(1)
self.assertEqual(rxd[0][Ether].dst, pkt_bcast[Ether].dst)
rxd = epgs[0].uplink.get_capture(1)
self.assertEqual(rxd[0][Ether].dst, pkt_bcast[Ether].dst)
#
# packets to non-local L3 destinations dropped
#
pkt_intra_epg_220_ip4 = (Ether(src=self.pg0.remote_mac,
dst=str(self.router_mac)) /
IP(src=eps[0].ip4,
dst="10.0.0.99") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
pkt_inter_epg_222_ip4 = (Ether(src=self.pg0.remote_mac,
dst=str(self.router_mac)) /
IP(src=eps[0].ip4,
dst="10.0.1.99") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_assert_no_replies(self.pg0,
pkt_intra_epg_220_ip4 * NUM_PKTS)
pkt_inter_epg_222_ip6 = (Ether(src=self.pg0.remote_mac,
dst=str(self.router_mac)) /
IPv6(src=eps[0].ip6,
dst="2001:10::99") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_assert_no_replies(self.pg0,
pkt_inter_epg_222_ip6 * NUM_PKTS)
#
# Add the subnet routes
#
s41 = VppGbpSubnet(
self, rd0, "10.0.0.0", 24,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_STITCHED_INTERNAL)
s42 = VppGbpSubnet(
self, rd0, "10.0.1.0", 24,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_STITCHED_INTERNAL)
s43 = VppGbpSubnet(
self, rd0, "10.0.2.0", 24,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_STITCHED_INTERNAL)
s61 = VppGbpSubnet(
self, rd0, "2001:10::1", 64,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_STITCHED_INTERNAL)
s62 = VppGbpSubnet(
self, rd0, "2001:10:1::1", 64,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_STITCHED_INTERNAL)
s63 = VppGbpSubnet(
self, rd0, "2001:10:2::1", 64,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_STITCHED_INTERNAL)
s41.add_vpp_config()
s42.add_vpp_config()
s43.add_vpp_config()
s61.add_vpp_config()
s62.add_vpp_config()
s63.add_vpp_config()
self.send_and_expect_bridged(eps[0].itf,
pkt_intra_epg_220_ip4 * NUM_PKTS,
eps[0].epg.uplink)
self.send_and_expect_bridged(eps[0].itf,
pkt_inter_epg_222_ip4 * NUM_PKTS,
eps[0].epg.uplink)
self.send_and_expect_bridged6(eps[0].itf,
pkt_inter_epg_222_ip6 * NUM_PKTS,
eps[0].epg.uplink)
self.logger.info(self.vapi.cli("sh ip fib 11.0.0.2"))
self.logger.info(self.vapi.cli("sh gbp endpoint-group"))
self.logger.info(self.vapi.cli("sh gbp endpoint"))
self.logger.info(self.vapi.cli("sh gbp recirc"))
self.logger.info(self.vapi.cli("sh int"))
self.logger.info(self.vapi.cli("sh int addr"))
self.logger.info(self.vapi.cli("sh int feat loop6"))
self.logger.info(self.vapi.cli("sh vlib graph ip4-gbp-src-classify"))
self.logger.info(self.vapi.cli("sh int feat loop3"))
self.logger.info(self.vapi.cli("sh int feat pg0"))
#
# Packet destined to unknown unicast is sent on the epg uplink ...
#
pkt_intra_epg_220_to_uplink = (Ether(src=self.pg0.remote_mac,
dst="00:00:00:33:44:55") /
IP(src=eps[0].ip4,
dst="10.0.0.99") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_expect_bridged(eps[0].itf,
pkt_intra_epg_220_to_uplink * NUM_PKTS,
eps[0].epg.uplink)
# ... and nowhere else
self.pg1.get_capture(0, timeout=0.1)
self.pg1.assert_nothing_captured(remark="Flood onto other VMS")
pkt_intra_epg_221_to_uplink = (Ether(src=self.pg2.remote_mac,
dst="00:00:00:33:44:66") /
IP(src=eps[0].ip4,
dst="10.0.0.99") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_expect_bridged(eps[2].itf,
pkt_intra_epg_221_to_uplink * NUM_PKTS,
eps[2].epg.uplink)
#
# Packets from the uplink are forwarded in the absence of a contract
#
pkt_intra_epg_220_from_uplink = (Ether(src="00:00:00:33:44:55",
dst=self.pg0.remote_mac) /
IP(src=eps[0].ip4,
dst="10.0.0.99") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_expect_bridged(self.pg4,
pkt_intra_epg_220_from_uplink * NUM_PKTS,
self.pg0)
#
# in the absence of policy, endpoints in the same EPG
# can communicate
#
pkt_intra_epg = (Ether(src=self.pg0.remote_mac,
dst=self.pg1.remote_mac) /
IP(src=eps[0].ip4,
dst=eps[1].ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_expect_bridged(self.pg0,
pkt_intra_epg * NUM_PKTS,
self.pg1)
#
# in the absence of policy, endpoints in different EPGs
# cannot communicate
#
pkt_inter_epg_220_to_221 = (Ether(src=self.pg0.remote_mac,
dst=self.pg2.remote_mac) /
IP(src=eps[0].ip4,
dst=eps[2].ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
pkt_inter_epg_221_to_220 = (Ether(src=self.pg2.remote_mac,
dst=self.pg0.remote_mac) /
IP(src=eps[2].ip4,
dst=eps[0].ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
pkt_inter_epg_220_to_222 = (Ether(src=self.pg0.remote_mac,
dst=str(self.router_mac)) /
IP(src=eps[0].ip4,
dst=eps[3].ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_assert_no_replies(eps[0].itf,
pkt_inter_epg_220_to_221 * NUM_PKTS)
self.send_and_assert_no_replies(eps[0].itf,
pkt_inter_epg_220_to_222 * NUM_PKTS)
#
# A uni-directional contract from EPG 220 -> 221
#
acl = VppGbpAcl(self)
rule = acl.create_rule(permit_deny=1, proto=17)
rule2 = acl.create_rule(is_ipv6=1, permit_deny=1, proto=17)
acl_index = acl.add_vpp_config([rule, rule2])
c1 = VppGbpContract(
self, 400, epgs[0].sclass, epgs[1].sclass, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[])],
[ETH_P_IP, ETH_P_IPV6])
c1.add_vpp_config()
self.send_and_expect_bridged(eps[0].itf,
pkt_inter_epg_220_to_221 * NUM_PKTS,
eps[2].itf)
self.send_and_assert_no_replies(eps[0].itf,
pkt_inter_epg_220_to_222 * NUM_PKTS)
#
# contract for the return direction
#
c2 = VppGbpContract(
self, 400, epgs[1].sclass, epgs[0].sclass, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[])],
[ETH_P_IP, ETH_P_IPV6])
c2.add_vpp_config()
self.send_and_expect_bridged(eps[0].itf,
pkt_inter_epg_220_to_221 * NUM_PKTS,
eps[2].itf)
self.send_and_expect_bridged(eps[2].itf,
pkt_inter_epg_221_to_220 * NUM_PKTS,
eps[0].itf)
ds = c2.get_drop_stats()
self.assertEqual(ds['packets'], 0)
ps = c2.get_permit_stats()
self.assertEqual(ps['packets'], NUM_PKTS)
#
# the contract does not allow non-IP
#
pkt_non_ip_inter_epg_220_to_221 = (Ether(src=self.pg0.remote_mac,
dst=self.pg2.remote_mac) /
ARP())
self.send_and_assert_no_replies(eps[0].itf,
pkt_non_ip_inter_epg_220_to_221 * 17)
#
# check that inter group is still disabled for the groups
# not in the contract.
#
self.send_and_assert_no_replies(eps[0].itf,
pkt_inter_epg_220_to_222 * NUM_PKTS)
#
# A uni-directional contract from EPG 220 -> 222 'L3 routed'
#
c3 = VppGbpContract(
self, 400, epgs[0].sclass, epgs[2].sclass, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[])],
[ETH_P_IP, ETH_P_IPV6])
c3.add_vpp_config()
self.logger.info(self.vapi.cli("sh gbp contract"))
self.send_and_expect_routed(eps[0].itf,
pkt_inter_epg_220_to_222 * NUM_PKTS,
eps[3].itf,
str(self.router_mac))
#
# remove both contracts, traffic stops in both directions
#
c2.remove_vpp_config()
c1.remove_vpp_config()
c3.remove_vpp_config()
acl.remove_vpp_config()
self.send_and_assert_no_replies(eps[2].itf,
pkt_inter_epg_221_to_220 * NUM_PKTS)
self.send_and_assert_no_replies(eps[0].itf,
pkt_inter_epg_220_to_221 * NUM_PKTS)
self.send_and_expect_bridged(eps[0].itf,
pkt_intra_epg * NUM_PKTS,
eps[1].itf)
#
# EPs to the outside world
#
# in the EP's RD an external subnet via the NAT EPG's recirc
se1 = VppGbpSubnet(
self, rd0, "0.0.0.0", 0,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_STITCHED_EXTERNAL,
sw_if_index=recirc_nat.recirc.sw_if_index,
sclass=epg_nat.sclass)
se2 = VppGbpSubnet(
self, rd0, "11.0.0.0", 8,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_STITCHED_EXTERNAL,
sw_if_index=recirc_nat.recirc.sw_if_index,
sclass=epg_nat.sclass)
se16 = VppGbpSubnet(
self, rd0, "::", 0,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_STITCHED_EXTERNAL,
sw_if_index=recirc_nat.recirc.sw_if_index,
sclass=epg_nat.sclass)
# in the NAT RD an external subnet via the NAT EPG's uplink
se3 = VppGbpSubnet(
self, rd20, "0.0.0.0", 0,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_STITCHED_EXTERNAL,
sw_if_index=epg_nat.uplink.sw_if_index,
sclass=epg_nat.sclass)
se36 = VppGbpSubnet(
self, rd20, "::", 0,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_STITCHED_EXTERNAL,
sw_if_index=epg_nat.uplink.sw_if_index,
sclass=epg_nat.sclass)
se4 = VppGbpSubnet(
self, rd20, "11.0.0.0", 8,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_STITCHED_EXTERNAL,
sw_if_index=epg_nat.uplink.sw_if_index,
sclass=epg_nat.sclass)
se1.add_vpp_config()
se2.add_vpp_config()
se16.add_vpp_config()
se3.add_vpp_config()
se36.add_vpp_config()
se4.add_vpp_config()
self.logger.info(self.vapi.cli("sh ip fib 0.0.0.0/0"))
self.logger.info(self.vapi.cli("sh ip fib 11.0.0.1"))
self.logger.info(self.vapi.cli("sh ip6 fib ::/0"))
self.logger.info(self.vapi.cli("sh ip6 fib %s" %
eps[0].fip6))
#
# From an EP to an outside address: IN2OUT
#
pkt_inter_epg_220_to_global = (Ether(src=self.pg0.remote_mac,
dst=str(self.router_mac)) /
IP(src=eps[0].ip4,
dst="1.1.1.1") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
# no policy yet
self.send_and_assert_no_replies(eps[0].itf,
pkt_inter_epg_220_to_global * NUM_PKTS)
acl2 = VppGbpAcl(self)
rule = acl2.create_rule(permit_deny=1, proto=17, sport_from=1234,
sport_to=1234, dport_from=1234, dport_to=1234)
rule2 = acl2.create_rule(is_ipv6=1, permit_deny=1, proto=17,
sport_from=1234, sport_to=1234,
dport_from=1234, dport_to=1234)
acl_index2 = acl2.add_vpp_config([rule, rule2])
c4 = VppGbpContract(
self, 400, epgs[0].sclass, epgs[3].sclass, acl_index2,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[])],
[ETH_P_IP, ETH_P_IPV6])
c4.add_vpp_config()
self.send_and_expect_natted(eps[0].itf,
pkt_inter_epg_220_to_global * NUM_PKTS,
self.pg7,
eps[0].fip4)
pkt_inter_epg_220_to_global = (Ether(src=self.pg0.remote_mac,
dst=str(self.router_mac)) /
IPv6(src=eps[0].ip6,
dst="6001::1") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_expect_natted6(self.pg0,
pkt_inter_epg_220_to_global * NUM_PKTS,
self.pg7,
eps[0].fip6)
#
# From a global address to an EP: OUT2IN
#
pkt_inter_epg_220_from_global = (Ether(src=str(self.router_mac),
dst=self.pg0.remote_mac) /
IP(dst=eps[0].fip4,
src="1.1.1.1") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_assert_no_replies(
self.pg7, pkt_inter_epg_220_from_global * NUM_PKTS)
c5 = VppGbpContract(
self, 400, epgs[3].sclass, epgs[0].sclass, acl_index2,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[])],
[ETH_P_IP, ETH_P_IPV6])
c5.add_vpp_config()
self.send_and_expect_unnatted(self.pg7,
pkt_inter_epg_220_from_global * NUM_PKTS,
eps[0].itf,
eps[0].ip4)
pkt_inter_epg_220_from_global = (Ether(src=str(self.router_mac),
dst=self.pg0.remote_mac) /
IPv6(dst=eps[0].fip6,
src="6001::1") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_expect_unnatted6(
self.pg7,
pkt_inter_epg_220_from_global * NUM_PKTS,
eps[0].itf,
eps[0].ip6)
#
# From a local VM to another local VM using their respective public addresses:
# IN2OUT2IN
#
pkt_intra_epg_220_global = (Ether(src=self.pg0.remote_mac,
dst=str(self.router_mac)) /
IP(src=eps[0].ip4,
dst=eps[1].fip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_expect_double_natted(eps[0].itf,
pkt_intra_epg_220_global * NUM_PKTS,
eps[1].itf,
eps[0].fip4,
eps[1].ip4)
pkt_intra_epg_220_global = (Ether(src=self.pg0.remote_mac,
dst=str(self.router_mac)) /
IPv6(src=eps[0].ip6,
dst=eps[1].fip6) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_expect_double_natted6(
eps[0].itf,
pkt_intra_epg_220_global * NUM_PKTS,
eps[1].itf,
eps[0].fip6,
eps[1].ip6)
#
# cleanup
#
for ep in eps:
# del static mappings for each EP from the 10/8 to 11/8 network
flags = self.config_flags.NAT_IS_ADDR_ONLY
self.vapi.nat44_add_del_static_mapping(
is_add=0,
local_ip_address=ep.ip4,
external_ip_address=ep.fip4,
external_sw_if_index=0xFFFFFFFF,
vrf_id=0,
flags=flags)
self.vapi.nat66_add_del_static_mapping(
local_ip_address=ep.ip6,
external_ip_address=ep.fip6,
vrf_id=0, is_add=0)
for epg in epgs:
# IP config on the BVI interfaces
if epg != epgs[0] and epg != epgs[3]:
flags = self.config_flags.NAT_IS_INSIDE
self.vapi.nat44_interface_add_del_feature(
sw_if_index=epg.bvi.sw_if_index,
flags=flags,
is_add=0)
self.vapi.nat66_add_del_interface(
is_add=0, flags=flags,
sw_if_index=epg.bvi.sw_if_index)
for recirc in recircs:
self.vapi.nat44_interface_add_del_feature(
sw_if_index=recirc.recirc.sw_if_index,
is_add=0)
self.vapi.nat66_add_del_interface(
is_add=0,
sw_if_index=recirc.recirc.sw_if_index)
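# Helper: poll until a learnt endpoint has aged out, failing the test
# if it is still present after n_tries * s_time seconds.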
def wait_for_ep_timeout(self, sw_if_index=None, ip=None, mac=None,
tep=None, n_tries=100, s_time=1):
while (n_tries):
if not find_gbp_endpoint(self, sw_if_index, ip, mac, tep=tep):
return True
n_tries = n_tries - 1
self.sleep(s_time)
self.assertFalse(find_gbp_endpoint(self, sw_if_index, ip, mac))
return False
def test_gbp_learn_l2(self):
""" GBP L2 Endpoint Learning """
drop_no_contract = self.statistics.get_err_counter(
'/err/gbp-policy-port/drop-no-contract')
allow_intra_class = self.statistics.get_err_counter(
'/err/gbp-policy-port/allow-intra-sclass')
ep_flags = VppEnum.vl_api_gbp_endpoint_flags_t
learnt = [{'mac': '00:00:11:11:11:01',
'ip': '10.0.0.1',
'ip6': '2001:10::2'},
{'mac': '00:00:11:11:11:02',
'ip': '10.0.0.2',
'ip6': '2001:10::3'}]
#
# IP tables
#
gt4 = VppIpTable(self, 1)
gt4.add_vpp_config()
gt6 = VppIpTable(self, 1, is_ip6=True)
gt6.add_vpp_config()
rd1 = VppGbpRouteDomain(self, 1, 401, gt4, gt6)
rd1.add_vpp_config()
#
# Pg2 hosts the vxlan tunnel, hosts on pg2 to act as TEPs
# Pg3 hosts the IP4 UU-flood VXLAN tunnel
# Pg4 hosts the IP6 UU-flood VXLAN tunnel
#
self.pg2.config_ip4()
self.pg2.resolve_arp()
self.pg2.generate_remote_hosts(4)
self.pg2.configure_ipv4_neighbors()
self.pg3.config_ip4()
self.pg3.resolve_arp()
self.pg4.config_ip4()
self.pg4.resolve_arp()
#
# Add a mcast destination VXLAN-GBP tunnel for B&M traffic
#
tun_bm = VppVxlanGbpTunnel(self, self.pg4.local_ip4,
"239.1.1.1", 88,
mcast_itf=self.pg4)
tun_bm.add_vpp_config()
#
# a GBP bridge domain with a BVI and a UU-flood interface
#
bd1 = VppBridgeDomain(self, 1)
bd1.add_vpp_config()
gbd1 = VppGbpBridgeDomain(self, bd1, rd1, self.loop0,
self.pg3, tun_bm)
gbd1.add_vpp_config()
self.logger.info(self.vapi.cli("sh bridge 1 detail"))
self.logger.info(self.vapi.cli("sh gbp bridge"))
# ... and has a /32 applied
ip_addr = VppIpInterfaceAddress(self, gbd1.bvi, "10.0.0.128", 32)
ip_addr.add_vpp_config()
#
# The Endpoint-group in which we are learning endpoints
#
epg_220 = VppGbpEndpointGroup(self, 220, 112, rd1, gbd1,
None, self.loop0,
"10.0.0.128",
"2001:10::128",
VppGbpEndpointRetention(2))
epg_220.add_vpp_config()
epg_330 = VppGbpEndpointGroup(self, 330, 113, rd1, gbd1,
None, self.loop1,
"10.0.1.128",
"2001:11::128",
VppGbpEndpointRetention(2))
epg_330.add_vpp_config()
#
# The VXLAN GBP tunnel is a bridge-port and has L2 endpoint
# learning enabled
#
vx_tun_l2_1 = VppGbpVxlanTunnel(
self, 99, bd1.bd_id,
VppEnum.vl_api_gbp_vxlan_tunnel_mode_t.GBP_VXLAN_TUNNEL_MODE_L2,
self.pg2.local_ip4)
vx_tun_l2_1.add_vpp_config()
#
# A static endpoint that the learnt endpoints are trying to
# talk to
#
ep = VppGbpEndpoint(self, self.pg0,
epg_220, None,
"10.0.0.127", "11.0.0.127",
"2001:10::1", "3001::1")
ep.add_vpp_config()
self.assertTrue(find_route(self, ep.ip4, 32, table_id=1))
# a packet with an sclass from an unknown EPG
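        # (UDP dport 48879 is the VXLAN-GBP port, flags=0x88 sets the
        # G (group policy) and I (valid VNI) bits, and gpid carries the
        # source sclass - 88 matches no configured EPG here)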
p = (Ether(src=self.pg2.remote_mac,
dst=self.pg2.local_mac) /
IP(src=self.pg2.remote_hosts[0].ip4,
dst=self.pg2.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=99, gpid=88, flags=0x88) /
Ether(src=learnt[0]["mac"], dst=ep.mac) /
IP(src=learnt[0]["ip"], dst=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_assert_no_replies(self.pg2, p)
self.logger.info(self.vapi.cli("sh error"))
self.assert_error_counter_equal(
'/err/gbp-policy-port/drop-no-contract',
drop_no_contract + 1)
#
        # we should not have learnt a new tunnel endpoint, since
        # the packet's sclass did not match a known EPG
#
self.assertEqual(INDEX_INVALID,
find_vxlan_gbp_tunnel(self,
self.pg2.local_ip4,
self.pg2.remote_hosts[0].ip4,
99))
# ep is not learnt, because the EPG is unknown
self.assertEqual(len(self.vapi.gbp_endpoint_dump()), 1)
#
# Learn new EPs from IP packets
#
for ii, l in enumerate(learnt):
# a packet with an sclass from a known EPG
# arriving on an unknown TEP
p = (Ether(src=self.pg2.remote_mac,
dst=self.pg2.local_mac) /
IP(src=self.pg2.remote_hosts[1].ip4,
dst=self.pg2.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=99, gpid=112, flags=0x88) /
Ether(src=l['mac'], dst=ep.mac) /
IP(src=l['ip'], dst=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rx = self.send_and_expect(self.pg2, [p], self.pg0)
# the new TEP
tep1_sw_if_index = find_vxlan_gbp_tunnel(
self,
self.pg2.local_ip4,
self.pg2.remote_hosts[1].ip4,
99)
self.assertNotEqual(INDEX_INVALID, tep1_sw_if_index)
#
# the EP is learnt via the learnt TEP
# both from its MAC and its IP
#
self.assertTrue(find_gbp_endpoint(self,
vx_tun_l2_1.sw_if_index,
mac=l['mac']))
self.assertTrue(find_gbp_endpoint(self,
vx_tun_l2_1.sw_if_index,
ip=l['ip']))
self.assert_error_counter_equal(
'/err/gbp-policy-port/allow-intra-sclass',
allow_intra_class + 2)
self.logger.info(self.vapi.cli("show gbp endpoint"))
self.logger.info(self.vapi.cli("show gbp vxlan"))
self.logger.info(self.vapi.cli("show ip mfib"))
#
# If we sleep for the threshold time, the learnt endpoints should
# age out
#
for l in learnt:
self.wait_for_ep_timeout(vx_tun_l2_1.sw_if_index,
mac=l['mac'])
#
# Learn new EPs from GARP packets received on the BD's mcast tunnel
#
for ii, l in enumerate(learnt):
# add some junk in the reserved field of the vxlan-header
# next to the VNI. we should accept since reserved bits are
# ignored on rx.
p = (Ether(src=self.pg2.remote_mac,
dst=self.pg2.local_mac) /
IP(src=self.pg2.remote_hosts[1].ip4,
dst="239.1.1.1") /
UDP(sport=1234, dport=48879) /
VXLAN(vni=88, reserved2=0x80, gpid=112, flags=0x88) /
Ether(src=l['mac'], dst="ff:ff:ff:ff:ff:ff") /
ARP(op="who-has",
psrc=l['ip'], pdst=l['ip'],
hwsrc=l['mac'], hwdst="ff:ff:ff:ff:ff:ff"))
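            # a GARP (psrc == pdst) announces the EP's IP/MAC binding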
rx = self.send_and_expect(self.pg4, [p], self.pg0)
# the new TEP
tep1_sw_if_index = find_vxlan_gbp_tunnel(
self,
self.pg2.local_ip4,
self.pg2.remote_hosts[1].ip4,
99)
self.assertNotEqual(INDEX_INVALID, tep1_sw_if_index)
#
# the EP is learnt via the learnt TEP
# both from its MAC and its IP
#
self.assertTrue(find_gbp_endpoint(self,
vx_tun_l2_1.sw_if_index,
mac=l['mac']))
self.assertTrue(find_gbp_endpoint(self,
vx_tun_l2_1.sw_if_index,
ip=l['ip']))
#
# wait for the learnt endpoints to age out
#
for l in learnt:
self.wait_for_ep_timeout(vx_tun_l2_1.sw_if_index,
mac=l['mac'])
#
# Learn new EPs from L2 packets
#
for ii, l in enumerate(learnt):
# a packet with an sclass from a known EPG
# arriving on an unknown TEP
p = (Ether(src=self.pg2.remote_mac,
dst=self.pg2.local_mac) /
IP(src=self.pg2.remote_hosts[1].ip4,
dst=self.pg2.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=99, gpid=112, flags=0x88) /
Ether(src=l['mac'], dst=ep.mac) /
Raw(b'\xa5' * 100))
rx = self.send_and_expect(self.pg2, [p], self.pg0)
# the new TEP
tep1_sw_if_index = find_vxlan_gbp_tunnel(
self,
self.pg2.local_ip4,
self.pg2.remote_hosts[1].ip4,
99)
self.assertNotEqual(INDEX_INVALID, tep1_sw_if_index)
#
# the EP is learnt via the learnt TEP
# both from its MAC and its IP
#
self.assertTrue(find_gbp_endpoint(self,
vx_tun_l2_1.sw_if_index,
mac=l['mac']))
self.logger.info(self.vapi.cli("show gbp endpoint"))
self.logger.info(self.vapi.cli("show gbp vxlan"))
self.logger.info(self.vapi.cli("show vxlan-gbp tunnel"))
#
# wait for the learnt endpoints to age out
#
for l in learnt:
self.wait_for_ep_timeout(vx_tun_l2_1.sw_if_index,
mac=l['mac'])
#
        # repeat; the do-not-learn (D) bit is set so the EPs are not learnt
#
for l in learnt:
# a packet with an sclass from a known EPG
p = (Ether(src=self.pg2.remote_mac,
dst=self.pg2.local_mac) /
IP(src=self.pg2.remote_hosts[1].ip4,
dst=self.pg2.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=99, gpid=112, flags=0x88, gpflags="D") /
Ether(src=l['mac'], dst=ep.mac) /
IP(src=l['ip'], dst=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rx = self.send_and_expect(self.pg2, p * NUM_PKTS, self.pg0)
for l in learnt:
self.assertFalse(find_gbp_endpoint(self,
vx_tun_l2_1.sw_if_index,
mac=l['mac']))
#
# repeat
#
for l in learnt:
# a packet with an sclass from a known EPG
            # set a reserved bit in addition to the G and I bits;
            # reserved bits should not be checked on rx
p = (Ether(src=self.pg2.remote_mac,
dst=self.pg2.local_mac) /
IP(src=self.pg2.remote_hosts[1].ip4,
dst=self.pg2.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=99, gpid=112, flags=0xc8) /
Ether(src=l['mac'], dst=ep.mac) /
IP(src=l['ip'], dst=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rx = self.send_and_expect(self.pg2, p * NUM_PKTS, self.pg0)
self.assertTrue(find_gbp_endpoint(self,
vx_tun_l2_1.sw_if_index,
mac=l['mac']))
#
        # Static EP replies to the dynamically-learnt EPs
#
self.logger.info(self.vapi.cli("sh l2fib bd_id 1"))
for l in learnt:
p = (Ether(src=ep.mac, dst=l['mac']) /
IP(dst=l['ip'], src=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg0, p * 17, self.pg2)
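            # verify the VXLAN-GBP encap of the replies: outer IP is the
            # local/learnt TEP pair, VNI 99, gpid is the sender's sclass
            # (112), the A-bit (policy applied) is set, the D-bit is clear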
for rx in rxs:
self.assertEqual(rx[IP].src, self.pg2.local_ip4)
self.assertEqual(rx[IP].dst, self.pg2.remote_hosts[1].ip4)
self.assertEqual(rx[UDP].dport, 48879)
# the UDP source port is a random value for hashing
self.assertEqual(rx[VXLAN].gpid, 112)
self.assertEqual(rx[VXLAN].vni, 99)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
self.assertTrue(rx[VXLAN].gpflags.A)
self.assertFalse(rx[VXLAN].gpflags.D)
for l in learnt:
self.wait_for_ep_timeout(vx_tun_l2_1.sw_if_index,
mac=l['mac'])
#
# repeat in the other EPG
# there's no contract between 220 and 330, but the A-bit is set
# so the packet is cleared for delivery
#
for l in learnt:
# a packet with an sclass from a known EPG
p = (Ether(src=self.pg2.remote_mac,
dst=self.pg2.local_mac) /
IP(src=self.pg2.remote_hosts[1].ip4,
dst=self.pg2.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=99, gpid=113, flags=0x88, gpflags='A') /
Ether(src=l['mac'], dst=ep.mac) /
IP(src=l['ip'], dst=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rx = self.send_and_expect(self.pg2, p * NUM_PKTS, self.pg0)
self.assertTrue(find_gbp_endpoint(self,
vx_tun_l2_1.sw_if_index,
mac=l['mac']))
#
        # the static EP cannot reach the learnt EPs since there is no
        # contract; only test 1 EP as the others could time out
#
p = (Ether(src=ep.mac, dst=l['mac']) /
IP(dst=learnt[0]['ip'], src=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_assert_no_replies(self.pg0, [p])
#
# refresh the entries after the check for no replies above
#
for l in learnt:
# a packet with an sclass from a known EPG
p = (Ether(src=self.pg2.remote_mac,
dst=self.pg2.local_mac) /
IP(src=self.pg2.remote_hosts[1].ip4,
dst=self.pg2.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=99, gpid=113, flags=0x88, gpflags='A') /
Ether(src=l['mac'], dst=ep.mac) /
IP(src=l['ip'], dst=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rx = self.send_and_expect(self.pg2, p * NUM_PKTS, self.pg0)
self.assertTrue(find_gbp_endpoint(self,
vx_tun_l2_1.sw_if_index,
mac=l['mac']))
#
# Add the contract so they can talk
#
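        # a contract is keyed on (scope, src-sclass, dst-sclass): the ACL
        # selects the traffic, there is one VppGbpContractRule per ACL rule
        # giving the action, and the last argument lists the allowed
        # ethertypes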
acl = VppGbpAcl(self)
rule = acl.create_rule(permit_deny=1, proto=17)
rule2 = acl.create_rule(is_ipv6=1, permit_deny=1, proto=17)
acl_index = acl.add_vpp_config([rule, rule2])
c1 = VppGbpContract(
self, 401, epg_220.sclass, epg_330.sclass, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[])],
[ETH_P_IP, ETH_P_IPV6])
c1.add_vpp_config()
for l in learnt:
p = (Ether(src=ep.mac, dst=l['mac']) /
IP(dst=l['ip'], src=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_expect(self.pg0, [p], self.pg2)
#
# send UU packets from the local EP
#
self.logger.info(self.vapi.cli("sh gbp bridge"))
self.logger.info(self.vapi.cli("sh bridge-domain 1 detail"))
p_uu = (Ether(src=ep.mac, dst="00:11:11:11:11:11") /
IP(dst="10.0.0.133", src=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(ep.itf, [p_uu], gbd1.uu_fwd)
self.logger.info(self.vapi.cli("sh bridge 1 detail"))
p_bm = (Ether(src=ep.mac, dst="ff:ff:ff:ff:ff:ff") /
IP(dst="10.0.0.133", src=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect_only(ep.itf, [p_bm], tun_bm.mcast_itf)
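        # the B&M packet is flooded on the mcast tunnel: outer dst is
        # the mcast group, VNI 88 from tun_bm, and the A-bit is clear
        # since policy has not been applied on the flood path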
for rx in rxs:
self.assertEqual(rx[IP].src, self.pg4.local_ip4)
self.assertEqual(rx[IP].dst, "239.1.1.1")
self.assertEqual(rx[UDP].dport, 48879)
# the UDP source port is a random value for hashing
self.assertEqual(rx[VXLAN].gpid, 112)
self.assertEqual(rx[VXLAN].vni, 88)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
self.assertFalse(rx[VXLAN].gpflags.A)
self.assertFalse(rx[VXLAN].gpflags.D)
acl = VppGbpAcl(self)
rule = acl.create_rule(permit_deny=1, proto=17)
rule2 = acl.create_rule(is_ipv6=1, permit_deny=1, proto=17)
acl_index = acl.add_vpp_config([rule, rule2])
c2 = VppGbpContract(
self, 401, epg_330.sclass, epg_220.sclass, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[])],
[ETH_P_IP, ETH_P_IPV6])
c2.add_vpp_config()
for l in learnt:
self.wait_for_ep_timeout(vx_tun_l2_1.sw_if_index,
mac=l['mac'])
#
# Check v6 Endpoints learning
#
for l in learnt:
# a packet with an sclass from a known EPG
p = (Ether(src=self.pg2.remote_mac,
dst=self.pg2.local_mac) /
IP(src=self.pg2.remote_hosts[1].ip4,
dst=self.pg2.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=99, gpid=113, flags=0x88) /
Ether(src=l['mac'], dst=ep.mac) /
IPv6(src=l['ip6'], dst=ep.ip6) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rx = self.send_and_expect(self.pg2, p * NUM_PKTS, self.pg0)
rx = self.send_and_expect(self.pg2, p * NUM_PKTS, self.pg0)
self.assertTrue(find_gbp_endpoint(
self,
vx_tun_l2_1.sw_if_index,
ip=l['ip6'],
tep=[self.pg2.local_ip4,
self.pg2.remote_hosts[1].ip4]))
self.logger.info(self.vapi.cli("sh int"))
self.logger.info(self.vapi.cli("sh vxlan-gbp tunnel"))
self.logger.info(self.vapi.cli("sh gbp vxlan"))
self.logger.info(self.vapi.cli("sh gbp endpoint"))
self.logger.info(self.vapi.cli("sh gbp interface"))
#
# EP moves to a different TEP
#
for l in learnt:
# a packet with an sclass from a known EPG
p = (Ether(src=self.pg2.remote_mac,
dst=self.pg2.local_mac) /
IP(src=self.pg2.remote_hosts[2].ip4,
dst=self.pg2.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=99, gpid=113, flags=0x88) /
Ether(src=l['mac'], dst=ep.mac) /
IPv6(src=l['ip6'], dst=ep.ip6) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rx = self.send_and_expect(self.pg2, p * 1, self.pg0)
rx = self.send_and_expect(self.pg2, p * NUM_PKTS, self.pg0)
self.assertTrue(find_gbp_endpoint(
self,
vx_tun_l2_1.sw_if_index,
sclass=113,
mac=l['mac'],
tep=[self.pg2.local_ip4,
self.pg2.remote_hosts[2].ip4]))
#
# v6 remote EP reachability
#
for l in learnt:
p = (Ether(src=ep.mac, dst=l['mac']) /
IPv6(dst=l['ip6'], src=ep.ip6) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg0, p * NUM_PKTS, self.pg2)
for rx in rxs:
self.assertEqual(rx[IP].src, self.pg2.local_ip4)
self.assertEqual(rx[IP].dst, self.pg2.remote_hosts[2].ip4)
self.assertEqual(rx[UDP].dport, 48879)
# the UDP source port is a random value for hashing
self.assertEqual(rx[VXLAN].gpid, 112)
self.assertEqual(rx[VXLAN].vni, 99)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
self.assertTrue(rx[VXLAN].gpflags.A)
self.assertFalse(rx[VXLAN].gpflags.D)
self.assertEqual(rx[IPv6].dst, l['ip6'])
#
# EP changes sclass
#
for l in learnt:
# a packet with an sclass from a known EPG
p = (Ether(src=self.pg2.remote_mac,
dst=self.pg2.local_mac) /
IP(src=self.pg2.remote_hosts[2].ip4,
dst=self.pg2.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=99, gpid=112, flags=0x88) /
Ether(src=l['mac'], dst=ep.mac) /
IPv6(src=l['ip6'], dst=ep.ip6) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rx = self.send_and_expect(self.pg2, p * 1, self.pg0)
rx = self.send_and_expect(self.pg2, p * NUM_PKTS, self.pg0)
self.assertTrue(find_gbp_endpoint(
self,
vx_tun_l2_1.sw_if_index,
mac=l['mac'],
sclass=112,
tep=[self.pg2.local_ip4,
self.pg2.remote_hosts[2].ip4]))
#
# check reachability and contract intra-epg
#
allow_intra_class = self.statistics.get_err_counter(
'/err/gbp-policy-mac/allow-intra-sclass')
for l in learnt:
p = (Ether(src=ep.mac, dst=l['mac']) /
IPv6(dst=l['ip6'], src=ep.ip6) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg0, p * NUM_PKTS, self.pg2)
for rx in rxs:
self.assertEqual(rx[IP].src, self.pg2.local_ip4)
self.assertEqual(rx[IP].dst, self.pg2.remote_hosts[2].ip4)
self.assertEqual(rx[UDP].dport, 48879)
self.assertEqual(rx[VXLAN].gpid, 112)
self.assertEqual(rx[VXLAN].vni, 99)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
self.assertTrue(rx[VXLAN].gpflags.A)
self.assertFalse(rx[VXLAN].gpflags.D)
self.assertEqual(rx[IPv6].dst, l['ip6'])
allow_intra_class += NUM_PKTS
self.assert_error_counter_equal(
'/err/gbp-policy-mac/allow-intra-sclass',
allow_intra_class)
#
# clean up
#
for l in learnt:
self.wait_for_ep_timeout(vx_tun_l2_1.sw_if_index,
mac=l['mac'])
self.pg2.unconfig_ip4()
self.pg3.unconfig_ip4()
self.pg4.unconfig_ip4()
def test_gbp_contract(self):
""" GBP Contracts """
#
# Route Domains
#
gt4 = VppIpTable(self, 0)
gt4.add_vpp_config()
gt6 = VppIpTable(self, 0, is_ip6=True)
gt6.add_vpp_config()
rd0 = VppGbpRouteDomain(self, 0, 400, gt4, gt6, None, None)
rd0.add_vpp_config()
#
# Bridge Domains
#
bd1 = VppBridgeDomain(self, 1, arp_term=0)
bd2 = VppBridgeDomain(self, 2, arp_term=0)
bd1.add_vpp_config()
bd2.add_vpp_config()
gbd1 = VppGbpBridgeDomain(self, bd1, rd0, self.loop0)
gbd2 = VppGbpBridgeDomain(self, bd2, rd0, self.loop1)
gbd1.add_vpp_config()
gbd2.add_vpp_config()
#
# 3 EPGs, 2 of which share a BD.
#
epgs = [VppGbpEndpointGroup(self, 220, 1220, rd0, gbd1,
None, self.loop0,
"10.0.0.128", "2001:10::128"),
VppGbpEndpointGroup(self, 221, 1221, rd0, gbd1,
None, self.loop0,
"10.0.1.128", "2001:10:1::128"),
VppGbpEndpointGroup(self, 222, 1222, rd0, gbd2,
None, self.loop1,
"10.0.2.128", "2001:10:2::128")]
#
# 4 end-points, 2 in the same subnet, 3 in the same BD
#
eps = [VppGbpEndpoint(self, self.pg0,
epgs[0], None,
"10.0.0.1", "11.0.0.1",
"2001:10::1", "3001::1"),
VppGbpEndpoint(self, self.pg1,
epgs[0], None,
"10.0.0.2", "11.0.0.2",
"2001:10::2", "3001::2"),
VppGbpEndpoint(self, self.pg2,
epgs[1], None,
"10.0.1.1", "11.0.0.3",
"2001:10:1::1", "3001::3"),
VppGbpEndpoint(self, self.pg3,
epgs[2], None,
"10.0.2.1", "11.0.0.4",
"2001:10:2::1", "3001::4")]
#
# Config related to each of the EPGs
#
for epg in epgs:
# IP config on the BVI interfaces
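            # EPGs 220 and 221 share the same BVI (loop0), so bind
            # and configure it only once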
if epg != epgs[1]:
VppIpInterfaceBind(self, epg.bvi, epg.rd.t4).add_vpp_config()
VppIpInterfaceBind(self, epg.bvi, epg.rd.t6).add_vpp_config()
epg.bvi.set_mac(self.router_mac)
if_ip4 = VppIpInterfaceAddress(self, epg.bvi, epg.bvi_ip4, 32)
if_ip6 = VppIpInterfaceAddress(self, epg.bvi, epg.bvi_ip6, 128)
if_ip4.add_vpp_config()
if_ip6.add_vpp_config()
# add the BD ARP termination entry for BVI IP
epg.bd_arp_ip4 = VppBridgeDomainArpEntry(self, epg.bd.bd,
str(self.router_mac),
epg.bvi_ip4)
epg.bd_arp_ip4.add_vpp_config()
# EPG in VPP
epg.add_vpp_config()
#
# config ep
#
for ep in eps:
ep.add_vpp_config()
self.logger.info(self.vapi.cli("show gbp endpoint"))
self.logger.info(self.vapi.cli("show interface"))
self.logger.info(self.vapi.cli("show br"))
#
# Intra epg allowed without contract
#
pkt_intra_epg_220_to_220 = (Ether(src=self.pg0.remote_mac,
dst=self.pg1.remote_mac) /
IP(src=eps[0].ip4,
dst=eps[1].ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_expect_bridged(self.pg0,
pkt_intra_epg_220_to_220 * 65,
self.pg1)
pkt_intra_epg_220_to_220 = (Ether(src=self.pg0.remote_mac,
dst=self.pg1.remote_mac) /
IPv6(src=eps[0].ip6,
dst=eps[1].ip6) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_expect_bridged6(self.pg0,
pkt_intra_epg_220_to_220 * 65,
self.pg1)
#
# Inter epg denied without contract
#
pkt_inter_epg_220_to_221 = (Ether(src=self.pg0.remote_mac,
dst=self.pg2.remote_mac) /
IP(src=eps[0].ip4,
dst=eps[2].ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_assert_no_replies(self.pg0, pkt_inter_epg_220_to_221)
#
# A uni-directional contract from EPG 220 -> 221
#
acl = VppGbpAcl(self)
rule = acl.create_rule(permit_deny=1, proto=17)
rule2 = acl.create_rule(is_ipv6=1, permit_deny=1, proto=17)
rule3 = acl.create_rule(permit_deny=1, proto=1)
acl_index = acl.add_vpp_config([rule, rule2, rule3])
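        # the third rule (proto 1, ICMP) permits the router pings
        # exercised below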
c1 = VppGbpContract(
self, 400, epgs[0].sclass, epgs[1].sclass, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[])],
[ETH_P_IP, ETH_P_IPV6])
c1.add_vpp_config()
self.send_and_expect_bridged(eps[0].itf,
pkt_inter_epg_220_to_221 * 65,
eps[2].itf)
pkt_inter_epg_220_to_222 = (Ether(src=self.pg0.remote_mac,
dst=str(self.router_mac)) /
IP(src=eps[0].ip4,
dst=eps[3].ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_assert_no_replies(eps[0].itf,
pkt_inter_epg_220_to_222 * 65)
#
# ping router IP in different BD
#
pkt_router_ping_220_to_221 = (Ether(src=self.pg0.remote_mac,
dst=str(self.router_mac)) /
IP(src=eps[0].ip4,
dst=epgs[1].bvi_ip4) /
ICMP(type='echo-request'))
self.send_and_expect(self.pg0, [pkt_router_ping_220_to_221], self.pg0)
pkt_router_ping_220_to_221 = (Ether(src=self.pg0.remote_mac,
dst=str(self.router_mac)) /
IPv6(src=eps[0].ip6,
dst=epgs[1].bvi_ip6) /
ICMPv6EchoRequest())
self.send_and_expect(self.pg0, [pkt_router_ping_220_to_221], self.pg0)
#
# contract for the return direction
#
c2 = VppGbpContract(
self, 400, epgs[1].sclass, epgs[0].sclass, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[])],
[ETH_P_IP, ETH_P_IPV6])
c2.add_vpp_config()
self.send_and_expect_bridged(eps[0].itf,
pkt_inter_epg_220_to_221 * 65,
eps[2].itf)
pkt_inter_epg_221_to_220 = (Ether(src=self.pg2.remote_mac,
dst=self.pg0.remote_mac) /
IP(src=eps[2].ip4,
dst=eps[0].ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_expect_bridged(eps[2].itf,
pkt_inter_epg_221_to_220 * 65,
eps[0].itf)
pkt_inter_epg_221_to_220 = (Ether(src=self.pg2.remote_mac,
dst=str(self.router_mac)) /
IP(src=eps[2].ip4,
dst=eps[0].ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_expect_routed(eps[2].itf,
pkt_inter_epg_221_to_220 * 65,
eps[0].itf,
str(self.router_mac))
pkt_inter_epg_221_to_220 = (Ether(src=self.pg2.remote_mac,
dst=str(self.router_mac)) /
IPv6(src=eps[2].ip6,
dst=eps[0].ip6) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_expect_routed6(eps[2].itf,
pkt_inter_epg_221_to_220 * 65,
eps[0].itf,
str(self.router_mac))
#
# contract between 220 and 222 uni-direction
#
c3 = VppGbpContract(
self, 400, epgs[0].sclass, epgs[2].sclass, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[])],
[ETH_P_IP, ETH_P_IPV6])
c3.add_vpp_config()
self.send_and_expect(eps[0].itf,
pkt_inter_epg_220_to_222 * 65,
eps[3].itf)
c3.remove_vpp_config()
c1.remove_vpp_config()
c2.remove_vpp_config()
acl.remove_vpp_config()
def test_gbp_bd_drop_flags(self):
""" GBP BD drop flags """
#
# IP tables
#
gt4 = VppIpTable(self, 1)
gt4.add_vpp_config()
gt6 = VppIpTable(self, 1, is_ip6=True)
gt6.add_vpp_config()
rd1 = VppGbpRouteDomain(self, 1, 401, gt4, gt6)
rd1.add_vpp_config()
#
# a GBP bridge domain with a BVI only
#
bd1 = VppBridgeDomain(self, 1)
bd1.add_vpp_config()
gbd1 = VppGbpBridgeDomain(self, bd1, rd1, self.loop0,
None, None,
uu_drop=True, bm_drop=True)
gbd1.add_vpp_config()
self.logger.info(self.vapi.cli("sh bridge 1 detail"))
self.logger.info(self.vapi.cli("sh gbp bridge"))
# ... and has a /32 applied
ip_addr = VppIpInterfaceAddress(self, gbd1.bvi, "10.0.0.128", 32)
ip_addr.add_vpp_config()
#
# The Endpoint-group
#
epg_220 = VppGbpEndpointGroup(self, 220, 112, rd1, gbd1,
None, self.loop0,
"10.0.0.128",
"2001:10::128",
VppGbpEndpointRetention(2))
epg_220.add_vpp_config()
ep = VppGbpEndpoint(self, self.pg0,
epg_220, None,
"10.0.0.127", "11.0.0.127",
"2001:10::1", "3001::1")
ep.add_vpp_config()
#
# send UU/BM packet from the local EP with UU drop and BM drop enabled
# in bd
#
self.logger.info(self.vapi.cli("sh bridge 1 detail"))
self.logger.info(self.vapi.cli("sh gbp bridge"))
p_uu = (Ether(src=ep.mac, dst="00:11:11:11:11:11") /
IP(dst="10.0.0.133", src=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_assert_no_replies(ep.itf, [p_uu])
p_bm = (Ether(src=ep.mac, dst="ff:ff:ff:ff:ff:ff") /
IP(dst="10.0.0.133", src=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_assert_no_replies(ep.itf, [p_bm])
self.pg3.unconfig_ip4()
self.logger.info(self.vapi.cli("sh int"))
def test_gbp_bd_arp_flags(self):
""" GBP BD arp flags """
#
# IP tables
#
gt4 = VppIpTable(self, 1)
gt4.add_vpp_config()
gt6 = VppIpTable(self, 1, is_ip6=True)
gt6.add_vpp_config()
rd1 = VppGbpRouteDomain(self, 1, 401, gt4, gt6)
rd1.add_vpp_config()
#
# Pg4 hosts the IP6 UU-flood VXLAN tunnel
#
self.pg4.config_ip4()
self.pg4.resolve_arp()
#
# Add a mcast destination VXLAN-GBP tunnel for B&M traffic
#
tun_uu = VppVxlanGbpTunnel(self, self.pg4.local_ip4,
"239.1.1.1", 88,
mcast_itf=self.pg4)
tun_uu.add_vpp_config()
#
# a GBP bridge domain with a BVI and a UU-flood interface
#
bd1 = VppBridgeDomain(self, 1)
bd1.add_vpp_config()
gbd1 = VppGbpBridgeDomain(self, bd1, rd1, self.loop0,
tun_uu, None,
ucast_arp=True)
gbd1.add_vpp_config()
# ... and has a /32 applied
ip_addr = VppIpInterfaceAddress(self, gbd1.bvi, "10.0.0.128", 32)
ip_addr.add_vpp_config()
#
# The Endpoint-group
#
epg_220 = VppGbpEndpointGroup(self, 220, 112, rd1, gbd1,
None, self.loop0,
"10.0.0.128",
"2001:10::128",
VppGbpEndpointRetention(2))
epg_220.add_vpp_config()
ep = VppGbpEndpoint(self, self.pg0,
epg_220, None,
"10.0.0.127", "11.0.0.127",
"2001:10::1", "3001::1")
ep.add_vpp_config()
#
# send ARP packet from the local EP expect it on the uu interface
#
self.logger.info(self.vapi.cli("sh bridge 1 detail"))
self.logger.info(self.vapi.cli("sh gbp bridge"))
p_arp = (Ether(src=ep.mac, dst="ff:ff:ff:ff:ff:ff") /
ARP(op="who-has",
psrc=ep.ip4, pdst="10.0.0.99",
hwsrc=ep.mac,
hwdst="ff:ff:ff:ff:ff:ff"))
self.send_and_expect(ep.itf, [p_arp], self.pg4)
self.pg4.unconfig_ip4()
def test_gbp_learn_vlan_l2(self):
""" GBP L2 Endpoint w/ VLANs"""
ep_flags = VppEnum.vl_api_gbp_endpoint_flags_t
learnt = [{'mac': '00:00:11:11:11:01',
'ip': '10.0.0.1',
'ip6': '2001:10::2'},
{'mac': '00:00:11:11:11:02',
'ip': '10.0.0.2',
'ip6': '2001:10::3'}]
#
# IP tables
#
gt4 = VppIpTable(self, 1)
gt4.add_vpp_config()
gt6 = VppIpTable(self, 1, is_ip6=True)
gt6.add_vpp_config()
rd1 = VppGbpRouteDomain(self, 1, 401, gt4, gt6)
rd1.add_vpp_config()
#
# Pg2 hosts the vxlan tunnel, hosts on pg2 to act as TEPs
#
self.pg2.config_ip4()
self.pg2.resolve_arp()
self.pg2.generate_remote_hosts(4)
self.pg2.configure_ipv4_neighbors()
self.pg3.config_ip4()
self.pg3.resolve_arp()
#
# The EP will be on a vlan sub-interface
#
vlan_11 = VppDot1QSubint(self, self.pg0, 11)
vlan_11.admin_up()
self.vapi.l2_interface_vlan_tag_rewrite(
sw_if_index=vlan_11.sw_if_index, vtr_op=L2_VTR_OP.L2_POP_1,
push_dot1q=11)
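        # tag-rewrite pops the dot1q 11 tag on rx and pushes it back
        # on tx, so the bridge-domain switches untagged frames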
bd_uu_fwd = VppVxlanGbpTunnel(self, self.pg3.local_ip4,
self.pg3.remote_ip4, 116)
bd_uu_fwd.add_vpp_config()
#
# a GBP bridge domain with a BVI and a UU-flood interface
# The BD is marked as do not learn, so no endpoints are ever
# learnt in this BD.
#
bd1 = VppBridgeDomain(self, 1)
bd1.add_vpp_config()
gbd1 = VppGbpBridgeDomain(self, bd1, rd1, self.loop0, bd_uu_fwd,
learn=False)
gbd1.add_vpp_config()
self.logger.info(self.vapi.cli("sh bridge 1 detail"))
self.logger.info(self.vapi.cli("sh gbp bridge"))
# ... and has a /32 applied
ip_addr = VppIpInterfaceAddress(self, gbd1.bvi, "10.0.0.128", 32)
ip_addr.add_vpp_config()
#
# The Endpoint-group in which we are learning endpoints
#
epg_220 = VppGbpEndpointGroup(self, 220, 441, rd1, gbd1,
None, self.loop0,
"10.0.0.128",
"2001:10::128",
VppGbpEndpointRetention(2))
epg_220.add_vpp_config()
#
# The VXLAN GBP tunnel is a bridge-port and has L2 endpoint
# learning enabled
#
vx_tun_l2_1 = VppGbpVxlanTunnel(
self, 99, bd1.bd_id,
VppEnum.vl_api_gbp_vxlan_tunnel_mode_t.GBP_VXLAN_TUNNEL_MODE_L2,
self.pg2.local_ip4)
vx_tun_l2_1.add_vpp_config()
#
# A static endpoint that the learnt endpoints are trying to
# talk to
#
ep = VppGbpEndpoint(self, vlan_11,
epg_220, None,
"10.0.0.127", "11.0.0.127",
"2001:10::1", "3001::1")
ep.add_vpp_config()
self.assertTrue(find_route(self, ep.ip4, 32, table_id=1))
#
# Send to the static EP
#
for ii, l in enumerate(learnt):
# a packet with an sclass from a known EPG
# arriving on an unknown TEP
p = (Ether(src=self.pg2.remote_mac,
dst=self.pg2.local_mac) /
IP(src=self.pg2.remote_hosts[1].ip4,
dst=self.pg2.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=99, gpid=441, flags=0x88) /
Ether(src=l['mac'], dst=ep.mac) /
IP(src=l['ip'], dst=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg2, [p], self.pg0)
#
# packet to EP has the EP's vlan tag
#
for rx in rxs:
self.assertEqual(rx[Dot1Q].vlan, 11)
#
            # the EP is not learnt since the BD setting prevents it,
            # and no TEP is created either
#
self.assertFalse(find_gbp_endpoint(self,
vx_tun_l2_1.sw_if_index,
mac=l['mac']))
self.assertEqual(INDEX_INVALID,
find_vxlan_gbp_tunnel(
self,
self.pg2.local_ip4,
self.pg2.remote_hosts[1].ip4,
99))
self.assertEqual(len(self.vapi.gbp_endpoint_dump()), 1)
#
        # static EP sends to remotes; since the remotes were not learnt,
        # the packets are sent to the UU-fwd
#
for l in learnt:
p = (Ether(src=ep.mac, dst=l['mac']) /
Dot1Q(vlan=11) /
IP(dst=l['ip'], src=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg0, p * 17, self.pg3)
for rx in rxs:
self.assertEqual(rx[IP].src, self.pg3.local_ip4)
self.assertEqual(rx[IP].dst, self.pg3.remote_ip4)
self.assertEqual(rx[UDP].dport, 48879)
# the UDP source port is a random value for hashing
self.assertEqual(rx[VXLAN].gpid, 441)
self.assertEqual(rx[VXLAN].vni, 116)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
self.assertFalse(rx[VXLAN].gpflags.A)
self.assertFalse(rx[VXLAN].gpflags.D)
self.pg2.unconfig_ip4()
self.pg3.unconfig_ip4()
def test_gbp_learn_l3(self):
""" GBP L3 Endpoint Learning """
self.vapi.cli("set logging class gbp level debug")
ep_flags = VppEnum.vl_api_gbp_endpoint_flags_t
routed_dst_mac = "00:0c:0c:0c:0c:0c"
routed_src_mac = "00:22:bd:f8:19:ff"
learnt = [{'mac': '00:00:11:11:11:02',
'ip': '10.0.1.2',
'ip6': '2001:10::2'},
{'mac': '00:00:11:11:11:03',
'ip': '10.0.1.3',
'ip6': '2001:10::3'}]
#
# IP tables
#
t4 = VppIpTable(self, 1)
t4.add_vpp_config()
t6 = VppIpTable(self, 1, True)
t6.add_vpp_config()
tun_ip4_uu = VppVxlanGbpTunnel(self, self.pg4.local_ip4,
self.pg4.remote_ip4, 114)
tun_ip6_uu = VppVxlanGbpTunnel(self, self.pg4.local_ip4,
self.pg4.remote_ip4, 116)
tun_ip4_uu.add_vpp_config()
tun_ip6_uu.add_vpp_config()
rd1 = VppGbpRouteDomain(self, 2, 401, t4, t6, tun_ip4_uu, tun_ip6_uu)
rd1.add_vpp_config()
self.loop0.set_mac(self.router_mac)
#
# Bind the BVI to the RD
#
VppIpInterfaceBind(self, self.loop0, t4).add_vpp_config()
VppIpInterfaceBind(self, self.loop0, t6).add_vpp_config()
#
# Pg2 hosts the vxlan tunnel
# hosts on pg2 to act as TEPs
# pg3 is BD uu-fwd
# pg4 is RD uu-fwd
#
self.pg2.config_ip4()
self.pg2.resolve_arp()
self.pg2.generate_remote_hosts(4)
self.pg2.configure_ipv4_neighbors()
self.pg3.config_ip4()
self.pg3.resolve_arp()
self.pg4.config_ip4()
self.pg4.resolve_arp()
#
# a GBP bridge domain with a BVI and a UU-flood interface
#
bd1 = VppBridgeDomain(self, 1)
bd1.add_vpp_config()
gbd1 = VppGbpBridgeDomain(self, bd1, rd1, self.loop0, self.pg3)
gbd1.add_vpp_config()
self.logger.info(self.vapi.cli("sh bridge 1 detail"))
self.logger.info(self.vapi.cli("sh gbp bridge"))
self.logger.info(self.vapi.cli("sh gbp route"))
# ... and has a /32 and /128 applied
ip4_addr = VppIpInterfaceAddress(self, gbd1.bvi, "10.0.0.128", 32)
ip4_addr.add_vpp_config()
ip6_addr = VppIpInterfaceAddress(self, gbd1.bvi, "2001:10::128", 128)
ip6_addr.add_vpp_config()
#
# The Endpoint-group in which we are learning endpoints
#
epg_220 = VppGbpEndpointGroup(self, 220, 441, rd1, gbd1,
None, self.loop0,
"10.0.0.128",
"2001:10::128",
VppGbpEndpointRetention(2))
epg_220.add_vpp_config()
#
# The VXLAN GBP tunnel is in L3 mode with learning enabled
#
vx_tun_l3 = VppGbpVxlanTunnel(
self, 101, rd1.rd_id,
VppEnum.vl_api_gbp_vxlan_tunnel_mode_t.GBP_VXLAN_TUNNEL_MODE_L3,
self.pg2.local_ip4)
vx_tun_l3.add_vpp_config()
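        # in L3 mode the GBP-VXLAN tunnel is bound to the route-domain;
        # EPs learnt through it are installed as host routes in RD1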
#
# A static endpoint that the learnt endpoints are trying to
# talk to
#
ep = VppGbpEndpoint(self, self.pg0,
epg_220, None,
"10.0.0.127", "11.0.0.127",
"2001:10::1", "3001::1")
ep.add_vpp_config()
#
# learn some remote IPv4 EPs
#
for ii, l in enumerate(learnt):
# a packet with an sclass from a known EPG
# arriving on an unknown TEP
p = (Ether(src=self.pg2.remote_mac,
dst=self.pg2.local_mac) /
IP(src=self.pg2.remote_hosts[1].ip4,
dst=self.pg2.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=101, gpid=441, flags=0x88) /
Ether(src=l['mac'], dst="00:00:00:11:11:11") /
IP(src=l['ip'], dst=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rx = self.send_and_expect(self.pg2, [p], self.pg0)
# the new TEP
tep1_sw_if_index = find_vxlan_gbp_tunnel(
self,
self.pg2.local_ip4,
self.pg2.remote_hosts[1].ip4,
vx_tun_l3.vni)
self.assertNotEqual(INDEX_INVALID, tep1_sw_if_index)
# endpoint learnt via the parent GBP-vxlan interface
self.assertTrue(find_gbp_endpoint(self,
vx_tun_l3._sw_if_index,
ip=l['ip']))
#
# Static IPv4 EP replies to learnt
#
for l in learnt:
p = (Ether(src=ep.mac, dst=self.loop0.local_mac) /
IP(dst=l['ip'], src=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg0, p * 1, self.pg2)
for rx in rxs:
self.assertEqual(rx[IP].src, self.pg2.local_ip4)
self.assertEqual(rx[IP].dst, self.pg2.remote_hosts[1].ip4)
self.assertEqual(rx[UDP].dport, 48879)
# the UDP source port is a random value for hashing
self.assertEqual(rx[VXLAN].gpid, 441)
self.assertEqual(rx[VXLAN].vni, 101)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
self.assertTrue(rx[VXLAN].gpflags.A)
self.assertFalse(rx[VXLAN].gpflags.D)
inner = rx[VXLAN].payload
self.assertEqual(inner[Ether].src, routed_src_mac)
self.assertEqual(inner[Ether].dst, routed_dst_mac)
self.assertEqual(inner[IP].src, ep.ip4)
self.assertEqual(inner[IP].dst, l['ip'])
for l in learnt:
self.assertFalse(find_gbp_endpoint(self,
tep1_sw_if_index,
ip=l['ip']))
#
# learn some remote IPv6 EPs
#
for ii, l in enumerate(learnt):
# a packet with an sclass from a known EPG
# arriving on an unknown TEP
p = (Ether(src=self.pg2.remote_mac,
dst=self.pg2.local_mac) /
IP(src=self.pg2.remote_hosts[1].ip4,
dst=self.pg2.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=101, gpid=441, flags=0x88) /
Ether(src=l['mac'], dst="00:00:00:11:11:11") /
IPv6(src=l['ip6'], dst=ep.ip6) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rx = self.send_and_expect(self.pg2, [p], self.pg0)
# the new TEP
tep1_sw_if_index = find_vxlan_gbp_tunnel(
self,
self.pg2.local_ip4,
self.pg2.remote_hosts[1].ip4,
vx_tun_l3.vni)
self.assertNotEqual(INDEX_INVALID, tep1_sw_if_index)
self.logger.info(self.vapi.cli("show gbp bridge"))
self.logger.info(self.vapi.cli("show vxlan-gbp tunnel"))
self.logger.info(self.vapi.cli("show gbp vxlan"))
self.logger.info(self.vapi.cli("show int addr"))
# endpoint learnt via the TEP
self.assertTrue(find_gbp_endpoint(self, ip=l['ip6']))
self.logger.info(self.vapi.cli("show gbp endpoint"))
self.logger.info(self.vapi.cli("show ip fib index 1 %s" % l['ip']))
#
# Static EP replies to learnt
#
for l in learnt:
p = (Ether(src=ep.mac, dst=self.loop0.local_mac) /
IPv6(dst=l['ip6'], src=ep.ip6) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg0, p * NUM_PKTS, self.pg2)
for rx in rxs:
self.assertEqual(rx[IP].src, self.pg2.local_ip4)
self.assertEqual(rx[IP].dst, self.pg2.remote_hosts[1].ip4)
self.assertEqual(rx[UDP].dport, 48879)
# the UDP source port is a random value for hashing
self.assertEqual(rx[VXLAN].gpid, 441)
self.assertEqual(rx[VXLAN].vni, 101)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
self.assertTrue(rx[VXLAN].gpflags.A)
self.assertFalse(rx[VXLAN].gpflags.D)
inner = rx[VXLAN].payload
self.assertEqual(inner[Ether].src, routed_src_mac)
self.assertEqual(inner[Ether].dst, routed_dst_mac)
self.assertEqual(inner[IPv6].src, ep.ip6)
self.assertEqual(inner[IPv6].dst, l['ip6'])
self.logger.info(self.vapi.cli("sh gbp endpoint"))
for l in learnt:
self.wait_for_ep_timeout(ip=l['ip'])
#
# Static sends to unknown EP with no route
#
p = (Ether(src=ep.mac, dst=self.loop0.local_mac) /
IP(dst="10.0.0.99", src=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_assert_no_replies(self.pg0, [p])
#
# Add a route to static EP's v4 and v6 subnet
#
se_10_24 = VppGbpSubnet(
self, rd1, "10.0.0.0", 24,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_TRANSPORT)
se_10_24.add_vpp_config()
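        # a TRANSPORT subnet forwards traffic for unknown EPs in the
        # subnet towards the RD's uu-fwd tunnels rather than dropping it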
#
# static pings router
#
p = (Ether(src=ep.mac, dst=self.loop0.local_mac) /
IP(dst=epg_220.bvi_ip4, src=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_expect(self.pg0, p * NUM_PKTS, self.pg0)
p = (Ether(src=ep.mac, dst=self.loop0.local_mac) /
IPv6(dst=epg_220.bvi_ip6, src=ep.ip6) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
self.send_and_expect(self.pg0, p * NUM_PKTS, self.pg0)
#
# packets to address in the subnet are sent on the uu-fwd
#
p = (Ether(src=ep.mac, dst=self.loop0.local_mac) /
IP(dst="10.0.0.99", src=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg0, [p], self.pg4)
for rx in rxs:
self.assertEqual(rx[IP].src, self.pg4.local_ip4)
self.assertEqual(rx[IP].dst, self.pg4.remote_ip4)
self.assertEqual(rx[UDP].dport, 48879)
# the UDP source port is a random value for hashing
self.assertEqual(rx[VXLAN].gpid, 441)
self.assertEqual(rx[VXLAN].vni, 114)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
# policy is not applied to packets sent to the uu-fwd interfaces
self.assertFalse(rx[VXLAN].gpflags.A)
self.assertFalse(rx[VXLAN].gpflags.D)
#
# learn some remote IPv4 EPs
#
for ii, l in enumerate(learnt):
# a packet with an sclass from a known EPG
# arriving on an unknown TEP
p = (Ether(src=self.pg2.remote_mac,
dst=self.pg2.local_mac) /
IP(src=self.pg2.remote_hosts[2].ip4,
dst=self.pg2.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=101, gpid=441, flags=0x88) /
Ether(src=l['mac'], dst="00:00:00:11:11:11") /
IP(src=l['ip'], dst=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rx = self.send_and_expect(self.pg2, [p], self.pg0)
# the new TEP
tep1_sw_if_index = find_vxlan_gbp_tunnel(
self,
self.pg2.local_ip4,
self.pg2.remote_hosts[2].ip4,
vx_tun_l3.vni)
self.assertNotEqual(INDEX_INVALID, tep1_sw_if_index)
# endpoint learnt via the parent GBP-vxlan interface
self.assertTrue(find_gbp_endpoint(self,
vx_tun_l3._sw_if_index,
ip=l['ip']))
#
# Add a remote endpoint from the API
#
rep_88 = VppGbpEndpoint(self, vx_tun_l3,
epg_220, None,
"10.0.0.88", "11.0.0.88",
"2001:10::88", "3001::88",
ep_flags.GBP_API_ENDPOINT_FLAG_REMOTE,
self.pg2.local_ip4,
self.pg2.remote_hosts[2].ip4,
mac=None)
rep_88.add_vpp_config()
#
        # Add a remote endpoint from the API that matches an existing one;
        # the API source is lower priority, hence packets are sent to the
        # DP-learnt TEP
#
rep_2 = VppGbpEndpoint(self, vx_tun_l3,
epg_220, None,
learnt[0]['ip'], "11.0.0.101",
learnt[0]['ip6'], "3001::101",
ep_flags.GBP_API_ENDPOINT_FLAG_REMOTE,
self.pg2.local_ip4,
self.pg2.remote_hosts[1].ip4,
mac=None)
rep_2.add_vpp_config()
#
        # Add a route to the learnt EP's v4 subnet; packets should be
        # sent on the v4/v6 uu-fwd interfaces respectively
#
se_10_1_24 = VppGbpSubnet(
self, rd1, "10.0.1.0", 24,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_TRANSPORT)
se_10_1_24.add_vpp_config()
self.logger.info(self.vapi.cli("show gbp endpoint"))
ips = ["10.0.0.88", learnt[0]['ip']]
for ip in ips:
p = (Ether(src=ep.mac, dst=self.loop0.local_mac) /
IP(dst=ip, src=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg0, p * NUM_PKTS, self.pg2)
for rx in rxs:
self.assertEqual(rx[IP].src, self.pg2.local_ip4)
self.assertEqual(rx[IP].dst, self.pg2.remote_hosts[2].ip4)
self.assertEqual(rx[UDP].dport, 48879)
# the UDP source port is a random value for hashing
self.assertEqual(rx[VXLAN].gpid, 441)
self.assertEqual(rx[VXLAN].vni, 101)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
self.assertTrue(rx[VXLAN].gpflags.A)
self.assertFalse(rx[VXLAN].gpflags.D)
inner = rx[VXLAN].payload
self.assertEqual(inner[Ether].src, routed_src_mac)
self.assertEqual(inner[Ether].dst, routed_dst_mac)
self.assertEqual(inner[IP].src, ep.ip4)
self.assertEqual(inner[IP].dst, ip)
#
        # remove the API remote EPs; only the API-sourced EP is gone,
        # the DP-learnt one remains
#
rep_88.remove_vpp_config()
rep_2.remove_vpp_config()
self.assertTrue(find_gbp_endpoint(self, ip=rep_2.ip4))
p = (Ether(src=ep.mac, dst=self.loop0.local_mac) /
IP(src=ep.ip4, dst=rep_2.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg0, [p], self.pg2)
self.assertFalse(find_gbp_endpoint(self, ip=rep_88.ip4))
p = (Ether(src=ep.mac, dst=self.loop0.local_mac) /
IP(src=ep.ip4, dst=rep_88.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg0, [p], self.pg4)
#
        # the testcase cannot end with the registered EP still present
        # (because it is DP-learnt), so wait until it is removed
#
self.wait_for_ep_timeout(ip=rep_88.ip4)
self.wait_for_ep_timeout(ip=rep_2.ip4)
#
        # Same as above: learn a remote EP via both CP and DP, but this
        # time remove the DP one first; expect the CP data to remain
#
rep_3 = VppGbpEndpoint(self, vx_tun_l3,
epg_220, None,
"10.0.1.4", "11.0.0.103",
"2001::10:3", "3001::103",
ep_flags.GBP_API_ENDPOINT_FLAG_REMOTE,
self.pg2.local_ip4,
self.pg2.remote_hosts[1].ip4,
mac=None)
rep_3.add_vpp_config()
p = (Ether(src=self.pg2.remote_mac,
dst=self.pg2.local_mac) /
IP(src=self.pg2.remote_hosts[2].ip4,
dst=self.pg2.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=101, gpid=441, flags=0x88) /
Ether(src=l['mac'], dst="00:00:00:11:11:11") /
IP(src="10.0.1.4", dst=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg2, p * NUM_PKTS, self.pg0)
self.assertTrue(find_gbp_endpoint(self,
vx_tun_l3._sw_if_index,
ip=rep_3.ip4,
tep=[self.pg2.local_ip4,
self.pg2.remote_hosts[2].ip4]))
p = (Ether(src=ep.mac, dst=self.loop0.local_mac) /
IP(dst="10.0.1.4", src=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg0, p * NUM_PKTS, self.pg2)
# host 2 is the DP learned TEP
for rx in rxs:
self.assertEqual(rx[IP].src, self.pg2.local_ip4)
self.assertEqual(rx[IP].dst, self.pg2.remote_hosts[2].ip4)
self.wait_for_ep_timeout(ip=rep_3.ip4,
tep=[self.pg2.local_ip4,
self.pg2.remote_hosts[2].ip4])
rxs = self.send_and_expect(self.pg0, p * NUM_PKTS, self.pg2)
# host 1 is the CP learned TEP
for rx in rxs:
self.assertEqual(rx[IP].src, self.pg2.local_ip4)
self.assertEqual(rx[IP].dst, self.pg2.remote_hosts[1].ip4)
#
# shutdown with learnt endpoint present
#
p = (Ether(src=self.pg2.remote_mac,
dst=self.pg2.local_mac) /
IP(src=self.pg2.remote_hosts[1].ip4,
dst=self.pg2.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=101, gpid=441, flags=0x88) /
Ether(src=l['mac'], dst="00:00:00:11:11:11") /
IP(src=learnt[1]['ip'], dst=ep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rx = self.send_and_expect(self.pg2, [p], self.pg0)
# endpoint learnt via the parent GBP-vxlan interface
self.assertTrue(find_gbp_endpoint(self,
vx_tun_l3._sw_if_index,
ip=l['ip']))
#
# TODO
# remote endpoint becomes local
#
self.pg2.unconfig_ip4()
self.pg3.unconfig_ip4()
self.pg4.unconfig_ip4()
def test_gbp_redirect(self):
""" GBP Endpoint Redirect """
self.vapi.cli("set logging class gbp level debug")
ep_flags = VppEnum.vl_api_gbp_endpoint_flags_t
routed_dst_mac = "00:0c:0c:0c:0c:0c"
routed_src_mac = "00:22:bd:f8:19:ff"
learnt = [{'mac': '00:00:11:11:11:02',
'ip': '10.0.1.2',
'ip6': '2001:10::2'},
{'mac': '00:00:11:11:11:03',
'ip': '10.0.1.3',
'ip6': '2001:10::3'}]
#
# IP tables
#
t4 = VppIpTable(self, 1)
t4.add_vpp_config()
t6 = VppIpTable(self, 1, True)
t6.add_vpp_config()
rd1 = VppGbpRouteDomain(self, 2, 402, t4, t6)
rd1.add_vpp_config()
self.loop0.set_mac(self.router_mac)
#
# Bind the BVI to the RD
#
VppIpInterfaceBind(self, self.loop0, t4).add_vpp_config()
VppIpInterfaceBind(self, self.loop0, t6).add_vpp_config()
#
# Pg7 hosts a BD's UU-fwd
#
self.pg7.config_ip4()
self.pg7.resolve_arp()
#
# a GBP bridge domains for the EPs
#
bd1 = VppBridgeDomain(self, 1)
bd1.add_vpp_config()
gbd1 = VppGbpBridgeDomain(self, bd1, rd1, self.loop0)
gbd1.add_vpp_config()
bd2 = VppBridgeDomain(self, 2)
bd2.add_vpp_config()
gbd2 = VppGbpBridgeDomain(self, bd2, rd1, self.loop1)
gbd2.add_vpp_config()
# ... and has a /32 and /128 applied
ip4_addr = VppIpInterfaceAddress(self, gbd1.bvi, "10.0.0.128", 32)
ip4_addr.add_vpp_config()
ip6_addr = VppIpInterfaceAddress(self, gbd1.bvi, "2001:10::128", 128)
ip6_addr.add_vpp_config()
ip4_addr = VppIpInterfaceAddress(self, gbd2.bvi, "10.0.1.128", 32)
ip4_addr.add_vpp_config()
ip6_addr = VppIpInterfaceAddress(self, gbd2.bvi, "2001:11::128", 128)
ip6_addr.add_vpp_config()
#
# The Endpoint-groups in which we are learning endpoints
#
epg_220 = VppGbpEndpointGroup(self, 220, 440, rd1, gbd1,
None, gbd1.bvi,
"10.0.0.128",
"2001:10::128",
VppGbpEndpointRetention(2))
epg_220.add_vpp_config()
epg_221 = VppGbpEndpointGroup(self, 221, 441, rd1, gbd2,
None, gbd2.bvi,
"10.0.1.128",
"2001:11::128",
VppGbpEndpointRetention(2))
epg_221.add_vpp_config()
epg_222 = VppGbpEndpointGroup(self, 222, 442, rd1, gbd1,
None, gbd1.bvi,
"10.0.2.128",
"2001:12::128",
VppGbpEndpointRetention(2))
epg_222.add_vpp_config()
#
# a GBP bridge domains for the SEPs
#
bd_uu1 = VppVxlanGbpTunnel(self, self.pg7.local_ip4,
self.pg7.remote_ip4, 116)
bd_uu1.add_vpp_config()
bd_uu2 = VppVxlanGbpTunnel(self, self.pg7.local_ip4,
self.pg7.remote_ip4, 117)
bd_uu2.add_vpp_config()
bd3 = VppBridgeDomain(self, 3)
bd3.add_vpp_config()
gbd3 = VppGbpBridgeDomain(self, bd3, rd1, self.loop2,
bd_uu1, learn=False)
gbd3.add_vpp_config()
bd4 = VppBridgeDomain(self, 4)
bd4.add_vpp_config()
gbd4 = VppGbpBridgeDomain(self, bd4, rd1, self.loop3,
bd_uu2, learn=False)
gbd4.add_vpp_config()
#
# EPGs in which the service endpoints exist
#
epg_320 = VppGbpEndpointGroup(self, 320, 550, rd1, gbd3,
None, gbd1.bvi,
"12.0.0.128",
"4001:10::128",
VppGbpEndpointRetention(2))
epg_320.add_vpp_config()
epg_321 = VppGbpEndpointGroup(self, 321, 551, rd1, gbd4,
None, gbd2.bvi,
"12.0.1.128",
"4001:11::128",
VppGbpEndpointRetention(2))
epg_321.add_vpp_config()
#
# three local endpoints
#
ep1 = VppGbpEndpoint(self, self.pg0,
epg_220, None,
"10.0.0.1", "11.0.0.1",
"2001:10::1", "3001:10::1")
ep1.add_vpp_config()
ep2 = VppGbpEndpoint(self, self.pg1,
epg_221, None,
"10.0.1.1", "11.0.1.1",
"2001:11::1", "3001:11::1")
ep2.add_vpp_config()
ep3 = VppGbpEndpoint(self, self.pg2,
epg_222, None,
"10.0.2.2", "11.0.2.2",
"2001:12::1", "3001:12::1")
ep3.add_vpp_config()
#
# service endpoints
#
sep1 = VppGbpEndpoint(self, self.pg3,
epg_320, None,
"12.0.0.1", "13.0.0.1",
"4001:10::1", "5001:10::1")
sep1.add_vpp_config()
sep2 = VppGbpEndpoint(self, self.pg4,
epg_320, None,
"12.0.0.2", "13.0.0.2",
"4001:10::2", "5001:10::2")
sep2.add_vpp_config()
sep3 = VppGbpEndpoint(self, self.pg5,
epg_321, None,
"12.0.1.1", "13.0.1.1",
"4001:11::1", "5001:11::1")
sep3.add_vpp_config()
# this EP is not installed immediately
sep4 = VppGbpEndpoint(self, self.pg6,
epg_321, None,
"12.0.1.2", "13.0.1.2",
"4001:11::2", "5001:11::2")
#
        # L2 switched packets between local EPs in different EPGs;
        # different dest ports on each so they are LB-hashed differently
#
p4 = [(Ether(src=ep1.mac, dst=ep3.mac) /
IP(src=ep1.ip4, dst=ep3.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100)),
(Ether(src=ep3.mac, dst=ep1.mac) /
IP(src=ep3.ip4, dst=ep1.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))]
p6 = [(Ether(src=ep1.mac, dst=ep3.mac) /
IPv6(src=ep1.ip6, dst=ep3.ip6) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100)),
(Ether(src=ep3.mac, dst=ep1.mac) /
IPv6(src=ep3.ip6, dst=ep1.ip6) /
UDP(sport=1234, dport=1230) /
Raw(b'\xa5' * 100))]
# should be dropped since no contract yet
self.send_and_assert_no_replies(self.pg0, [p4[0]])
self.send_and_assert_no_replies(self.pg0, [p6[0]])
#
# Add a contract with a rule to load-balance redirect via SEP1 and SEP2
# one of the next-hops is via an EP that is not known
#
acl = VppGbpAcl(self)
rule4 = acl.create_rule(permit_deny=1, proto=17)
rule6 = acl.create_rule(is_ipv6=1, permit_deny=1, proto=17)
acl_index = acl.add_vpp_config([rule4, rule6])
#
# test the src-ip hash mode
#
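        # each REDIRECT rule load-balances across its next-hop SEPs
        # according to the hash mode: v4 flows via SEP1/SEP2, v6 flows
        # via SEP3/SEP4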
c1 = VppGbpContract(
self, 402, epg_220.sclass, epg_222.sclass, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_REDIRECT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[VppGbpContractNextHop(sep1.vmac, sep1.epg.bd,
sep1.ip4, sep1.epg.rd),
VppGbpContractNextHop(sep2.vmac, sep2.epg.bd,
sep2.ip4, sep2.epg.rd)]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_REDIRECT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[VppGbpContractNextHop(sep3.vmac, sep3.epg.bd,
sep3.ip6, sep3.epg.rd),
VppGbpContractNextHop(sep4.vmac, sep4.epg.bd,
sep4.ip6, sep4.epg.rd)])],
[ETH_P_IP, ETH_P_IPV6])
c1.add_vpp_config()
c2 = VppGbpContract(
self, 402, epg_222.sclass, epg_220.sclass, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_REDIRECT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[VppGbpContractNextHop(sep1.vmac, sep1.epg.bd,
sep1.ip4, sep1.epg.rd),
VppGbpContractNextHop(sep2.vmac, sep2.epg.bd,
sep2.ip4, sep2.epg.rd)]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_REDIRECT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[VppGbpContractNextHop(sep3.vmac, sep3.epg.bd,
sep3.ip6, sep3.epg.rd),
VppGbpContractNextHop(sep4.vmac, sep4.epg.bd,
sep4.ip6, sep4.epg.rd)])],
[ETH_P_IP, ETH_P_IPV6])
c2.add_vpp_config()
#
        # send again with the contract in place; now packets arrive
# at SEP1 or SEP2 depending on the hashing
#
rxs = self.send_and_expect(self.pg0, p4[0] * 17, sep1.itf)
for rx in rxs:
self.assertEqual(rx[Ether].src, routed_src_mac)
self.assertEqual(rx[Ether].dst, sep1.mac)
self.assertEqual(rx[IP].src, ep1.ip4)
self.assertEqual(rx[IP].dst, ep3.ip4)
rxs = self.send_and_expect(self.pg2, p4[1] * 17, sep2.itf)
for rx in rxs:
self.assertEqual(rx[Ether].src, routed_src_mac)
self.assertEqual(rx[Ether].dst, sep2.mac)
self.assertEqual(rx[IP].src, ep3.ip4)
self.assertEqual(rx[IP].dst, ep1.ip4)
rxs = self.send_and_expect(self.pg0, p6[0] * 17, self.pg7)
for rx in rxs:
self.assertEqual(rx[Ether].src, self.pg7.local_mac)
self.assertEqual(rx[Ether].dst, self.pg7.remote_mac)
self.assertEqual(rx[IP].src, self.pg7.local_ip4)
self.assertEqual(rx[IP].dst, self.pg7.remote_ip4)
self.assertEqual(rx[VXLAN].vni, 117)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
# redirect policy has been applied
self.assertTrue(rx[VXLAN].gpflags.A)
self.assertFalse(rx[VXLAN].gpflags.D)
inner = rx[VXLAN].payload
self.assertEqual(inner[Ether].src, routed_src_mac)
self.assertEqual(inner[Ether].dst, sep4.mac)
self.assertEqual(inner[IPv6].src, ep1.ip6)
self.assertEqual(inner[IPv6].dst, ep3.ip6)
rxs = self.send_and_expect(self.pg2, p6[1] * 17, sep3.itf)
for rx in rxs:
self.assertEqual(rx[Ether].src, routed_src_mac)
self.assertEqual(rx[Ether].dst, sep3.mac)
self.assertEqual(rx[IPv6].src, ep3.ip6)
self.assertEqual(rx[IPv6].dst, ep1.ip6)
#
# programme the unknown EP
#
sep4.add_vpp_config()
rxs = self.send_and_expect(self.pg0, p6[0] * 17, sep4.itf)
for rx in rxs:
self.assertEqual(rx[Ether].src, routed_src_mac)
self.assertEqual(rx[Ether].dst, sep4.mac)
self.assertEqual(rx[IPv6].src, ep1.ip6)
self.assertEqual(rx[IPv6].dst, ep3.ip6)
#
# and revert back to unprogrammed
#
sep4.remove_vpp_config()
rxs = self.send_and_expect(self.pg0, p6[0] * 17, self.pg7)
for rx in rxs:
self.assertEqual(rx[Ether].src, self.pg7.local_mac)
self.assertEqual(rx[Ether].dst, self.pg7.remote_mac)
self.assertEqual(rx[IP].src, self.pg7.local_ip4)
self.assertEqual(rx[IP].dst, self.pg7.remote_ip4)
self.assertEqual(rx[VXLAN].vni, 117)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
# redirect policy has been applied
self.assertTrue(rx[VXLAN].gpflags.A)
self.assertFalse(rx[VXLAN].gpflags.D)
inner = rx[VXLAN].payload
self.assertEqual(inner[Ether].src, routed_src_mac)
self.assertEqual(inner[Ether].dst, sep4.mac)
self.assertEqual(inner[IPv6].src, ep1.ip6)
self.assertEqual(inner[IPv6].dst, ep3.ip6)
c1.remove_vpp_config()
c2.remove_vpp_config()
#
# test the symmetric hash mode
#
c1 = VppGbpContract(
self, 402, epg_220.sclass, epg_222.sclass, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_REDIRECT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SYMMETRIC,
[VppGbpContractNextHop(sep1.vmac, sep1.epg.bd,
sep1.ip4, sep1.epg.rd),
VppGbpContractNextHop(sep2.vmac, sep2.epg.bd,
sep2.ip4, sep2.epg.rd)]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_REDIRECT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SYMMETRIC,
[VppGbpContractNextHop(sep3.vmac, sep3.epg.bd,
sep3.ip6, sep3.epg.rd),
VppGbpContractNextHop(sep4.vmac, sep4.epg.bd,
sep4.ip6, sep4.epg.rd)])],
[ETH_P_IP, ETH_P_IPV6])
c1.add_vpp_config()
c2 = VppGbpContract(
self, 402, epg_222.sclass, epg_220.sclass, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_REDIRECT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SYMMETRIC,
[VppGbpContractNextHop(sep1.vmac, sep1.epg.bd,
sep1.ip4, sep1.epg.rd),
VppGbpContractNextHop(sep2.vmac, sep2.epg.bd,
sep2.ip4, sep2.epg.rd)]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_REDIRECT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SYMMETRIC,
[VppGbpContractNextHop(sep3.vmac, sep3.epg.bd,
sep3.ip6, sep3.epg.rd),
VppGbpContractNextHop(sep4.vmac, sep4.epg.bd,
sep4.ip6, sep4.epg.rd)])],
[ETH_P_IP, ETH_P_IPV6])
c2.add_vpp_config()
#
        # send again with the contract in place; now packets arrive
# at SEP1 for both directions
#
rxs = self.send_and_expect(self.pg0, p4[0] * 17, sep1.itf)
for rx in rxs:
self.assertEqual(rx[Ether].src, routed_src_mac)
self.assertEqual(rx[Ether].dst, sep1.mac)
self.assertEqual(rx[IP].src, ep1.ip4)
self.assertEqual(rx[IP].dst, ep3.ip4)
rxs = self.send_and_expect(self.pg2, p4[1] * 17, sep1.itf)
for rx in rxs:
self.assertEqual(rx[Ether].src, routed_src_mac)
self.assertEqual(rx[Ether].dst, sep1.mac)
self.assertEqual(rx[IP].src, ep3.ip4)
self.assertEqual(rx[IP].dst, ep1.ip4)
#
# programme the unknown EP for the L3 tests
#
sep4.add_vpp_config()
#
        # L3 switched packets between local EPs in different EPGs;
        # different dest ports on each so they are LB-hashed differently
#
p4 = [(Ether(src=ep1.mac, dst=str(self.router_mac)) /
IP(src=ep1.ip4, dst=ep2.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100)),
(Ether(src=ep2.mac, dst=str(self.router_mac)) /
IP(src=ep2.ip4, dst=ep1.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))]
p6 = [(Ether(src=ep1.mac, dst=str(self.router_mac)) /
IPv6(src=ep1.ip6, dst=ep2.ip6) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100)),
(Ether(src=ep2.mac, dst=str(self.router_mac)) /
IPv6(src=ep2.ip6, dst=ep1.ip6) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))]
c3 = VppGbpContract(
self, 402, epg_220.sclass, epg_221.sclass, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_REDIRECT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SYMMETRIC,
[VppGbpContractNextHop(sep1.vmac, sep1.epg.bd,
sep1.ip4, sep1.epg.rd),
VppGbpContractNextHop(sep2.vmac, sep2.epg.bd,
sep2.ip4, sep2.epg.rd)]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_REDIRECT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SYMMETRIC,
[VppGbpContractNextHop(sep3.vmac, sep3.epg.bd,
sep3.ip6, sep3.epg.rd),
VppGbpContractNextHop(sep4.vmac, sep4.epg.bd,
sep4.ip6, sep4.epg.rd)])],
[ETH_P_IP, ETH_P_IPV6])
c3.add_vpp_config()
rxs = self.send_and_expect(self.pg0, p4[0] * 17, sep1.itf)
for rx in rxs:
self.assertEqual(rx[Ether].src, routed_src_mac)
self.assertEqual(rx[Ether].dst, sep1.mac)
self.assertEqual(rx[IP].src, ep1.ip4)
self.assertEqual(rx[IP].dst, ep2.ip4)
#
# learn a remote EP in EPG 221
        # packets coming from unknown remote EPs will be learnt & redirected
#
vx_tun_l3 = VppGbpVxlanTunnel(
self, 444, rd1.rd_id,
VppEnum.vl_api_gbp_vxlan_tunnel_mode_t.GBP_VXLAN_TUNNEL_MODE_L3,
self.pg2.local_ip4)
vx_tun_l3.add_vpp_config()
c4 = VppGbpContract(
self, 402, epg_221.sclass, epg_220.sclass, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_REDIRECT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[VppGbpContractNextHop(sep1.vmac, sep1.epg.bd,
sep1.ip4, sep1.epg.rd),
VppGbpContractNextHop(sep2.vmac, sep2.epg.bd,
sep2.ip4, sep2.epg.rd)]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_REDIRECT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[VppGbpContractNextHop(sep3.vmac, sep3.epg.bd,
sep3.ip6, sep3.epg.rd),
VppGbpContractNextHop(sep4.vmac, sep4.epg.bd,
sep4.ip6, sep4.epg.rd)])],
[ETH_P_IP, ETH_P_IPV6])
c4.add_vpp_config()
p = (Ether(src=self.pg7.remote_mac,
dst=self.pg7.local_mac) /
IP(src=self.pg7.remote_ip4,
dst=self.pg7.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=444, gpid=441, flags=0x88) /
Ether(src="00:22:22:22:22:33", dst=str(self.router_mac)) /
IP(src="10.0.0.88", dst=ep1.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
# unknown remote EP to local EP redirected
rxs = self.send_and_expect(self.pg7, [p], sep1.itf)
for rx in rxs:
self.assertEqual(rx[Ether].src, routed_src_mac)
self.assertEqual(rx[Ether].dst, sep1.mac)
self.assertEqual(rx[IP].src, "10.0.0.88")
self.assertEqual(rx[IP].dst, ep1.ip4)
# endpoint learnt via the parent GBP-vxlan interface
self.assertTrue(find_gbp_endpoint(self,
vx_tun_l3._sw_if_index,
ip="10.0.0.88"))
p = (Ether(src=self.pg7.remote_mac,
dst=self.pg7.local_mac) /
IP(src=self.pg7.remote_ip4,
dst=self.pg7.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=444, gpid=441, flags=0x88) /
Ether(src="00:22:22:22:22:33", dst=str(self.router_mac)) /
IPv6(src="2001:10::88", dst=ep1.ip6) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
# unknown remote EP to local EP redirected (ipv6)
rxs = self.send_and_expect(self.pg7, [p], sep3.itf)
for rx in rxs:
self.assertEqual(rx[Ether].src, routed_src_mac)
self.assertEqual(rx[Ether].dst, sep3.mac)
self.assertEqual(rx[IPv6].src, "2001:10::88")
self.assertEqual(rx[IPv6].dst, ep1.ip6)
# endpoint learnt via the parent GBP-vxlan interface
self.assertTrue(find_gbp_endpoint(self,
vx_tun_l3._sw_if_index,
ip="2001:10::88"))
#
# L3 switch from local to remote EP
#
p4 = [(Ether(src=ep1.mac, dst=str(self.router_mac)) /
IP(src=ep1.ip4, dst="10.0.0.88") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))]
p6 = [(Ether(src=ep1.mac, dst=str(self.router_mac)) /
IPv6(src=ep1.ip6, dst="2001:10::88") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))]
rxs = self.send_and_expect(self.pg0, p4[0] * 17, sep1.itf)
for rx in rxs:
self.assertEqual(rx[Ether].src, routed_src_mac)
self.assertEqual(rx[Ether].dst, sep1.mac)
self.assertEqual(rx[IP].src, ep1.ip4)
self.assertEqual(rx[IP].dst, "10.0.0.88")
rxs = self.send_and_expect(self.pg0, p6[0] * 17, sep4.itf)
for rx in rxs:
self.assertEqual(rx[Ether].src, routed_src_mac)
self.assertEqual(rx[Ether].dst, sep4.mac)
self.assertEqual(rx[IPv6].src, ep1.ip6)
self.assertEqual(rx[IPv6].dst, "2001:10::88")
#
# test the dst-ip hash mode
#
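# DST_IP hash mode selects the SEP from the destination address
# alone, so all traffic to one destination picks the same SEP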
c5 = VppGbpContract(
self, 402, epg_220.sclass, epg_221.sclass, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_REDIRECT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_DST_IP,
[VppGbpContractNextHop(sep1.vmac, sep1.epg.bd,
sep1.ip4, sep1.epg.rd),
VppGbpContractNextHop(sep2.vmac, sep2.epg.bd,
sep2.ip4, sep2.epg.rd)]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_REDIRECT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_DST_IP,
[VppGbpContractNextHop(sep3.vmac, sep3.epg.bd,
sep3.ip6, sep3.epg.rd),
VppGbpContractNextHop(sep4.vmac, sep4.epg.bd,
sep4.ip6, sep4.epg.rd)])],
[ETH_P_IP, ETH_P_IPV6])
c5.add_vpp_config()
rxs = self.send_and_expect(self.pg0, p4[0] * 17, sep1.itf)
for rx in rxs:
self.assertEqual(rx[Ether].src, routed_src_mac)
self.assertEqual(rx[Ether].dst, sep1.mac)
self.assertEqual(rx[IP].src, ep1.ip4)
self.assertEqual(rx[IP].dst, "10.0.0.88")
rxs = self.send_and_expect(self.pg0, p6[0] * 17, sep3.itf)
for rx in rxs:
self.assertEqual(rx[Ether].src, routed_src_mac)
self.assertEqual(rx[Ether].dst, sep3.mac)
self.assertEqual(rx[IPv6].src, ep1.ip6)
self.assertEqual(rx[IPv6].dst, "2001:10::88")
#
# a programmed remote SEP in EPG 320
#
# gbp vxlan tunnel for the remote SEP
vx_tun_l3_sep = VppGbpVxlanTunnel(
self, 555, rd1.rd_id,
VppEnum.vl_api_gbp_vxlan_tunnel_mode_t.GBP_VXLAN_TUNNEL_MODE_L3,
self.pg2.local_ip4)
vx_tun_l3_sep.add_vpp_config()
# remote SEP
sep5 = VppGbpEndpoint(self, vx_tun_l3_sep,
epg_320, None,
"12.0.0.10", "13.0.0.10",
"4001:10::10", "5001:10::10",
ep_flags.GBP_API_ENDPOINT_FLAG_REMOTE,
self.pg7.local_ip4,
self.pg7.remote_ip4,
mac=None)
sep5.add_vpp_config()
#
# local l3out redirect tests
#
# add local l3out
# the external bd
self.loop4.set_mac(self.router_mac)
VppIpInterfaceBind(self, self.loop4, t4).add_vpp_config()
VppIpInterfaceBind(self, self.loop4, t6).add_vpp_config()
ebd = VppBridgeDomain(self, 100)
ebd.add_vpp_config()
gebd = VppGbpBridgeDomain(self, ebd, rd1, self.loop4, None, None)
gebd.add_vpp_config()
# the external epg
eepg = VppGbpEndpointGroup(self, 888, 765, rd1, gebd,
None, gebd.bvi,
"10.1.0.128",
"2001:10:1::128",
VppGbpEndpointRetention(2))
eepg.add_vpp_config()
# add subnets to BVI
VppIpInterfaceAddress(
self,
gebd.bvi,
"10.1.0.128",
24).add_vpp_config()
VppIpInterfaceAddress(
self,
gebd.bvi,
"2001:10:1::128",
64).add_vpp_config()
# ... which are L3-out subnets
VppGbpSubnet(self, rd1, "10.1.0.0", 24,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_L3_OUT,
sclass=765).add_vpp_config()
VppGbpSubnet(self, rd1, "2001:10:1::128", 64,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_L3_OUT,
sclass=765).add_vpp_config()
# external endpoints
VppL2Vtr(self, self.vlan_100, L2_VTR_OP.L2_POP_1).add_vpp_config()
eep1 = VppGbpEndpoint(self, self.vlan_100, eepg, None, "10.1.0.1",
"11.1.0.1", "2001:10:1::1", "3001:10:1::1",
ep_flags.GBP_API_ENDPOINT_FLAG_EXTERNAL)
eep1.add_vpp_config()
VppL2Vtr(self, self.vlan_101, L2_VTR_OP.L2_POP_1).add_vpp_config()
eep2 = VppGbpEndpoint(self, self.vlan_101, eepg, None, "10.1.0.2",
"11.1.0.2", "2001:10:1::2", "3001:10:1::2",
ep_flags.GBP_API_ENDPOINT_FLAG_EXTERNAL)
eep2.add_vpp_config()
# external subnets reachable through eep1 and eep2 respectively
VppIpRoute(self, "10.220.0.0", 24,
[VppRoutePath(eep1.ip4, eep1.epg.bvi.sw_if_index)],
table_id=t4.table_id).add_vpp_config()
VppGbpSubnet(self, rd1, "10.220.0.0", 24,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_L3_OUT,
sclass=4220).add_vpp_config()
VppIpRoute(self, "10:220::", 64,
[VppRoutePath(eep1.ip6, eep1.epg.bvi.sw_if_index)],
table_id=t6.table_id).add_vpp_config()
VppGbpSubnet(self, rd1, "10:220::", 64,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_L3_OUT,
sclass=4220).add_vpp_config()
VppIpRoute(self, "10.221.0.0", 24,
[VppRoutePath(eep2.ip4, eep2.epg.bvi.sw_if_index)],
table_id=t4.table_id).add_vpp_config()
VppGbpSubnet(self, rd1, "10.221.0.0", 24,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_L3_OUT,
sclass=4221).add_vpp_config()
VppIpRoute(self, "10:221::", 64,
[VppRoutePath(eep2.ip6, eep2.epg.bvi.sw_if_index)],
table_id=t6.table_id).add_vpp_config()
VppGbpSubnet(self, rd1, "10:221::", 64,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_L3_OUT,
sclass=4221).add_vpp_config()
#
# l3out redirect to remote (known, then unknown) SEP
#
# packets from 1 external subnet to the other
p = [(Ether(src=eep1.mac, dst=self.router_mac) /
Dot1Q(vlan=100) /
IP(src="10.220.0.17", dst="10.221.0.65") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100)),
(Ether(src=eep1.mac, dst=self.router_mac) /
Dot1Q(vlan=100) /
IPv6(src="10:220::17", dst="10:221::65") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))]
# packets should be dropped in the absence of a contract
self.send_and_assert_no_replies(self.pg0, p)
# contract redirecting to sep5
VppGbpContract(
self, 402, 4220, 4221, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_REDIRECT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_DST_IP,
[VppGbpContractNextHop(sep5.vmac, sep5.epg.bd,
sep5.ip4, sep5.epg.rd)]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_REDIRECT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_DST_IP,
[VppGbpContractNextHop(sep5.vmac, sep5.epg.bd,
sep5.ip6, sep5.epg.rd)])],
[ETH_P_IP, ETH_P_IPV6]).add_vpp_config()
rxs = self.send_and_expect(self.pg0, p, self.pg7)
for rx, tx in zip(rxs, p):
self.assertEqual(rx[Ether].src, self.pg7.local_mac)
self.assertEqual(rx[Ether].dst, self.pg7.remote_mac)
self.assertEqual(rx[IP].src, self.pg7.local_ip4)
self.assertEqual(rx[IP].dst, self.pg7.remote_ip4)
# this should use the programmed remote leaf TEP
self.assertEqual(rx[VXLAN].vni, 555)
self.assertEqual(rx[VXLAN].gpid, 4220)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
# redirect policy has been applied
self.assertTrue(rx[VXLAN].gpflags.A)
self.assertTrue(rx[VXLAN].gpflags.D)
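# the D (don't-learn) bit is set since the source is an
# external l3out subnet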
rxip = rx[VXLAN][Ether].payload
txip = tx[Dot1Q].payload
self.assertEqual(rxip.src, txip.src)
self.assertEqual(rxip.dst, txip.dst)
# remove the remote SEP: it is now unknown, so traffic should go
# to the spine proxy
sep5.remove_vpp_config()
rxs = self.send_and_expect(self.pg0, p, self.pg7)
for rx, tx in zip(rxs, p):
self.assertEqual(rx[Ether].src, self.pg7.local_mac)
self.assertEqual(rx[Ether].dst, self.pg7.remote_mac)
self.assertEqual(rx[IP].src, self.pg7.local_ip4)
self.assertEqual(rx[IP].dst, self.pg7.remote_ip4)
# this should use the spine proxy TEP
self.assertEqual(rx[VXLAN].vni, epg_320.bd.uu_fwd.vni)
self.assertEqual(rx[VXLAN].gpid, 4220)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
# redirect policy has been applied
self.assertTrue(rx[VXLAN].gpflags.A)
self.assertTrue(rx[VXLAN].gpflags.D)
rxip = rx[VXLAN][Ether].payload
txip = tx[Dot1Q].payload
self.assertEqual(rxip.src, txip.src)
self.assertEqual(rxip.dst, txip.dst)
#
# l3out redirect to local SEP
#
# change the contract between l3out to redirect to local SEPs
# instead of remote SEP
VppGbpContract(
self, 402, 4220, 4221, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_REDIRECT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_DST_IP,
[VppGbpContractNextHop(sep1.vmac, sep1.epg.bd,
sep1.ip4, sep1.epg.rd)]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_REDIRECT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_DST_IP,
[VppGbpContractNextHop(sep1.vmac, sep1.epg.bd,
sep1.ip6, sep1.epg.rd)])],
[ETH_P_IP, ETH_P_IPV6]).add_vpp_config()
rxs = self.send_and_expect(self.pg0, p, sep1.itf)
for rx, tx in zip(rxs, p):
self.assertEqual(rx[Ether].src, routed_src_mac)
self.assertEqual(rx[Ether].dst, sep1.mac)
rxip = rx[Ether].payload
txip = tx[Ether].payload
self.assertEqual(rxip.src, txip.src)
self.assertEqual(rxip.dst, txip.dst)
#
# redirect remote EP to remote (known then unknown) SEP
#
# remote SEP known again
sep5.add_vpp_config()
# contract to redirect to learnt SEP
VppGbpContract(
self, 402, epg_221.sclass, epg_222.sclass, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_REDIRECT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_DST_IP,
[VppGbpContractNextHop(sep5.vmac, sep5.epg.bd,
sep5.ip4, sep5.epg.rd)]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_REDIRECT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_DST_IP,
[VppGbpContractNextHop(sep5.vmac, sep5.epg.bd,
sep5.ip6, sep5.epg.rd)])],
[ETH_P_IP, ETH_P_IPV6]).add_vpp_config()
# packets from unknown EP 221 to known EP in EPG 222
# should be redirected to known remote SEP
base = (Ether(src=self.pg7.remote_mac, dst=self.pg7.local_mac) /
IP(src=self.pg7.remote_ip4, dst=self.pg7.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=444, gpid=441, flags=0x88) /
Ether(src="00:22:22:22:22:44", dst=str(self.router_mac)))
p = [(base /
IP(src="10.0.1.100", dst=ep3.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100)),
(base /
IPv6(src="2001:10::100", dst=ep3.ip6) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))]
# unknown remote EP to local EP redirected to known remote SEP
rxs = self.send_and_expect(self.pg7, p, self.pg7)
for rx, tx in zip(rxs, p):
self.assertEqual(rx[Ether].src, self.pg7.local_mac)
self.assertEqual(rx[Ether].dst, self.pg7.remote_mac)
self.assertEqual(rx[IP].src, self.pg7.local_ip4)
self.assertEqual(rx[IP].dst, self.pg7.remote_ip4)
# this should use the programmed remote leaf TEP
self.assertEqual(rx[VXLAN].vni, 555)
self.assertEqual(rx[VXLAN].gpid, epg_221.sclass)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
# redirect policy has been applied
self.assertTrue(rx[VXLAN].gpflags.A)
self.assertFalse(rx[VXLAN].gpflags.D)
rxip = rx[VXLAN][Ether].payload
txip = tx[VXLAN][Ether].payload
self.assertEqual(rxip.src, txip.src)
self.assertEqual(rxip.dst, txip.dst)
# endpoint learnt via the parent GBP-vxlan interface
self.assertTrue(find_gbp_endpoint(self,
vx_tun_l3._sw_if_index,
ip="10.0.1.100"))
self.assertTrue(find_gbp_endpoint(self,
vx_tun_l3._sw_if_index,
ip="2001:10::100"))
# remove the remote SEP again: it is unknown once more, so traffic
# should go to the spine proxy
sep5.remove_vpp_config()
# remote EP (coming from spine proxy) to local EP redirected
# towards the now-unknown remote SEP via the spine proxy
rxs = self.send_and_expect(self.pg7, p, self.pg7)
for rx, tx in zip(rxs, p):
self.assertEqual(rx[Ether].src, self.pg7.local_mac)
self.assertEqual(rx[Ether].dst, self.pg7.remote_mac)
self.assertEqual(rx[IP].src, self.pg7.local_ip4)
self.assertEqual(rx[IP].dst, self.pg7.remote_ip4)
# this should use the spine proxy TEP
self.assertEqual(rx[VXLAN].vni, epg_320.bd.uu_fwd.vni)
self.assertEqual(rx[VXLAN].gpid, epg_221.sclass)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
# redirect policy has been applied
self.assertTrue(rx[VXLAN].gpflags.A)
self.assertFalse(rx[VXLAN].gpflags.D)
rxip = rx[VXLAN][Ether].payload
txip = tx[VXLAN][Ether].payload
self.assertEqual(rxip.src, txip.src)
self.assertEqual(rxip.dst, txip.dst)
#
# cleanup
#
self.pg7.unconfig_ip4()
def test_gbp_redirect_extended(self):
""" GBP Endpoint Redirect Extended """
self.vapi.cli("set logging class gbp level debug")
ep_flags = VppEnum.vl_api_gbp_endpoint_flags_t
routed_dst_mac = "00:0c:0c:0c:0c:0c"
routed_src_mac = "00:22:bd:f8:19:ff"
learnt = [{'mac': '00:00:11:11:11:02',
'ip': '10.0.1.2',
'ip6': '2001:10::2'},
{'mac': '00:00:11:11:11:03',
'ip': '10.0.1.3',
'ip6': '2001:10::3'}]
#
# IP tables
#
t4 = VppIpTable(self, 1)
t4.add_vpp_config()
t6 = VppIpTable(self, 1, True)
t6.add_vpp_config()
# create IPv4 and IPv6 RD UU VxLAN-GBP TEP and bind them to the right
# VRF
rd_uu4 = VppVxlanGbpTunnel(
self,
self.pg7.local_ip4,
self.pg7.remote_ip4,
114,
mode=(VppEnum.vl_api_vxlan_gbp_api_tunnel_mode_t.
VXLAN_GBP_API_TUNNEL_MODE_L3))
rd_uu4.add_vpp_config()
VppIpInterfaceBind(self, rd_uu4, t4).add_vpp_config()
rd_uu6 = VppVxlanGbpTunnel(
self,
self.pg7.local_ip4,
self.pg7.remote_ip4,
115,
mode=(VppEnum.vl_api_vxlan_gbp_api_tunnel_mode_t.
VXLAN_GBP_API_TUNNEL_MODE_L3))
rd_uu6.add_vpp_config()
VppIpInterfaceBind(self, rd_uu6, t4).add_vpp_config()
rd1 = VppGbpRouteDomain(self, 2, 402, t4, t6, rd_uu4, rd_uu6)
rd1.add_vpp_config()
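# routed traffic to endpoints that are not (yet) known egresses
# via the RD's UU tunnels (vni 114 for ip4, 115 for ip6)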
self.loop0.set_mac(self.router_mac)
self.loop1.set_mac(self.router_mac)
self.loop2.set_mac(self.router_mac)
#
# Bind the BVI to the RD
#
VppIpInterfaceBind(self, self.loop0, t4).add_vpp_config()
VppIpInterfaceBind(self, self.loop0, t6).add_vpp_config()
VppIpInterfaceBind(self, self.loop1, t4).add_vpp_config()
VppIpInterfaceBind(self, self.loop1, t6).add_vpp_config()
VppIpInterfaceBind(self, self.loop2, t4).add_vpp_config()
VppIpInterfaceBind(self, self.loop2, t6).add_vpp_config()
#
# Pg7 hosts a BD's UU-fwd
#
self.pg7.config_ip4()
self.pg7.resolve_arp()
#
# GBP bridge domains for the EPs
#
bd1 = VppBridgeDomain(self, 1)
bd1.add_vpp_config()
gbd1 = VppGbpBridgeDomain(self, bd1, rd1, self.loop0)
gbd1.add_vpp_config()
bd2 = VppBridgeDomain(self, 2)
bd2.add_vpp_config()
gbd2 = VppGbpBridgeDomain(self, bd2, rd1, self.loop1)
gbd2.add_vpp_config()
# ... and has a /32 and /128 applied
ip4_addr1 = VppIpInterfaceAddress(self, gbd1.bvi, "10.0.0.128", 32)
ip4_addr1.add_vpp_config()
ip6_addr1 = VppIpInterfaceAddress(self, gbd1.bvi, "2001:10::128", 128)
ip6_addr1.add_vpp_config()
ip4_addr2 = VppIpInterfaceAddress(self, gbd2.bvi, "10.0.1.128", 32)
ip4_addr2.add_vpp_config()
ip6_addr2 = VppIpInterfaceAddress(self, gbd2.bvi, "2001:11::128", 128)
ip6_addr2.add_vpp_config()
#
# The Endpoint-groups
#
epg_220 = VppGbpEndpointGroup(self, 220, 440, rd1, gbd1,
None, gbd1.bvi,
"10.0.0.128",
"2001:10::128",
VppGbpEndpointRetention(2))
epg_220.add_vpp_config()
epg_221 = VppGbpEndpointGroup(self, 221, 441, rd1, gbd2,
None, gbd2.bvi,
"10.0.1.128",
"2001:11::128",
VppGbpEndpointRetention(2))
epg_221.add_vpp_config()
#
# a GBP bridge domain for the SEPs
#
bd_uu3 = VppVxlanGbpTunnel(self, self.pg7.local_ip4,
self.pg7.remote_ip4, 116)
bd_uu3.add_vpp_config()
bd3 = VppBridgeDomain(self, 3)
bd3.add_vpp_config()
gbd3 = VppGbpBridgeDomain(self, bd3, rd1, self.loop2,
bd_uu3, learn=False)
gbd3.add_vpp_config()
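# learn=False: SEP endpoints in this BD are statically programmed
# rather than learnt from the data-plane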
ip4_addr3 = VppIpInterfaceAddress(self, gbd3.bvi, "12.0.0.128", 32)
ip4_addr3.add_vpp_config()
ip6_addr3 = VppIpInterfaceAddress(self, gbd3.bvi, "4001:10::128", 128)
ip6_addr3.add_vpp_config()
#
# self.logger.info(self.vapi.cli("show gbp bridge"))
# self.logger.info(self.vapi.cli("show vxlan-gbp tunnel"))
# self.logger.info(self.vapi.cli("show gbp vxlan"))
# self.logger.info(self.vapi.cli("show int addr"))
#
#
# EPGs in which the service endpoints exist
#
epg_320 = VppGbpEndpointGroup(self, 320, 550, rd1, gbd3,
None, gbd3.bvi,
"12.0.0.128",
"4001:10::128",
VppGbpEndpointRetention(2))
epg_320.add_vpp_config()
#
# endpoints
#
ep1 = VppGbpEndpoint(self, self.pg0,
epg_220, None,
"10.0.0.1", "11.0.0.1",
"2001:10::1", "3001:10::1")
ep1.add_vpp_config()
ep2 = VppGbpEndpoint(self, self.pg1,
epg_221, None,
"10.0.1.1", "11.0.1.1",
"2001:11::1", "3001:11::1")
ep2.add_vpp_config()
#
# service endpoints
#
sep1 = VppGbpEndpoint(self, self.pg3,
epg_320, None,
"12.0.0.1", "13.0.0.1",
"4001:10::1", "5001:10::1")
sep2 = VppGbpEndpoint(self, self.pg4,
epg_320, None,
"12.0.0.2", "13.0.0.2",
"4001:10::2", "5001:10::2")
# sep1 and sep2 are not added to config yet
# they are unknown for now
#
# add routes to EPG subnets
#
VppGbpSubnet(self, rd1, "10.0.0.0", 24,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_TRANSPORT
).add_vpp_config()
VppGbpSubnet(self, rd1, "10.0.1.0", 24,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_TRANSPORT
).add_vpp_config()
#
# Local host to known local host in different BD
# with SFC contract (source and destination are in
# one node and service endpoint in another node)
#
p4 = [(Ether(src=ep1.mac, dst=str(self.router_mac)) /
IP(src=ep1.ip4, dst=ep2.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100)),
(Ether(src=ep2.mac, dst=str(self.router_mac)) /
IP(src=ep2.ip4, dst=ep1.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))]
p6 = [(Ether(src=ep1.mac, dst=str(self.router_mac)) /
IPv6(src=ep1.ip6, dst=ep2.ip6) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100)),
(Ether(src=ep2.mac, dst=str(self.router_mac)) /
IPv6(src=ep2.ip6, dst=ep1.ip6) /
UDP(sport=1234, dport=1230) /
Raw(b'\xa5' * 100))]
# should be dropped since no contract yet
self.send_and_assert_no_replies(self.pg0, [p4[0]])
self.send_and_assert_no_replies(self.pg0, [p6[0]])
#
# Add contracts with a rule to redirect via SEP1
# the next-hop is via an EP that is not yet known
#
acl = VppGbpAcl(self)
rule4 = acl.create_rule(permit_deny=1, proto=17)
rule6 = acl.create_rule(is_ipv6=1, permit_deny=1, proto=17)
acl_index = acl.add_vpp_config([rule4, rule6])
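# proto 17 is UDP: the ACL permits UDP for both address families
# and is referenced by the contracts below via acl_index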
#
# test the symmetric hash mode
#
c1 = VppGbpContract(
self, 402, epg_220.sclass, epg_221.sclass, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_REDIRECT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SYMMETRIC,
[VppGbpContractNextHop(sep1.vmac, sep1.epg.bd,
sep1.ip4, sep1.epg.rd)]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_REDIRECT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SYMMETRIC,
[VppGbpContractNextHop(sep1.vmac, sep1.epg.bd,
sep1.ip6, sep1.epg.rd)])],
[ETH_P_IP, ETH_P_IPV6])
c1.add_vpp_config()
c2 = VppGbpContract(
self, 402, epg_221.sclass, epg_220.sclass, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_REDIRECT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SYMMETRIC,
[VppGbpContractNextHop(sep1.vmac, sep1.epg.bd,
sep1.ip4, sep1.epg.rd)]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_REDIRECT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SYMMETRIC,
[VppGbpContractNextHop(sep1.vmac, sep1.epg.bd,
sep1.ip6, sep1.epg.rd)])],
[ETH_P_IP, ETH_P_IPV6])
c2.add_vpp_config()
# ep1 <--> ep2 redirected through sep1
# sep1 is unknown
# packets are redirected to the SEP BD and then go out via its UU-fwd
rxs = self.send_and_expect(self.pg0, p4[0] * 17, self.pg7)
for rx in rxs:
self.assertEqual(rx[Ether].src, self.pg7.local_mac)
self.assertEqual(rx[Ether].dst, self.pg7.remote_mac)
self.assertEqual(rx[IP].src, self.pg7.local_ip4)
self.assertEqual(rx[IP].dst, self.pg7.remote_ip4)
self.assertEqual(rx[VXLAN].vni, 116)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
# redirect policy has been applied
self.assertTrue(rx[VXLAN].gpflags.A)
self.assertFalse(rx[VXLAN].gpflags.D)
inner = rx[VXLAN].payload
self.assertEqual(inner[Ether].src, routed_src_mac)
self.assertEqual(inner[Ether].dst, sep1.mac)
self.assertEqual(inner[IP].src, ep1.ip4)
self.assertEqual(inner[IP].dst, ep2.ip4)
rxs = self.send_and_expect(self.pg1, p4[1] * 17, self.pg7)
for rx in rxs:
self.assertEqual(rx[Ether].src, self.pg7.local_mac)
self.assertEqual(rx[Ether].dst, self.pg7.remote_mac)
self.assertEqual(rx[IP].src, self.pg7.local_ip4)
self.assertEqual(rx[IP].dst, self.pg7.remote_ip4)
self.assertEqual(rx[VXLAN].vni, 116)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
# redirect policy has been applied
self.assertTrue(rx[VXLAN].gpflags.A)
self.assertFalse(rx[VXLAN].gpflags.D)
inner = rx[VXLAN].payload
self.assertEqual(inner[Ether].src, routed_src_mac)
self.assertEqual(inner[Ether].dst, sep1.mac)
self.assertEqual(inner[IP].src, ep2.ip4)
self.assertEqual(inner[IP].dst, ep1.ip4)
rxs = self.send_and_expect(self.pg0, p6[0] * 17, self.pg7)
for rx in rxs:
self.assertEqual(rx[Ether].src, self.pg7.local_mac)
self.assertEqual(rx[Ether].dst, self.pg7.remote_mac)
self.assertEqual(rx[IP].src, self.pg7.local_ip4)
self.assertEqual(rx[IP].dst, self.pg7.remote_ip4)
self.assertEqual(rx[VXLAN].vni, 116)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
# redirect policy has been applied
inner = rx[VXLAN].payload
self.assertEqual(inner[Ether].src, routed_src_mac)
self.assertEqual(inner[Ether].dst, sep1.mac)
self.assertEqual(inner[IPv6].src, ep1.ip6)
self.assertEqual(inner[IPv6].dst, ep2.ip6)
rxs = self.send_and_expect(self.pg1, p6[1] * 17, self.pg7)
for rx in rxs:
self.assertEqual(rx[Ether].src, self.pg7.local_mac)
self.assertEqual(rx[Ether].dst, self.pg7.remote_mac)
self.assertEqual(rx[IP].src, self.pg7.local_ip4)
self.assertEqual(rx[IP].dst, self.pg7.remote_ip4)
self.assertEqual(rx[VXLAN].vni, 116)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
# redirect policy has been applied
self.assertTrue(rx[VXLAN].gpflags.A)
self.assertFalse(rx[VXLAN].gpflags.D)
inner = rx[VXLAN].payload
self.assertEqual(inner[Ether].src, routed_src_mac)
self.assertEqual(inner[Ether].dst, sep1.mac)
self.assertEqual(inner[IPv6].src, ep2.ip6)
self.assertEqual(inner[IPv6].dst, ep1.ip6)
# configure sep1: it is now local
# packets between ep1 and ep2 are redirected locally
sep1.add_vpp_config()
rxs = self.send_and_expect(self.pg0, p4[0] * 17, sep1.itf)
for rx in rxs:
self.assertEqual(rx[Ether].src, routed_src_mac)
self.assertEqual(rx[Ether].dst, sep1.mac)
self.assertEqual(rx[IP].src, ep1.ip4)
self.assertEqual(rx[IP].dst, ep2.ip4)
rxs = self.send_and_expect(self.pg1, p6[1] * 17, sep1.itf)
for rx in rxs:
self.assertEqual(rx[Ether].src, routed_src_mac)
self.assertEqual(rx[Ether].dst, sep1.mac)
self.assertEqual(rx[IPv6].src, ep2.ip6)
self.assertEqual(rx[IPv6].dst, ep1.ip6)
# packet coming from the l2 spine-proxy to sep1
p = (Ether(src=self.pg7.remote_mac,
dst=self.pg7.local_mac) /
IP(src=self.pg7.remote_ip4,
dst=self.pg7.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=116, gpid=440, gpflags=0x08, flags=0x88) /
Ether(src=str(self.router_mac), dst=sep1.mac) /
IP(src=ep1.ip4, dst=ep2.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg7, [p] * 17, sep1.itf)
for rx in rxs:
self.assertEqual(rx[Ether].src, str(self.router_mac))
self.assertEqual(rx[Ether].dst, sep1.mac)
self.assertEqual(rx[IP].src, ep1.ip4)
self.assertEqual(rx[IP].dst, ep2.ip4)
# contract for SEP to communicate with dst EP
c3 = VppGbpContract(
self, 402, epg_320.sclass, epg_221.sclass, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SYMMETRIC),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SYMMETRIC)],
[ETH_P_IP, ETH_P_IPV6])
c3.add_vpp_config()
# temporarily remove ep2, so that ep2 is remote & unknown
ep2.remove_vpp_config()
# packet going back from sep1 to its original dest (ep2)
# as ep2 is now unknown (see above), it must go through
# the rd UU (packet is routed)
p1 = (Ether(src=sep1.mac, dst=self.router_mac) /
IP(src=ep1.ip4, dst=ep2.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg3, [p1] * 17, self.pg7)
for rx in rxs:
self.assertEqual(rx[Ether].src, self.pg7.local_mac)
self.assertEqual(rx[Ether].dst, self.pg7.remote_mac)
self.assertEqual(rx[IP].src, self.pg7.local_ip4)
self.assertEqual(rx[IP].dst, self.pg7.remote_ip4)
self.assertEqual(rx[VXLAN].vni, 114)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
# redirect policy has been applied
inner = rx[VXLAN].payload
self.assertEqual(inner[Ether].src, routed_src_mac)
self.assertEqual(inner[Ether].dst, routed_dst_mac)
self.assertEqual(inner[IP].src, ep1.ip4)
self.assertEqual(inner[IP].dst, ep2.ip4)
self.logger.info(self.vapi.cli("show bridge 3 detail"))
sep1.remove_vpp_config()
self.logger.info(self.vapi.cli("show bridge 1 detail"))
self.logger.info(self.vapi.cli("show bridge 2 detail"))
# re-add ep2: it is local again :)
ep2.add_vpp_config()
# packet coming back from the remote sep through rd UU
p2 = (Ether(src=self.pg7.remote_mac,
dst=self.pg7.local_mac) /
IP(src=self.pg7.remote_ip4,
dst=self.pg7.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=114, gpid=441, gpflags=0x09, flags=0x88) /
Ether(src=str(self.router_mac), dst=self.router_mac) /
IP(src=ep1.ip4, dst=ep2.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg7, [p2], self.pg1)
for rx in rxs:
self.assertEqual(rx[Ether].src, str(self.router_mac))
self.assertEqual(rx[Ether].dst, self.pg1.remote_mac)
self.assertEqual(rx[IP].src, ep1.ip4)
self.assertEqual(rx[IP].dst, ep2.ip4)
#
# cleanup
#
c1.remove_vpp_config()
c2.remove_vpp_config()
c3.remove_vpp_config()
self.pg7.unconfig_ip4()
def test_gbp_l3_out(self):
""" GBP L3 Out """
ep_flags = VppEnum.vl_api_gbp_endpoint_flags_t
self.vapi.cli("set logging class gbp level debug")
routed_dst_mac = "00:0c:0c:0c:0c:0c"
routed_src_mac = "00:22:bd:f8:19:ff"
#
# IP tables
#
t4 = VppIpTable(self, 1)
t4.add_vpp_config()
t6 = VppIpTable(self, 1, True)
t6.add_vpp_config()
rd1 = VppGbpRouteDomain(self, 2, 55, t4, t6)
rd1.add_vpp_config()
self.loop0.set_mac(self.router_mac)
#
# Bind the BVI to the RD
#
VppIpInterfaceBind(self, self.loop0, t4).add_vpp_config()
VppIpInterfaceBind(self, self.loop0, t6).add_vpp_config()
#
# Pg7 hosts a BD's BUM
# Pg1 some other l3 interface
#
self.pg7.config_ip4()
self.pg7.resolve_arp()
#
# a multicast vxlan-gbp tunnel for broadcast in the BD
#
tun_bm = VppVxlanGbpTunnel(self, self.pg7.local_ip4,
"239.1.1.1", 88,
mcast_itf=self.pg7)
tun_bm.add_vpp_config()
#
# a GBP external bridge domain for the EPs
#
bd1 = VppBridgeDomain(self, 1)
bd1.add_vpp_config()
gbd1 = VppGbpBridgeDomain(self, bd1, rd1, self.loop0, None, tun_bm)
gbd1.add_vpp_config()
#
# The Endpoint-groups in which the external endpoints exist
#
epg_220 = VppGbpEndpointGroup(self, 220, 113, rd1, gbd1,
None, gbd1.bvi,
"10.0.0.128",
"2001:10::128",
VppGbpEndpointRetention(2))
epg_220.add_vpp_config()
# the BVI has the subnets applied ...
ip4_addr = VppIpInterfaceAddress(self, gbd1.bvi, "10.0.0.128", 24)
ip4_addr.add_vpp_config()
ip6_addr = VppIpInterfaceAddress(self, gbd1.bvi, "2001:10::128", 64)
ip6_addr.add_vpp_config()
# ... which are L3-out subnets
l3o_1 = VppGbpSubnet(
self, rd1, "10.0.0.0", 24,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_L3_OUT,
sclass=113)
l3o_1.add_vpp_config()
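# hosts within an L3-out subnet are classified by the subnet's
# sclass (113) and are not learnt as endpoints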
#
# an external interface attached to the outside world and the
# external BD
#
VppL2Vtr(self, self.vlan_100, L2_VTR_OP.L2_POP_1).add_vpp_config()
VppL2Vtr(self, self.vlan_101, L2_VTR_OP.L2_POP_1).add_vpp_config()
vlan_144 = VppDot1QSubint(self, self.pg0, 144)
vlan_144.admin_up()
# vlan_102 is not popped
#
# a unicast vxlan-gbp tunnel for inter-RD traffic
#
vx_tun_l3 = VppGbpVxlanTunnel(
self, 444, rd1.rd_id,
VppEnum.vl_api_gbp_vxlan_tunnel_mode_t.GBP_VXLAN_TUNNEL_MODE_L3,
self.pg2.local_ip4)
vx_tun_l3.add_vpp_config()
#
# External Endpoints
#
eep1 = VppGbpEndpoint(self, self.vlan_100,
epg_220, None,
"10.0.0.1", "11.0.0.1",
"2001:10::1", "3001::1",
ep_flags.GBP_API_ENDPOINT_FLAG_EXTERNAL)
eep1.add_vpp_config()
eep2 = VppGbpEndpoint(self, self.vlan_101,
epg_220, None,
"10.0.0.2", "11.0.0.2",
"2001:10::2", "3001::2",
ep_flags.GBP_API_ENDPOINT_FLAG_EXTERNAL)
eep2.add_vpp_config()
eep3 = VppGbpEndpoint(self, self.vlan_102,
epg_220, None,
"10.0.0.3", "11.0.0.3",
"2001:10::3", "3001::3",
ep_flags.GBP_API_ENDPOINT_FLAG_EXTERNAL)
eep3.add_vpp_config()
#
# A remote external endpoint
#
rep = VppGbpEndpoint(self, vx_tun_l3,
epg_220, None,
"10.0.0.101", "11.0.0.101",
"2001:10::101", "3001::101",
ep_flags.GBP_API_ENDPOINT_FLAG_REMOTE,
self.pg7.local_ip4,
self.pg7.remote_ip4,
mac=None)
rep.add_vpp_config()
#
# EP1 impersonating EP3 is dropped
#
p = (Ether(src=eep1.mac, dst="ff:ff:ff:ff:ff:ff") /
Dot1Q(vlan=100) /
ARP(op="who-has",
psrc="10.0.0.3", pdst="10.0.0.128",
hwsrc=eep1.mac, hwdst="ff:ff:ff:ff:ff:ff"))
self.send_and_assert_no_replies(self.pg0, p)
#
# ARP packets from external EPs are accepted and replied to
#
p_arp = (Ether(src=eep1.mac, dst="ff:ff:ff:ff:ff:ff") /
Dot1Q(vlan=100) /
ARP(op="who-has",
psrc=eep1.ip4, pdst="10.0.0.128",
hwsrc=eep1.mac, hwdst="ff:ff:ff:ff:ff:ff"))
rxs = self.send_and_expect(self.pg0, p_arp * 1, self.pg0)
#
# ARP packets from hosts in the remote subnet are accepted and replied to
#
p_arp = (Ether(src=eep3.mac, dst="ff:ff:ff:ff:ff:ff") /
Dot1Q(vlan=102) /
ARP(op="who-has",
psrc=eep3.ip4, pdst="10.0.0.128",
hwsrc=eep3.mac, hwdst="ff:ff:ff:ff:ff:ff"))
rxs = self.send_and_expect(self.pg0, p_arp * 1, self.pg0)
#
# packets destined to unknown addresses in the BVI's subnet
# are ARP'd for
#
p4 = (Ether(src=eep1.mac, dst=str(self.router_mac)) /
Dot1Q(vlan=100) /
IP(src="10.0.0.1", dst="10.0.0.88") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
p6 = (Ether(src=eep1.mac, dst=str(self.router_mac)) /
Dot1Q(vlan=100) /
IPv6(src="2001:10::1", dst="2001:10::88") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg0, p4 * 1, self.pg7)
for rx in rxs:
self.assertEqual(rx[Ether].src, self.pg7.local_mac)
# self.assertEqual(rx[Ether].dst, self.pg7.remote_mac)
self.assertEqual(rx[IP].src, self.pg7.local_ip4)
self.assertEqual(rx[IP].dst, "239.1.1.1")
self.assertEqual(rx[VXLAN].vni, 88)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
# policy was applied to the original IP packet
self.assertEqual(rx[VXLAN].gpid, 113)
self.assertTrue(rx[VXLAN].gpflags.A)
self.assertFalse(rx[VXLAN].gpflags.D)
inner = rx[VXLAN].payload
self.assertTrue(inner.haslayer(ARP))
#
# remote to external
#
p = (Ether(src=self.pg7.remote_mac,
dst=self.pg7.local_mac) /
IP(src=self.pg7.remote_ip4,
dst=self.pg7.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=444, gpid=113, flags=0x88) /
Ether(src=self.pg0.remote_mac, dst=str(self.router_mac)) /
IP(src="10.0.0.101", dst="10.0.0.1") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg7, p * 1, self.pg0)
#
# local EP pings router
#
p = (Ether(src=eep1.mac, dst=str(self.router_mac)) /
Dot1Q(vlan=100) /
IP(src=eep1.ip4, dst="10.0.0.128") /
ICMP(type='echo-request'))
rxs = self.send_and_expect(self.pg0, p * 1, self.pg0)
for rx in rxs:
self.assertEqual(rx[Ether].src, str(self.router_mac))
self.assertEqual(rx[Ether].dst, eep1.mac)
self.assertEqual(rx[Dot1Q].vlan, 100)
#
# local EP pings other local EP
#
p = (Ether(src=eep1.mac, dst=eep2.mac) /
Dot1Q(vlan=100) /
IP(src=eep1.ip4, dst=eep2.ip4) /
ICMP(type='echo-request'))
rxs = self.send_and_expect(self.pg0, p * 1, self.pg0)
for rx in rxs:
self.assertEqual(rx[Ether].src, eep1.mac)
self.assertEqual(rx[Ether].dst, eep2.mac)
self.assertEqual(rx[Dot1Q].vlan, 101)
#
# local EP pings router w/o the vlan tag popped
#
p = (Ether(src=eep3.mac, dst=str(self.router_mac)) /
Dot1Q(vlan=102) /
IP(src=eep3.ip4, dst="10.0.0.128") /
ICMP(type='echo-request'))
rxs = self.send_and_expect(self.pg0, p * 1, self.pg0)
for rx in rxs:
self.assertEqual(rx[Ether].src, str(self.router_mac))
self.assertEqual(rx[Ether].dst, self.vlan_102.remote_mac)
#
# A ip4 subnet reachable through the external EP1
#
ip_220 = VppIpRoute(self, "10.220.0.0", 24,
[VppRoutePath(eep1.ip4,
eep1.epg.bvi.sw_if_index)],
table_id=t4.table_id)
ip_220.add_vpp_config()
l3o_220 = VppGbpSubnet(
self, rd1, "10.220.0.0", 24,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_L3_OUT,
sclass=4220)
l3o_220.add_vpp_config()
#
# An ip6 subnet reachable through the external EP1
#
ip6_220 = VppIpRoute(self, "10:220::", 64,
[VppRoutePath(eep1.ip6,
eep1.epg.bvi.sw_if_index)],
table_id=t6.table_id)
ip6_220.add_vpp_config()
l3o6_220 = VppGbpSubnet(
self, rd1, "10:220::", 64,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_L3_OUT,
sclass=4220)
l3o6_220.add_vpp_config()
#
# A subnet reachable through the external EP2
#
ip_221 = VppIpRoute(self, "10.221.0.0", 24,
[VppRoutePath(eep2.ip4,
eep2.epg.bvi.sw_if_index)],
table_id=t4.table_id)
ip_221.add_vpp_config()
l3o_221 = VppGbpSubnet(
self, rd1, "10.221.0.0", 24,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_L3_OUT,
sclass=4221)
l3o_221.add_vpp_config()
#
# ping between hosts in remote subnets
# dropped without a contract
#
p = (Ether(src=eep1.mac, dst=str(self.router_mac)) /
Dot1Q(vlan=100) /
IP(src="10.220.0.1", dst="10.221.0.1") /
ICMP(type='echo-request'))
self.send_and_assert_no_replies(self.pg0, p * 1)
#
# contract for the external nets to communicate
#
acl = VppGbpAcl(self)
rule4 = acl.create_rule(permit_deny=1, proto=17)
rule6 = acl.create_rule(is_ipv6=1, permit_deny=1, proto=17)
acl_index = acl.add_vpp_config([rule4, rule6])
#
# A contract with the wrong scope is not matched
#
c_44 = VppGbpContract(
self, 44, 4220, 4221, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[])],
[ETH_P_IP, ETH_P_IPV6])
c_44.add_vpp_config()
self.send_and_assert_no_replies(self.pg0, p * 1)
c1 = VppGbpContract(
self, 55, 4220, 4221, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[])],
[ETH_P_IP, ETH_P_IPV6])
c1.add_vpp_config()
#
# Contracts allowing ext-net 200 to talk with external EPs
#
c2 = VppGbpContract(
self, 55, 4220, 113, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[])],
[ETH_P_IP, ETH_P_IPV6])
c2.add_vpp_config()
c3 = VppGbpContract(
self, 55, 113, 4220, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[])],
[ETH_P_IP, ETH_P_IPV6])
c3.add_vpp_config()
#
# ping between hosts in remote subnets
#
p = (Ether(src=eep1.mac, dst=str(self.router_mac)) /
Dot1Q(vlan=100) /
IP(src="10.220.0.1", dst="10.221.0.1") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg0, p * 1, self.pg0)
for rx in rxs:
self.assertEqual(rx[Ether].src, str(self.router_mac))
self.assertEqual(rx[Ether].dst, eep2.mac)
self.assertEqual(rx[Dot1Q].vlan, 101)
# we did not learn these external hosts
self.assertFalse(find_gbp_endpoint(self, ip="10.220.0.1"))
self.assertFalse(find_gbp_endpoint(self, ip="10.221.0.1"))
#
# from remote external EP to local external EP
#
p = (Ether(src=self.pg7.remote_mac,
dst=self.pg7.local_mac) /
IP(src=self.pg7.remote_ip4,
dst=self.pg7.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=444, gpid=113, flags=0x88) /
Ether(src=self.pg0.remote_mac, dst=str(self.router_mac)) /
IP(src="10.0.0.101", dst="10.220.0.1") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg7, p * 1, self.pg0)
#
# ping from an external host to the remote external EP
#
p = (Ether(src=eep1.mac, dst=str(self.router_mac)) /
Dot1Q(vlan=100) /
IP(src="10.220.0.1", dst=rep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg0, p * 1, self.pg7)
for rx in rxs:
self.assertEqual(rx[Ether].src, self.pg7.local_mac)
# self.assertEqual(rx[Ether].dst, self.pg7.remote_mac)
self.assertEqual(rx[IP].src, self.pg7.local_ip4)
self.assertEqual(rx[IP].dst, self.pg7.remote_ip4)
self.assertEqual(rx[VXLAN].vni, 444)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
# the sclass of the ext-net the packet came from
self.assertEqual(rx[VXLAN].gpid, 4220)
# policy was applied to the original IP packet
self.assertTrue(rx[VXLAN].gpflags.A)
# since it's an external host the receiver should not learn it
self.assertTrue(rx[VXLAN].gpflags.D)
inner = rx[VXLAN].payload
self.assertEqual(inner[IP].src, "10.220.0.1")
self.assertEqual(inner[IP].dst, rep.ip4)
#
# An external subnet reachable via the remote external EP
#
#
# first the VXLAN-GBP tunnel over which it is reached
#
vx_tun_r1 = VppVxlanGbpTunnel(
self, self.pg7.local_ip4,
self.pg7.remote_ip4, 445,
mode=(VppEnum.vl_api_vxlan_gbp_api_tunnel_mode_t.
VXLAN_GBP_API_TUNNEL_MODE_L3))
vx_tun_r1.add_vpp_config()
VppIpInterfaceBind(self, vx_tun_r1, t4).add_vpp_config()
self.logger.info(self.vapi.cli("sh vxlan-gbp tunnel"))
#
# then the special adj to resolve through on that tunnel
#
n1 = VppNeighbor(self,
vx_tun_r1.sw_if_index,
"00:0c:0c:0c:0c:0c",
self.pg7.remote_ip4)
n1.add_vpp_config()
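# the static neighbour supplies the inner destination MAC
# (routed_dst_mac) used when routing onto the tunnel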
#
# the route via the adj above
#
ip_222 = VppIpRoute(self, "10.222.0.0", 24,
[VppRoutePath(self.pg7.remote_ip4,
vx_tun_r1.sw_if_index)],
table_id=t4.table_id)
ip_222.add_vpp_config()
l3o_222 = VppGbpSubnet(
self, rd1, "10.222.0.0", 24,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_L3_OUT,
sclass=4222)
l3o_222.add_vpp_config()
#
# ping between hosts in local and remote external subnets
# dropped without a contract
#
p = (Ether(src=eep1.mac, dst=str(self.router_mac)) /
Dot1Q(vlan=100) /
IP(src="10.220.0.1", dst="10.222.0.1") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_assert_no_replies(self.pg0, p * 1)
#
# Add contracts ext-nets for 220 -> 222
#
c4 = VppGbpContract(
self, 55, 4220, 4222, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[])],
[ETH_P_IP, ETH_P_IPV6])
c4.add_vpp_config()
#
# ping from host in local to remote external subnets
#
p = (Ether(src=eep1.mac, dst=str(self.router_mac)) /
Dot1Q(vlan=100) /
IP(src="10.220.0.1", dst="10.222.0.1") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg0, p * 3, self.pg7)
for rx in rxs:
self.assertEqual(rx[Ether].src, self.pg7.local_mac)
self.assertEqual(rx[Ether].dst, self.pg7.remote_mac)
self.assertEqual(rx[IP].src, self.pg7.local_ip4)
self.assertEqual(rx[IP].dst, self.pg7.remote_ip4)
self.assertEqual(rx[VXLAN].vni, 445)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
# the sclass of the ext-net the packet came from
self.assertEqual(rx[VXLAN].gpid, 4220)
# policy was applied to the original IP packet
self.assertTrue(rx[VXLAN].gpflags.A)
# since it's an external host the receiver should not learn it
self.assertTrue(rx[VXLAN].gpflags.D)
inner = rx[VXLAN].payload
self.assertEqual(inner[Ether].dst, "00:0c:0c:0c:0c:0c")
self.assertEqual(inner[IP].src, "10.220.0.1")
self.assertEqual(inner[IP].dst, "10.222.0.1")
#
# make the external subnet ECMP
#
vx_tun_r2 = VppVxlanGbpTunnel(
self, self.pg7.local_ip4,
self.pg7.remote_ip4, 446,
mode=(VppEnum.vl_api_vxlan_gbp_api_tunnel_mode_t.
VXLAN_GBP_API_TUNNEL_MODE_L3))
vx_tun_r2.add_vpp_config()
VppIpInterfaceBind(self, vx_tun_r2, t4).add_vpp_config()
self.logger.info(self.vapi.cli("sh vxlan-gbp tunnel"))
n2 = VppNeighbor(self,
vx_tun_r2.sw_if_index,
"00:0c:0c:0c:0c:0c",
self.pg7.remote_ip4)
n2.add_vpp_config()
ip_222.modify([VppRoutePath(self.pg7.remote_ip4,
vx_tun_r1.sw_if_index),
VppRoutePath(self.pg7.remote_ip4,
vx_tun_r2.sw_if_index)])
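# ip_222 now has a path via each tunnel, so flows ECMP on the
# 5-tuple: the differing UDP ports below hash to different paths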
#
# now expect load-balance
#
p = [(Ether(src=eep1.mac, dst=str(self.router_mac)) /
Dot1Q(vlan=100) /
IP(src="10.220.0.1", dst="10.222.0.1") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100)),
(Ether(src=eep1.mac, dst=str(self.router_mac)) /
Dot1Q(vlan=100) /
IP(src="10.220.0.1", dst="10.222.0.1") /
UDP(sport=1222, dport=1235) /
Raw(b'\xa5' * 100))]
rxs = self.send_and_expect(self.pg0, p, self.pg7)
self.assertEqual(rxs[0][VXLAN].vni, 445)
self.assertEqual(rxs[1][VXLAN].vni, 446)
#
# Same LB test for v6
#
n3 = VppNeighbor(self,
vx_tun_r1.sw_if_index,
"00:0c:0c:0c:0c:0c",
self.pg7.remote_ip6)
n3.add_vpp_config()
n4 = VppNeighbor(self,
vx_tun_r2.sw_if_index,
"00:0c:0c:0c:0c:0c",
self.pg7.remote_ip6)
n4.add_vpp_config()
ip_222_6 = VppIpRoute(self, "10:222::", 64,
[VppRoutePath(self.pg7.remote_ip6,
vx_tun_r1.sw_if_index),
VppRoutePath(self.pg7.remote_ip6,
vx_tun_r2.sw_if_index)],
table_id=t6.table_id)
ip_222_6.add_vpp_config()
l3o_222_6 = VppGbpSubnet(
self, rd1, "10:222::", 64,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_L3_OUT,
sclass=4222)
l3o_222_6.add_vpp_config()
p = [(Ether(src=eep1.mac, dst=str(self.router_mac)) /
Dot1Q(vlan=100) /
IPv6(src="10:220::1", dst="10:222::1") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100)),
(Ether(src=eep1.mac, dst=str(self.router_mac)) /
Dot1Q(vlan=100) /
IPv6(src="10:220::1", dst="10:222::1") /
UDP(sport=7777, dport=8881) /
Raw(b'\xa5' * 100))]
self.logger.info(self.vapi.cli("sh ip6 fib 10:222::1"))
rxs = self.send_and_expect(self.pg0, p, self.pg7)
self.assertEqual(rxs[0][VXLAN].vni, 445)
self.assertEqual(rxs[1][VXLAN].vni, 446)
#
# ping from host in remote to local external subnets
# there's no contract for this, but the A bit is set.
#
p = (Ether(src=self.pg7.remote_mac, dst=self.pg7.local_mac) /
IP(src=self.pg7.remote_ip4, dst=self.pg7.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=445, gpid=4222, flags=0x88, gpflags='A') /
Ether(src=self.pg0.remote_mac, dst=str(self.router_mac)) /
IP(src="10.222.0.1", dst="10.220.0.1") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg7, p * 3, self.pg0)
self.assertFalse(find_gbp_endpoint(self, ip="10.222.0.1"))
#
# ping from host in remote to remote external subnets
# this is dropped by the reflection check.
#
p = (Ether(src=self.pg7.remote_mac, dst=self.pg7.local_mac) /
IP(src=self.pg7.remote_ip4, dst=self.pg7.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=445, gpid=4222, flags=0x88, gpflags='A') /
Ether(src=self.pg0.remote_mac, dst=str(self.router_mac)) /
IP(src="10.222.0.1", dst="10.222.0.2") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_assert_no_replies(self.pg7, p * 3)
p = (Ether(src=self.pg7.remote_mac, dst=self.pg7.local_mac) /
IP(src=self.pg7.remote_ip4, dst=self.pg7.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=445, gpid=4222, flags=0x88, gpflags='A') /
Ether(src=self.pg0.remote_mac, dst=str(self.router_mac)) /
IPv6(src="10:222::1", dst="10:222::2") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_assert_no_replies(self.pg7, p * 3)
#
# local EP
#
lep1 = VppGbpEndpoint(self, vlan_144,
epg_220, None,
"10.0.0.44", "11.0.0.44",
"2001:10::44", "3001::44")
lep1.add_vpp_config()
#
# local EP to local ip4 external subnet
#
p = (Ether(src=lep1.mac, dst=str(self.router_mac)) /
Dot1Q(vlan=144) /
IP(src=lep1.ip4, dst="10.220.0.1") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg0, p * 1, self.pg0)
for rx in rxs:
self.assertEqual(rx[Ether].src, str(self.router_mac))
self.assertEqual(rx[Ether].dst, eep1.mac)
self.assertEqual(rx[Dot1Q].vlan, 100)
#
# local EP to local ip6 external subnet
#
p = (Ether(src=lep1.mac, dst=str(self.router_mac)) /
Dot1Q(vlan=144) /
IPv6(src=lep1.ip6, dst="10:220::1") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg0, p * 1, self.pg0)
for rx in rxs:
self.assertEqual(rx[Ether].src, str(self.router_mac))
self.assertEqual(rx[Ether].dst, eep1.mac)
self.assertEqual(rx[Dot1Q].vlan, 100)
#
# ip4 and ip6 subnets that load-balance
#
ip_20 = VppIpRoute(self, "10.20.0.0", 24,
[VppRoutePath(eep1.ip4,
eep1.epg.bvi.sw_if_index),
VppRoutePath(eep2.ip4,
eep2.epg.bvi.sw_if_index)],
table_id=t4.table_id)
ip_20.add_vpp_config()
l3o_20 = VppGbpSubnet(
self, rd1, "10.20.0.0", 24,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_L3_OUT,
sclass=4220)
l3o_20.add_vpp_config()
ip6_20 = VppIpRoute(self, "10:20::", 64,
[VppRoutePath(eep1.ip6,
eep1.epg.bvi.sw_if_index),
VppRoutePath(eep2.ip6,
eep2.epg.bvi.sw_if_index)],
table_id=t6.table_id)
ip6_20.add_vpp_config()
l3o6_20 = VppGbpSubnet(
self, rd1, "10:20::", 64,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_L3_OUT,
sclass=4220)
l3o6_20.add_vpp_config()
self.logger.info(self.vapi.cli("sh ip fib 10.20.0.1"))
self.logger.info(self.vapi.cli("sh ip6 fib 10:20::1"))
# two ip6 packets whose ports are chosen so they load-balance
p = [(Ether(src=lep1.mac, dst=str(self.router_mac)) /
Dot1Q(vlan=144) /
IPv6(src=lep1.ip6, dst="10:20::1") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100)),
(Ether(src=lep1.mac, dst=str(self.router_mac)) /
Dot1Q(vlan=144) /
IPv6(src=lep1.ip6, dst="10:20::1") /
UDP(sport=124, dport=1230) /
Raw(b'\xa5' * 100))]
rxs = self.send_and_expect(self.pg0, p, self.pg0, 2)
self.assertEqual(rxs[0][Dot1Q].vlan, 101)
self.assertEqual(rxs[1][Dot1Q].vlan, 100)
# two ip4 packets whose ports are chosen so they load-balance
p = [(Ether(src=lep1.mac, dst=str(self.router_mac)) /
Dot1Q(vlan=144) /
IP(src=lep1.ip4, dst="10.20.0.1") /
UDP(sport=1235, dport=1235) /
Raw(b'\xa5' * 100)),
(Ether(src=lep1.mac, dst=str(self.router_mac)) /
Dot1Q(vlan=144) /
IP(src=lep1.ip4, dst="10.20.0.1") /
UDP(sport=124, dport=1230) /
Raw(b'\xa5' * 100))]
rxs = self.send_and_expect(self.pg0, p, self.pg0, 2)
self.assertEqual(rxs[0][Dot1Q].vlan, 101)
self.assertEqual(rxs[1][Dot1Q].vlan, 100)
#
# cleanup
#
ip_222.remove_vpp_config()
self.pg7.unconfig_ip4()
self.vlan_101.set_vtr(L2_VTR_OP.L2_DISABLED)
self.vlan_100.set_vtr(L2_VTR_OP.L2_DISABLED)
def test_gbp_anon_l3_out(self):
""" GBP Anonymous L3 Out """
ep_flags = VppEnum.vl_api_gbp_endpoint_flags_t
self.vapi.cli("set logging class gbp level debug")
routed_dst_mac = "00:0c:0c:0c:0c:0c"
routed_src_mac = "00:22:bd:f8:19:ff"
#
# IP tables
#
t4 = VppIpTable(self, 1)
t4.add_vpp_config()
t6 = VppIpTable(self, 1, True)
t6.add_vpp_config()
rd1 = VppGbpRouteDomain(self, 2, 55, t4, t6)
rd1.add_vpp_config()
self.loop0.set_mac(self.router_mac)
#
# Bind the BVI to the RD
#
VppIpInterfaceBind(self, self.loop0, t4).add_vpp_config()
VppIpInterfaceBind(self, self.loop0, t6).add_vpp_config()
#
# Pg7 hosts a BD's BUM
# Pg1 some other l3 interface
#
self.pg7.config_ip4()
self.pg7.resolve_arp()
#
# a GBP external bridge domain for the EPs
#
bd1 = VppBridgeDomain(self, 1)
bd1.add_vpp_config()
gbd1 = VppGbpBridgeDomain(self, bd1, rd1, self.loop0, None, None)
gbd1.add_vpp_config()
#
# The Endpoint-groups in which the external endpoints exist
#
epg_220 = VppGbpEndpointGroup(self, 220, 113, rd1, gbd1,
None, gbd1.bvi,
"10.0.0.128",
"2001:10::128",
VppGbpEndpointRetention(2))
epg_220.add_vpp_config()
# the BVI has the subnet applied ...
ip4_addr = VppIpInterfaceAddress(self, gbd1.bvi, "10.0.0.128", 24)
ip4_addr.add_vpp_config()
# ... which is an anonymous L3-out subnet
l3o_1 = VppGbpSubnet(
self, rd1, "10.0.0.0", 24,
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_ANON_L3_OUT,
sclass=113)
l3o_1.add_vpp_config()
#
# an external interface attached to the outside world and the
# external BD
#
VppL2Vtr(self, self.vlan_100, L2_VTR_OP.L2_POP_1).add_vpp_config()
VppL2Vtr(self, self.vlan_101, L2_VTR_OP.L2_POP_1).add_vpp_config()
#
# vlan_100 and vlan_101 are anonymous l3-out interfaces
#
ext_itf = VppGbpExtItf(self, self.vlan_100, bd1, rd1, anon=True)
ext_itf.add_vpp_config()
ext_itf = VppGbpExtItf(self, self.vlan_101, bd1, rd1, anon=True)
ext_itf.add_vpp_config()
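# anon=True: no external EPs are configured on these interfaces;
# hosts are classified solely by the matching L3-out subnet's sclass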
#
# a unicast vxlan-gbp tunnel for inter-RD traffic
#
vx_tun_l3 = VppGbpVxlanTunnel(
self, 444, rd1.rd_id,
VppEnum.vl_api_gbp_vxlan_tunnel_mode_t.GBP_VXLAN_TUNNEL_MODE_L3,
self.pg2.local_ip4)
vx_tun_l3.add_vpp_config()
#
# A remote external endpoint
#
rep = VppGbpEndpoint(self, vx_tun_l3,
epg_220, None,
"10.0.0.201", "11.0.0.201",
"2001:10::201", "3001::101",
ep_flags.GBP_API_ENDPOINT_FLAG_REMOTE,
self.pg7.local_ip4,
self.pg7.remote_ip4,
mac=None)
rep.add_vpp_config()
#
# ARP packets from hosts in the external subnet are accepted, flooded
# and replied to. We expect 2 packets:
# - the ARP request flooded over the other vlan subif
# - the ARP reply from the BVI
#
p_arp = (Ether(src=self.vlan_100.remote_mac,
dst="ff:ff:ff:ff:ff:ff") /
Dot1Q(vlan=100) /
ARP(op="who-has",
psrc="10.0.0.100",
pdst="10.0.0.128",
hwsrc=self.vlan_100.remote_mac,
hwdst="ff:ff:ff:ff:ff:ff"))
rxs = self.send_and_expect(self.pg0, p_arp * 1, self.pg0, n_rx=2)
p_arp = (Ether(src=self.vlan_101.remote_mac,
dst="ff:ff:ff:ff:ff:ff") /
Dot1Q(vlan=101) /
ARP(op="who-has",
psrc='10.0.0.101',
pdst="10.0.0.128",
hwsrc=self.vlan_101.remote_mac,
hwdst="ff:ff:ff:ff:ff:ff"))
rxs = self.send_and_expect(self.pg0, p_arp * 1, self.pg0, n_rx=2)
#
# remote to external
#
p = (Ether(src=self.pg7.remote_mac,
dst=self.pg7.local_mac) /
IP(src=self.pg7.remote_ip4,
dst=self.pg7.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=vx_tun_l3.vni, gpid=epg_220.sclass, flags=0x88) /
Ether(src=self.pg0.remote_mac, dst=str(self.router_mac)) /
IP(src=str(rep.ip4), dst="10.0.0.100") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg7, p * 1, self.pg0)
#
# local EP pings router
#
p = (Ether(src=self.vlan_100.remote_mac, dst=str(self.router_mac)) /
Dot1Q(vlan=100) /
IP(src="10.0.0.100", dst="10.0.0.128") /
ICMP(type='echo-request'))
rxs = self.send_and_expect(self.pg0, p * 1, self.pg0)
for rx in rxs:
self.assertEqual(rx[Ether].src, str(self.router_mac))
self.assertEqual(rx[Ether].dst, self.vlan_100.remote_mac)
self.assertEqual(rx[Dot1Q].vlan, 100)
#
# local EP pings other local EP
#
p = (Ether(src=self.vlan_100.remote_mac,
dst=self.vlan_101.remote_mac) /
Dot1Q(vlan=100) /
IP(src="10.0.0.100", dst="10.0.0.101") /
ICMP(type='echo-request'))
rxs = self.send_and_expect(self.pg0, p * 1, self.pg0)
for rx in rxs:
self.assertEqual(rx[Ether].src, self.vlan_100.remote_mac)
self.assertEqual(rx[Ether].dst, self.vlan_101.remote_mac)
self.assertEqual(rx[Dot1Q].vlan, 101)
#
# A subnet reachable through an external router on vlan 100
#
ip_220 = VppIpRoute(self, "10.220.0.0", 24,
[VppRoutePath("10.0.0.100",
epg_220.bvi.sw_if_index)],
table_id=t4.table_id)
ip_220.add_vpp_config()
l3o_220 = VppGbpSubnet(
self, rd1, "10.220.0.0", 24,
# note: this is a "regular" L3 out subnet (not connected)
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_L3_OUT,
sclass=4220)
l3o_220.add_vpp_config()
#
# A subnet reachable through an external router on vlan 101
#
ip_221 = VppIpRoute(self, "10.221.0.0", 24,
[VppRoutePath("10.0.0.101",
epg_220.bvi.sw_if_index)],
table_id=t4.table_id)
ip_221.add_vpp_config()
l3o_221 = VppGbpSubnet(
self, rd1, "10.221.0.0", 24,
# note: this is a "regular" L3 out subnet (not connected)
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_L3_OUT,
sclass=4221)
l3o_221.add_vpp_config()
#
# ping between hosts in remote subnets
# dropped without a contract
#
p = (Ether(src=self.vlan_100.remote_mac, dst=str(self.router_mac)) /
Dot1Q(vlan=100) /
IP(src="10.220.0.1", dst="10.221.0.1") /
ICMP(type='echo-request'))
rxs = self.send_and_assert_no_replies(self.pg0, p * 1)
#
# contract for the external nets to communicate
#
acl = VppGbpAcl(self)
rule4 = acl.create_rule(permit_deny=1, proto=17)
rule6 = acl.create_rule(is_ipv6=1, permit_deny=1, proto=17)
acl_index = acl.add_vpp_config([rule4, rule6])
c1 = VppGbpContract(
self, 55, 4220, 4221, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[])],
[ETH_P_IP, ETH_P_IPV6])
c1.add_vpp_config()
#
# Contracts allowing ext-net 200 to talk with external EPs
#
c2 = VppGbpContract(
self, 55, 4220, 113, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[])],
[ETH_P_IP, ETH_P_IPV6])
c2.add_vpp_config()
c3 = VppGbpContract(
self, 55, 113, 4220, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[])],
[ETH_P_IP, ETH_P_IPV6])
c3.add_vpp_config()
#
# ping between hosts in remote subnets
#
p = (Ether(src=self.vlan_100.remote_mac, dst=str(self.router_mac)) /
Dot1Q(vlan=100) /
IP(src="10.220.0.1", dst="10.221.0.1") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg0, p * 1, self.pg0)
for rx in rxs:
self.assertEqual(rx[Ether].src, str(self.router_mac))
self.assertEqual(rx[Ether].dst, self.vlan_101.remote_mac)
self.assertEqual(rx[Dot1Q].vlan, 101)
# we did not learn these external hosts
self.assertFalse(find_gbp_endpoint(self, ip="10.220.0.1"))
self.assertFalse(find_gbp_endpoint(self, ip="10.221.0.1"))
#
# from remote external EP to local external EP
#
p = (Ether(src=self.pg7.remote_mac,
dst=self.pg7.local_mac) /
IP(src=self.pg7.remote_ip4,
dst=self.pg7.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=444, gpid=113, flags=0x88) /
Ether(src=self.pg0.remote_mac, dst=str(self.router_mac)) /
IP(src=rep.ip4, dst="10.220.0.1") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg7, p * 1, self.pg0)
#
# ping from an external host to the remote external EP
#
p = (Ether(src=self.vlan_100.remote_mac, dst=str(self.router_mac)) /
Dot1Q(vlan=100) /
IP(src="10.220.0.1", dst=rep.ip4) /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg0, p * 1, self.pg7)
for rx in rxs:
self.assertEqual(rx[Ether].src, self.pg7.local_mac)
# self.assertEqual(rx[Ether].dst, self.pg7.remote_mac)
self.assertEqual(rx[IP].src, self.pg7.local_ip4)
self.assertEqual(rx[IP].dst, self.pg7.remote_ip4)
self.assertEqual(rx[VXLAN].vni, 444)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
# the sclass of the ext-net the packet came from
self.assertEqual(rx[VXLAN].gpid, 4220)
# policy was applied to the original IP packet
self.assertTrue(rx[VXLAN].gpflags.A)
# since it's an external host the receiver should not learn it
self.assertTrue(rx[VXLAN].gpflags.D)
inner = rx[VXLAN].payload
self.assertEqual(inner[IP].src, "10.220.0.1")
self.assertEqual(inner[IP].dst, rep.ip4)
#
# An external subnet reachable via the remote external EP
#
#
# first the VXLAN-GBP tunnel over which it is reached
#
vx_tun_r = VppVxlanGbpTunnel(
self, self.pg7.local_ip4,
self.pg7.remote_ip4, 445,
mode=(VppEnum.vl_api_vxlan_gbp_api_tunnel_mode_t.
VXLAN_GBP_API_TUNNEL_MODE_L3))
vx_tun_r.add_vpp_config()
VppIpInterfaceBind(self, vx_tun_r, t4).add_vpp_config()
self.logger.info(self.vapi.cli("sh vxlan-gbp tunnel"))
#
# then the special adj to resolve through on that tunnel
#
n1 = VppNeighbor(self,
vx_tun_r.sw_if_index,
"00:0c:0c:0c:0c:0c",
self.pg7.remote_ip4)
n1.add_vpp_config()
#
# the route via the adj above
#
ip_222 = VppIpRoute(self, "10.222.0.0", 24,
[VppRoutePath(self.pg7.remote_ip4,
vx_tun_r.sw_if_index)],
table_id=t4.table_id)
ip_222.add_vpp_config()
l3o_222 = VppGbpSubnet(
self, rd1, "10.222.0.0", 24,
# note: this is a "regular" l3out subnet (not connected)
VppEnum.vl_api_gbp_subnet_type_t.GBP_API_SUBNET_L3_OUT,
sclass=4222)
l3o_222.add_vpp_config()
#
# ping between hosts in local and remote external subnets
# dropped without a contract
#
p = (Ether(src=self.vlan_100.remote_mac, dst=str(self.router_mac)) /
Dot1Q(vlan=100) /
IP(src="10.220.0.1", dst="10.222.0.1") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_assert_no_replies(self.pg0, p * 1)
#
# Add contracts ext-nets for 220 -> 222
#
c4 = VppGbpContract(
self, 55, 4220, 4222, acl_index,
[VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[]),
VppGbpContractRule(
VppEnum.vl_api_gbp_rule_action_t.GBP_API_RULE_PERMIT,
VppEnum.vl_api_gbp_hash_mode_t.GBP_API_HASH_MODE_SRC_IP,
[])],
[ETH_P_IP, ETH_P_IPV6])
c4.add_vpp_config()
#
# ping from a host in the local external subnet to the remote one
#
p = (Ether(src=self.vlan_100.remote_mac, dst=str(self.router_mac)) /
Dot1Q(vlan=100) /
IP(src="10.220.0.1", dst="10.222.0.1") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg0, p * 3, self.pg7)
for rx in rxs:
self.assertEqual(rx[Ether].src, self.pg7.local_mac)
self.assertEqual(rx[Ether].dst, self.pg7.remote_mac)
self.assertEqual(rx[IP].src, self.pg7.local_ip4)
self.assertEqual(rx[IP].dst, self.pg7.remote_ip4)
self.assertEqual(rx[VXLAN].vni, 445)
self.assertTrue(rx[VXLAN].flags.G)
self.assertTrue(rx[VXLAN].flags.Instance)
# the sclass of the ext-net the packet came from
self.assertEqual(rx[VXLAN].gpid, 4220)
# policy was applied to the original IP packet
self.assertTrue(rx[VXLAN].gpflags.A)
# since it's an external host the receiver should not learn it
self.assertTrue(rx[VXLAN].gpflags.D)
inner = rx[VXLAN].payload
self.assertEqual(inner[Ether].dst, "00:0c:0c:0c:0c:0c")
self.assertEqual(inner[IP].src, "10.220.0.1")
self.assertEqual(inner[IP].dst, "10.222.0.1")
#
# ping from a host in the remote external subnet to the local one
# there's no contract for this direction, but the A bit is set.
#
p = (Ether(src=self.pg7.remote_mac, dst=self.pg7.local_mac) /
IP(src=self.pg7.remote_ip4, dst=self.pg7.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=445, gpid=4222, flags=0x88, gpflags='A') /
Ether(src=self.pg0.remote_mac, dst=str(self.router_mac)) /
IP(src="10.222.0.1", dst="10.220.0.1") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_expect(self.pg7, p * 3, self.pg0)
self.assertFalse(find_gbp_endpoint(self, ip="10.222.0.1"))
#
# ping between hosts in the remote external subnet;
# this is dropped by the reflection check (the packet would be forwarded
# back out of the tunnel it arrived on).
#
p = (Ether(src=self.pg7.remote_mac, dst=self.pg7.local_mac) /
IP(src=self.pg7.remote_ip4, dst=self.pg7.local_ip4) /
UDP(sport=1234, dport=48879) /
VXLAN(vni=445, gpid=4222, flags=0x88, gpflags='A') /
Ether(src=self.pg0.remote_mac, dst=str(self.router_mac)) /
IP(src="10.222.0.1", dst="10.222.0.2") /
UDP(sport=1234, dport=1234) /
Raw(b'\xa5' * 100))
rxs = self.send_and_assert_no_replies(self.pg7, p * 3)
#
# cleanup
#
self.vlan_101.set_vtr(L2_VTR_OP.L2_DISABLED)
self.vlan_100.set_vtr(L2_VTR_OP.L2_DISABLED)
self.pg7.unconfig_ip4()
if __name__ == '__main__':
unittest.main(testRunner=VppTestRunner)
| 38.762729
| 79
| 0.51652
| 28,817
| 228,390
| 3.886699
| 0.031336
| 0.02949
| 0.034499
| 0.024107
| 0.845201
| 0.822692
| 0.803639
| 0.779542
| 0.764256
| 0.746668
| 0
| 0.074473
| 0.373917
| 228,390
| 5,891
| 80
| 38.769309
| 0.708814
| 0.080682
| 0
| 0.743289
| 0
| 0
| 0.039219
| 0.002352
| 0
| 0
| 0.0009
| 0.00017
| 0.130796
| 1
| 0.023914
| false
| 0
| 0.005124
| 0.005612
| 0.047584
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
db5e1ce7e3faf3e249cc7aca967a225996da4c18
| 52,758
|
py
|
Python
|
stage/configuration/test_http_client_destination.py
|
streamsets/datacollector-tests
|
6c3e908768e1d4a586e9183e2141096921ecd5be
|
[
"Apache-2.0"
] | 14
|
2019-03-04T10:12:39.000Z
|
2021-11-24T16:17:09.000Z
|
stage/configuration/test_http_client_destination.py
|
Pragatibs/datacollector-tests
|
aac53b2f0e056009ef0e437c8430651e3cf4d502
|
[
"Apache-2.0"
] | 48
|
2019-03-08T14:59:06.000Z
|
2021-08-13T14:49:56.000Z
|
stage/configuration/test_http_client_destination.py
|
Pragatibs/datacollector-tests
|
aac53b2f0e056009ef0e437c8430651e3cf4d502
|
[
"Apache-2.0"
] | 23
|
2018-09-24T20:49:17.000Z
|
2021-11-24T16:17:11.000Z
|
# Copyright 2021 StreamSets Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from streamsets.testframework.decorators import stub
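# every test in this module is a stub placeholder: the bodies are bare
# `pass` statements, and the parametrize matrices record the stage
# attribute combinations each test is expected to cover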
@stub
@pytest.mark.parametrize('stage_attributes', [{'authentication_type': 'BASIC', 'use_oauth_2': True},
{'authentication_type': 'DIGEST', 'use_oauth_2': True},
{'authentication_type': 'NONE', 'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL', 'use_oauth_2': True}])
def test_additional_key_value_pairs_in_token_request_body(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'authentication_type': 'BASIC'},
{'authentication_type': 'DIGEST'},
{'authentication_type': 'NONE'},
{'authentication_type': 'OAUTH'},
{'authentication_type': 'UNIVERSAL'}])
def test_authentication_type(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'avro_compression_codec': 'BZIP2', 'data_format': 'AVRO'},
{'avro_compression_codec': 'DEFLATE', 'data_format': 'AVRO'},
{'avro_compression_codec': 'NULL', 'data_format': 'AVRO'},
{'avro_compression_codec': 'SNAPPY', 'data_format': 'AVRO'}])
def test_avro_compression_codec(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'avro_schema_location': 'INLINE', 'data_format': 'AVRO'}])
def test_avro_schema(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'avro_schema_location': 'HEADER', 'data_format': 'AVRO'},
{'avro_schema_location': 'INLINE', 'data_format': 'AVRO'},
{'avro_schema_location': 'REGISTRY', 'data_format': 'AVRO'}])
def test_avro_schema_location(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'BINARY'}])
def test_binary_field_path(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_charset(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'checksum_algorithm': 'MD5',
'data_format': 'WHOLE_FILE',
'include_checksum_in_events': True},
{'checksum_algorithm': 'MURMUR3_128',
'data_format': 'WHOLE_FILE',
'include_checksum_in_events': True},
{'checksum_algorithm': 'MURMUR3_32',
'data_format': 'WHOLE_FILE',
'include_checksum_in_events': True},
{'checksum_algorithm': 'SHA1',
'data_format': 'WHOLE_FILE',
'include_checksum_in_events': True},
{'checksum_algorithm': 'SHA256',
'data_format': 'WHOLE_FILE',
'include_checksum_in_events': True},
{'checksum_algorithm': 'SHA512',
'data_format': 'WHOLE_FILE',
'include_checksum_in_events': True}])
def test_checksum_algorithm(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_default_cipher_suites': False, 'use_tls': True}])
def test_cipher_suites(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'authentication_type': 'NONE',
'credentials_grant_type': 'CLIENT_CREDENTIALS',
'use_oauth_2': True},
{'authentication_type': 'BASIC',
'credentials_grant_type': 'RESOURCE_OWNER',
'use_oauth_2': True},
{'authentication_type': 'DIGEST',
'credentials_grant_type': 'RESOURCE_OWNER',
'use_oauth_2': True},
{'authentication_type': 'NONE',
'credentials_grant_type': 'RESOURCE_OWNER',
'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL',
'credentials_grant_type': 'RESOURCE_OWNER',
'use_oauth_2': True}])
def test_client_id(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'authentication_type': 'NONE',
'credentials_grant_type': 'CLIENT_CREDENTIALS',
'use_oauth_2': True},
{'authentication_type': 'BASIC',
'credentials_grant_type': 'RESOURCE_OWNER',
'use_oauth_2': True},
{'authentication_type': 'DIGEST',
'credentials_grant_type': 'RESOURCE_OWNER',
'use_oauth_2': True},
{'authentication_type': 'NONE',
'credentials_grant_type': 'RESOURCE_OWNER',
'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL',
'credentials_grant_type': 'RESOURCE_OWNER',
'use_oauth_2': True}])
def test_client_secret(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_connect_timeout(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'authentication_type': 'OAUTH'}])
def test_consumer_key(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'authentication_type': 'OAUTH'}])
def test_consumer_secret(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'authentication_type': 'BASIC',
'credentials_grant_type': 'CLIENT_CREDENTIALS',
'use_oauth_2': True},
{'authentication_type': 'BASIC',
'credentials_grant_type': 'JWT',
'use_oauth_2': True},
{'authentication_type': 'BASIC',
'credentials_grant_type': 'RESOURCE_OWNER',
'use_oauth_2': True},
{'authentication_type': 'DIGEST',
'credentials_grant_type': 'CLIENT_CREDENTIALS',
'use_oauth_2': True},
{'authentication_type': 'DIGEST',
'credentials_grant_type': 'JWT',
'use_oauth_2': True},
{'authentication_type': 'DIGEST',
'credentials_grant_type': 'RESOURCE_OWNER',
'use_oauth_2': True},
{'authentication_type': 'NONE',
'credentials_grant_type': 'CLIENT_CREDENTIALS',
'use_oauth_2': True},
{'authentication_type': 'NONE',
'credentials_grant_type': 'JWT',
'use_oauth_2': True},
{'authentication_type': 'NONE',
'credentials_grant_type': 'RESOURCE_OWNER',
'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL',
'credentials_grant_type': 'CLIENT_CREDENTIALS',
'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL',
'credentials_grant_type': 'JWT',
'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL',
'credentials_grant_type': 'RESOURCE_OWNER',
'use_oauth_2': True}])
def test_credentials_grant_type(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'AVRO'},
{'data_format': 'BINARY'},
{'data_format': 'DELIMITED'},
{'data_format': 'JSON'},
{'data_format': 'PROTOBUF'},
{'data_format': 'SDC_JSON'},
{'data_format': 'TEXT'}])
def test_data_format(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'DELIMITED', 'delimiter_format': 'CUSTOM'}])
def test_delimiter_character(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'DELIMITED', 'delimiter_format': 'CSV'},
{'data_format': 'DELIMITED', 'delimiter_format': 'CUSTOM'},
{'data_format': 'DELIMITED', 'delimiter_format': 'EXCEL'},
{'data_format': 'DELIMITED', 'delimiter_format': 'MULTI_CHARACTER'},
{'data_format': 'DELIMITED', 'delimiter_format': 'MYSQL'},
{'data_format': 'DELIMITED', 'delimiter_format': 'POSTGRES_CSV'},
{'data_format': 'DELIMITED', 'delimiter_format': 'POSTGRES_TEXT'},
{'data_format': 'DELIMITED', 'delimiter_format': 'RFC4180'},
{'data_format': 'DELIMITED', 'delimiter_format': 'TDF'}])
def test_delimiter_format(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'enable_request_logging': False}, {'enable_request_logging': True}])
def test_enable_request_logging(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'DELIMITED', 'delimiter_format': 'CUSTOM'}])
def test_escape_character(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'WHOLE_FILE', 'file_exists': 'OVERWRITE'},
{'data_format': 'WHOLE_FILE', 'file_exists': 'TO_ERROR'}])
def test_file_exists(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'WHOLE_FILE'}])
def test_file_name_expression(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'DELIMITED', 'header_line': 'IGNORE_HEADER'},
{'data_format': 'DELIMITED', 'header_line': 'NO_HEADER'},
{'data_format': 'DELIMITED', 'header_line': 'WITH_HEADER'}])
def test_header_line(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_headers(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'http_compression': 'GZIP'},
{'http_compression': 'NONE'},
{'http_compression': 'SNAPPY'}])
def test_http_compression(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'http_method': 'DELETE'},
{'http_method': 'EXPRESSION'},
{'http_method': 'GET'},
{'http_method': 'HEAD'},
{'http_method': 'PATCH'},
{'http_method': 'POST'},
{'http_method': 'PUT'}])
def test_http_method(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'http_method': 'EXPRESSION'}])
def test_http_method_expression(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'WHOLE_FILE', 'include_checksum_in_events': False},
{'data_format': 'WHOLE_FILE', 'include_checksum_in_events': True}])
def test_include_checksum_in_events(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'AVRO', 'include_schema': False},
{'data_format': 'AVRO', 'include_schema': True}])
def test_include_schema(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'TEXT',
'insert_record_separator_if_no_text': False,
'on_missing_field': 'IGNORE'},
{'data_format': 'TEXT',
'insert_record_separator_if_no_text': True,
'on_missing_field': 'IGNORE'}])
def test_insert_record_separator_if_no_text(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'JSON', 'json_content': 'ARRAY_OBJECTS'},
{'data_format': 'JSON', 'json_content': 'MULTIPLE_OBJECTS'}])
def test_json_content(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'authentication_type': 'BASIC',
'credentials_grant_type': 'JWT',
'use_oauth_2': True},
{'authentication_type': 'DIGEST',
'credentials_grant_type': 'JWT',
'use_oauth_2': True},
{'authentication_type': 'NONE',
'credentials_grant_type': 'JWT',
'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL',
'credentials_grant_type': 'JWT',
'use_oauth_2': True}])
def test_jwt_claims(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'authentication_type': 'BASIC',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'HS256',
'use_oauth_2': True},
{'authentication_type': 'BASIC',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'HS384',
'use_oauth_2': True},
{'authentication_type': 'BASIC',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'HS512',
'use_oauth_2': True},
{'authentication_type': 'BASIC',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'NONE',
'use_oauth_2': True},
{'authentication_type': 'BASIC',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'RS256',
'use_oauth_2': True},
{'authentication_type': 'BASIC',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'RS384',
'use_oauth_2': True},
{'authentication_type': 'BASIC',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'RS512',
'use_oauth_2': True},
{'authentication_type': 'DIGEST',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'HS256',
'use_oauth_2': True},
{'authentication_type': 'DIGEST',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'HS384',
'use_oauth_2': True},
{'authentication_type': 'DIGEST',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'HS512',
'use_oauth_2': True},
{'authentication_type': 'DIGEST',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'NONE',
'use_oauth_2': True},
{'authentication_type': 'DIGEST',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'RS256',
'use_oauth_2': True},
{'authentication_type': 'DIGEST',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'RS384',
'use_oauth_2': True},
{'authentication_type': 'DIGEST',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'RS512',
'use_oauth_2': True},
{'authentication_type': 'NONE',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'HS256',
'use_oauth_2': True},
{'authentication_type': 'NONE',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'HS384',
'use_oauth_2': True},
{'authentication_type': 'NONE',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'HS512',
'use_oauth_2': True},
{'authentication_type': 'NONE',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'NONE',
'use_oauth_2': True},
{'authentication_type': 'NONE',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'RS256',
'use_oauth_2': True},
{'authentication_type': 'NONE',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'RS384',
'use_oauth_2': True},
{'authentication_type': 'NONE',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'RS512',
'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'HS256',
'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'HS384',
'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'HS512',
'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'NONE',
'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'RS256',
'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'RS384',
'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'RS512',
'use_oauth_2': True}])
def test_jwt_signing_algorithm(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'authentication_type': 'BASIC',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'HS256',
'use_oauth_2': True},
{'authentication_type': 'BASIC',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'HS384',
'use_oauth_2': True},
{'authentication_type': 'BASIC',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'HS512',
'use_oauth_2': True},
{'authentication_type': 'BASIC',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'RS256',
'use_oauth_2': True},
{'authentication_type': 'BASIC',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'RS384',
'use_oauth_2': True},
{'authentication_type': 'BASIC',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'RS512',
'use_oauth_2': True},
{'authentication_type': 'DIGEST',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'HS256',
'use_oauth_2': True},
{'authentication_type': 'DIGEST',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'HS384',
'use_oauth_2': True},
{'authentication_type': 'DIGEST',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'HS512',
'use_oauth_2': True},
{'authentication_type': 'DIGEST',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'RS256',
'use_oauth_2': True},
{'authentication_type': 'DIGEST',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'RS384',
'use_oauth_2': True},
{'authentication_type': 'DIGEST',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'RS512',
'use_oauth_2': True},
{'authentication_type': 'NONE',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'HS256',
'use_oauth_2': True},
{'authentication_type': 'NONE',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'HS384',
'use_oauth_2': True},
{'authentication_type': 'NONE',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'HS512',
'use_oauth_2': True},
{'authentication_type': 'NONE',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'RS256',
'use_oauth_2': True},
{'authentication_type': 'NONE',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'RS384',
'use_oauth_2': True},
{'authentication_type': 'NONE',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'RS512',
'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'HS256',
'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'HS384',
'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'HS512',
'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'RS256',
'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'RS384',
'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL',
'credentials_grant_type': 'JWT',
'jwt_signing_algorithm': 'RS512',
'use_oauth_2': True}])
def test_jwt_signing_key_in_base64_encoded(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_tls': True}])
def test_keystore_file(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_tls': True}])
def test_keystore_key_algorithm(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_tls': True}])
def test_keystore_password(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'keystore_type': 'JKS', 'use_tls': True},
{'keystore_type': 'PKCS12', 'use_tls': True}])
def test_keystore_type(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'enable_request_logging': True, 'log_level': 'ALL'},
{'enable_request_logging': True, 'log_level': 'CONFIG'},
{'enable_request_logging': True, 'log_level': 'FINE'},
{'enable_request_logging': True, 'log_level': 'FINER'},
{'enable_request_logging': True, 'log_level': 'FINEST'},
{'enable_request_logging': True, 'log_level': 'INFO'},
{'enable_request_logging': True, 'log_level': 'OFF'},
{'enable_request_logging': True, 'log_level': 'SEVERE'},
{'enable_request_logging': True, 'log_level': 'WARNING'}])
def test_log_level(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'avro_schema_location': 'REGISTRY',
'data_format': 'AVRO',
'lookup_schema_by': 'ID'},
{'avro_schema_location': 'REGISTRY',
'data_format': 'AVRO',
'lookup_schema_by': 'SUBJECT'}])
def test_lookup_schema_by(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'enable_request_logging': True}])
def test_max_entity_size(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_maximum_parallel_requests(sdc_builder, sdc_executor):
pass
@stub
def test_maximum_request_time_in_sec(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'PROTOBUF'}])
def test_message_type(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'DELIMITED', 'replace_new_line_characters': True}])
def test_new_line_character_replacement(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'TEXT', 'on_missing_field': 'ERROR'},
{'data_format': 'TEXT', 'on_missing_field': 'IGNORE'}])
def test_on_missing_field(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'on_record_error': 'DISCARD'},
{'on_record_error': 'STOP_PIPELINE'},
{'on_record_error': 'TO_ERROR'}])
def test_on_record_error(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'one_request_per_batch': False}, {'one_request_per_batch': True}])
def test_one_request_per_batch(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'authentication_type': 'BASIC',
'credentials_grant_type': 'RESOURCE_OWNER',
'use_oauth_2': True},
{'authentication_type': 'DIGEST',
'credentials_grant_type': 'RESOURCE_OWNER',
'use_oauth_2': True},
{'authentication_type': 'NONE',
'credentials_grant_type': 'RESOURCE_OWNER',
'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL',
'credentials_grant_type': 'RESOURCE_OWNER',
'use_oauth_2': True},
{'authentication_type': 'BASIC'},
{'authentication_type': 'DIGEST'},
{'authentication_type': 'UNIVERSAL'},
{'use_proxy': True}])
def test_password(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_preconditions(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'XML', 'pretty_format': False},
{'data_format': 'XML', 'pretty_format': True}])
def test_pretty_format(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'PROTOBUF'}])
def test_protobuf_descriptor_file(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_proxy': True}])
def test_proxy_uri(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'DELIMITED', 'delimiter_format': 'CUSTOM'}])
def test_quote_character(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'DELIMITED',
'delimiter_format': 'CUSTOM',
'quote_mode': 'ALL'},
{'data_format': 'DELIMITED',
'delimiter_format': 'CUSTOM',
'quote_mode': 'MINIMAL'},
{'data_format': 'DELIMITED',
'delimiter_format': 'CUSTOM',
'quote_mode': 'NONE'}])
def test_quote_mode(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_rate_limit_in_requests_per_sec(sdc_builder, sdc_executor):
pass
@stub
def test_read_timeout(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'TEXT'}])
def test_record_separator(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'avro_schema_location': 'HEADER',
'data_format': 'AVRO',
'register_schema': False},
{'avro_schema_location': 'HEADER',
'data_format': 'AVRO',
'register_schema': True},
{'avro_schema_location': 'INLINE',
'data_format': 'AVRO',
'register_schema': False},
{'avro_schema_location': 'INLINE',
'data_format': 'AVRO',
'register_schema': True}])
def test_register_schema(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'DELIMITED', 'replace_new_line_characters': False},
{'data_format': 'DELIMITED', 'replace_new_line_characters': True}])
def test_replace_new_line_characters(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'request_transfer_encoding': 'BUFFERED'},
{'request_transfer_encoding': 'CHUNKED'},
{'authentication_type': 'BASIC',
'request_transfer_encoding': 'BUFFERED',
'use_oauth_2': True},
{'authentication_type': 'BASIC',
'request_transfer_encoding': 'CHUNKED',
'use_oauth_2': True},
{'authentication_type': 'DIGEST',
'request_transfer_encoding': 'BUFFERED',
'use_oauth_2': True},
{'authentication_type': 'DIGEST',
'request_transfer_encoding': 'CHUNKED',
'use_oauth_2': True},
{'authentication_type': 'NONE',
'request_transfer_encoding': 'BUFFERED',
'use_oauth_2': True},
{'authentication_type': 'NONE',
'request_transfer_encoding': 'CHUNKED',
'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL',
'request_transfer_encoding': 'BUFFERED',
'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL',
'request_transfer_encoding': 'CHUNKED',
'use_oauth_2': True}])
def test_request_transfer_encoding(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
def test_required_fields(sdc_builder, sdc_executor):
pass
@stub
def test_resource_url(sdc_builder, sdc_executor):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'response_type': 'DESTINATION_RESPONSE',
'send_response_to_origin': True},
{'response_type': 'SUCCESS_RECORDS', 'send_response_to_origin': True}])
def test_response_type(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'avro_schema_location': 'REGISTRY',
'data_format': 'AVRO',
'lookup_schema_by': 'ID'}])
def test_schema_id(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'avro_schema_location': 'HEADER',
'data_format': 'AVRO',
'register_schema': True},
{'avro_schema_location': 'INLINE',
'data_format': 'AVRO',
'register_schema': True},
{'avro_schema_location': 'REGISTRY', 'data_format': 'AVRO'}])
def test_schema_registry_urls(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'avro_schema_location': 'REGISTRY',
'data_format': 'AVRO',
'lookup_schema_by': 'SUBJECT'},
{'avro_schema_location': 'HEADER',
'data_format': 'AVRO',
'register_schema': True},
{'avro_schema_location': 'INLINE',
'data_format': 'AVRO',
'register_schema': True}])
def test_schema_subject(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'send_response_to_origin': False}, {'send_response_to_origin': True}])
def test_send_response_to_origin(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'TEXT'}])
def test_text_field_path(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'authentication_type': 'OAUTH'}])
def test_token(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'authentication_type': 'OAUTH'}])
def test_token_secret(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'authentication_type': 'BASIC', 'use_oauth_2': True},
{'authentication_type': 'DIGEST', 'use_oauth_2': True},
{'authentication_type': 'NONE', 'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL', 'use_oauth_2': True}])
def test_token_url(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_default_protocols': False, 'use_tls': True}])
def test_transport_protocols(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_tls': True}])
def test_truststore_file(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_tls': True}])
def test_truststore_password(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_tls': True}])
def test_truststore_trust_algorithm(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'truststore_type': 'JKS', 'use_tls': True},
{'truststore_type': 'PKCS12', 'use_tls': True}])
def test_truststore_type(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_default_cipher_suites': False, 'use_tls': True},
{'use_default_cipher_suites': True, 'use_tls': True}])
def test_use_default_cipher_suites(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_default_protocols': False, 'use_tls': True},
{'use_default_protocols': True, 'use_tls': True}])
def test_use_default_protocols(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'authentication_type': 'BASIC', 'use_oauth_2': False},
{'authentication_type': 'BASIC', 'use_oauth_2': True},
{'authentication_type': 'DIGEST', 'use_oauth_2': False},
{'authentication_type': 'DIGEST', 'use_oauth_2': True},
{'authentication_type': 'NONE', 'use_oauth_2': False},
{'authentication_type': 'NONE', 'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL', 'use_oauth_2': False},
{'authentication_type': 'UNIVERSAL', 'use_oauth_2': True}])
def test_use_oauth_2(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_proxy': False}, {'use_proxy': True}])
def test_use_proxy(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'use_tls': False}, {'use_tls': True}])
def test_use_tls(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'authentication_type': 'BASIC',
'credentials_grant_type': 'RESOURCE_OWNER',
'use_oauth_2': True},
{'authentication_type': 'DIGEST',
'credentials_grant_type': 'RESOURCE_OWNER',
'use_oauth_2': True},
{'authentication_type': 'NONE',
'credentials_grant_type': 'RESOURCE_OWNER',
'use_oauth_2': True},
{'authentication_type': 'UNIVERSAL',
'credentials_grant_type': 'RESOURCE_OWNER',
'use_oauth_2': True},
{'authentication_type': 'BASIC'},
{'authentication_type': 'DIGEST'},
{'authentication_type': 'UNIVERSAL'},
{'use_proxy': True}])
def test_username(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'XML', 'validate_schema': False},
{'data_format': 'XML', 'validate_schema': True}])
def test_validate_schema(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'enable_request_logging': True, 'verbosity': 'HEADERS_ONLY'},
{'enable_request_logging': True, 'verbosity': 'PAYLOAD_ANY'},
{'enable_request_logging': True, 'verbosity': 'PAYLOAD_TEXT'}])
def test_verbosity(sdc_builder, sdc_executor, stage_attributes):
pass
@stub
@pytest.mark.parametrize('stage_attributes', [{'data_format': 'XML', 'validate_schema': True}])
def test_xml_schema(sdc_builder, sdc_executor, stage_attributes):
pass
| 53.129909
| 117
| 0.440957
| 3,839
| 52,758
| 5.647564
| 0.068768
| 0.105161
| 0.046077
| 0.063558
| 0.891749
| 0.871685
| 0.826207
| 0.816245
| 0.794982
| 0.784051
| 0
| 0.010165
| 0.461124
| 52,758
| 992
| 118
| 53.183468
| 0.752445
| 0.010463
| 0
| 0.735404
| 0
| 0
| 0.276044
| 0.082026
| 0
| 0
| 0
| 0
| 0
| 1
| 0.106832
| false
| 0.110559
| 0.002484
| 0
| 0.109317
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 8
|
dba19f053bfade3f2180f15bd9888b3b7e66b026
| 2,685
|
py
|
Python
|
examples/example_proj_django_3/test_app/tests.py
|
philsupertramp/dj-migration-test
|
97ec4513b9848d96436907de7940841866895e3c
|
[
"MIT"
] | 4
|
2019-07-05T19:32:07.000Z
|
2020-02-07T00:47:15.000Z
|
examples/example_proj_django_3/test_app/tests.py
|
philsupertramp/dj-migration-test
|
97ec4513b9848d96436907de7940841866895e3c
|
[
"MIT"
] | 17
|
2019-08-23T07:21:23.000Z
|
2021-09-22T18:44:26.000Z
|
examples/example_proj_django_3/test_app/tests.py
|
philsupertramp/dj-migration-test
|
97ec4513b9848d96436907de7940841866895e3c
|
[
"MIT"
] | null | null | null |
from dj_migration_test.test_clients import MigrationTestCase
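# each case below runs the test_app migrations from `migrate_from` (where
# given) up to `migrate_to`; setUpDataBeforeMigration seeds rows under the
# old schema so the test methods can assert on the migrated state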
class TestAppMigration0001To0003TestCase(MigrationTestCase):
migrate_from = ('test_app', '0001_initial')
migrate_to = ('test_app', '0003_remove_testobja_name')
@staticmethod
def setUpDataBeforeMigration(test_instance, apps):
test_obj_a_class = apps.get_model('test_app', 'TestObjA')
test_obj_a = test_obj_a_class.objects.create(name='ab')
test_instance.obj_id = test_obj_a.id
def test_name_split(self):
test_obj_a_class = self.apps.get_model('test_app', 'TestObjA')
test_obj_a = test_obj_a_class.objects.get(pk=self.obj_id)
self.assertEqual(test_obj_a.first_name, 'a')
self.assertEqual(test_obj_a.last_name, 'b')
class TestAppMigration0003To0004TestCase(MigrationTestCase):
migrate_to = ('test_app', '0004_auto_20200314_0946')
@staticmethod
def setUpDataBeforeMigration(test_instance, apps):
test_obj_a_class = apps.get_model('test_app', 'TestObjA')
test_obj_a = test_obj_a_class.objects.create(first_name='a', last_name='b')
test_instance.obj_id = test_obj_a.id
def test_sth(self):
test_obj_a_class = self.apps.get_model('test_app', 'TestObjA')
test_obj_a = test_obj_a_class.objects.get(pk=self.obj_id)
self.assertEqual(test_obj_a.fk_dep, None)
class TestAppMigration0003To0004_2TestCase(MigrationTestCase):
migrate_from = ('test_app', '0003_remove_testobja_name')
migrate_to = ('test_app', '0004_auto_20200314_0946')
@staticmethod
def setUpDataBeforeMigration(test_instance, apps):
test_obj_a_class = apps.get_model('test_app', 'TestObjA')
test_obj_a = test_obj_a_class.objects.create(first_name='a', last_name='b')
test_instance.obj_id = test_obj_a.id
def test_sth(self):
test_obj_a_class = self.apps.get_model('test_app', 'TestObjA')
test_obj_a = test_obj_a_class.objects.get(pk=self.obj_id)
self.assertEqual(test_obj_a.fk_dep, None)
class TestAppMigration0004To0005TestCase(MigrationTestCase):
migrate_from = ('test_app', '0003_remove_testobja_name')
migrate_to = ('test_app', '0005_testobja_second_fk_dep')
@staticmethod
def setUpDataBeforeMigration(test_instance, apps):
test_obj_a_class = apps.get_model('test_app', 'TestObjA')
test_obj_a = test_obj_a_class.objects.create(first_name='a', last_name='b')
test_instance.obj_id = test_obj_a.id
def test_sth(self):
test_obj_a_class = self.apps.get_model('test_app', 'TestObjA')
test_obj_a = test_obj_a_class.objects.get(pk=self.obj_id)
self.assertEqual(test_obj_a.fk_dep, None)
| 40.681818
| 83
| 0.729236
| 381
| 2,685
| 4.698163
| 0.136483
| 0.12905
| 0.147486
| 0.116201
| 0.835754
| 0.803352
| 0.787151
| 0.787151
| 0.787151
| 0.787151
| 0
| 0.037811
| 0.162756
| 2,685
| 65
| 84
| 41.307692
| 0.758452
| 0
| 0
| 0.755102
| 0
| 0
| 0.131844
| 0.055121
| 0
| 0
| 0
| 0
| 0.102041
| 1
| 0.163265
| false
| 0
| 0.020408
| 0
| 0.408163
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
dbb83d1eb3794bbaa7dc048463a3516202edbb72
| 61
|
py
|
Python
|
CodeWars/8 Kyu/Count the number of cubes with paint on.py
|
anubhab-code/Competitive-Programming
|
de28cb7d44044b9e7d8bdb475da61e37c018ac35
|
[
"MIT"
] | null | null | null |
CodeWars/8 Kyu/Count the number of cubes with paint on.py
|
anubhab-code/Competitive-Programming
|
de28cb7d44044b9e7d8bdb475da61e37c018ac35
|
[
"MIT"
] | null | null | null |
CodeWars/8 Kyu/Count the number of cubes with paint on.py
|
anubhab-code/Competitive-Programming
|
de28cb7d44044b9e7d8bdb475da61e37c018ac35
|
[
"MIT"
] | null | null | null |
def count_squares(cuts):
return (cuts+1)**3 - (cuts-1)**3
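# after `cuts` slices along each axis the cube splits into (cuts+1)**3
# unit cubes, of which the (cuts-1)**3 interior cubes touch no face, so
# the painted count is the difference; e.g. count_squares(3) == 64 - 8 == 56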
| 30.5
| 36
| 0.622951
| 11
| 61
| 3.363636
| 0.636364
| 0.27027
| 0.324324
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.076923
| 0.147541
| 61
| 2
| 36
| 30.5
| 0.634615
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.5
| false
| 0
| 0
| 0.5
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 0
|
0
| 7
|
91d19d02179f54980c8cca09486cdb97cbd98b40
| 28,848
|
py
|
Python
|
watcher/tests/notifications/test_action_plan_notification.py
|
ajaytikoo/watcher
|
6dbac1f6ae7f3e10dfdcef5721fa4af7af54e159
|
[
"Apache-2.0"
] | 64
|
2015-10-18T02:57:24.000Z
|
2022-01-13T11:27:51.000Z
|
watcher/tests/notifications/test_action_plan_notification.py
|
ajaytikoo/watcher
|
6dbac1f6ae7f3e10dfdcef5721fa4af7af54e159
|
[
"Apache-2.0"
] | null | null | null |
watcher/tests/notifications/test_action_plan_notification.py
|
ajaytikoo/watcher
|
6dbac1f6ae7f3e10dfdcef5721fa4af7af54e159
|
[
"Apache-2.0"
] | 35
|
2015-12-25T13:53:21.000Z
|
2021-07-19T15:50:16.000Z
|
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import freezegun
from unittest import mock
import oslo_messaging as om
from watcher.common import exception
from watcher.common import rpc
from watcher import notifications
from watcher import objects
from watcher.tests.db import base
from watcher.tests.objects import utils
@freezegun.freeze_time('2016-10-18T09:52:05.219414')
class TestActionPlanNotification(base.DbTestCase):
def setUp(self):
super(TestActionPlanNotification, self).setUp()
p_get_notifier = mock.patch.object(rpc, 'get_notifier')
m_get_notifier = p_get_notifier.start()
self.addCleanup(p_get_notifier.stop)
self.m_notifier = mock.Mock(spec=om.Notifier)
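# route every rpc.get_notifier() call through the single mock Notifier
# while recording the publisher_id it was requested with, so the tests
# can assert on the "infra-optim:node0" publisher below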
def fake_get_notifier(publisher_id):
self.m_notifier.publisher_id = publisher_id
return self.m_notifier
m_get_notifier.side_effect = fake_get_notifier
self.goal = utils.create_test_goal(mock.Mock())
self.audit = utils.create_test_audit(mock.Mock(), interval=None)
self.strategy = utils.create_test_strategy(mock.Mock())
def test_send_invalid_action_plan(self):
action_plan = utils.get_test_action_plan(
mock.Mock(), state='DOESNOTMATTER', audit_id=1)
self.assertRaises(
exception.InvalidActionPlan,
notifications.action_plan.send_update,
mock.MagicMock(), action_plan, host='node0')
def test_send_action_plan_update(self):
action_plan = utils.create_test_action_plan(
mock.Mock(), state=objects.action_plan.State.ONGOING,
audit_id=self.audit.id, strategy_id=self.strategy.id,
audit=self.audit, strategy=self.strategy)
notifications.action_plan.send_update(
mock.MagicMock(), action_plan, host='node0',
old_state=objects.action_plan.State.PENDING)
# The 1st notification is because we created the audit object.
# The 2nd notification is because we created the action plan object.
self.assertEqual(3, self.m_notifier.info.call_count)
notification = self.m_notifier.info.call_args[1]
payload = notification['payload']
self.assertEqual("infra-optim:node0", self.m_notifier.publisher_id)
self.assertDictEqual(
{
"watcher_object.namespace": "watcher",
"watcher_object.version": "1.1",
"watcher_object.data": {
"global_efficacy": [],
"strategy_uuid": "cb3d0b58-4415-4d90-b75b-1e96878730e3",
"strategy": {
"watcher_object.namespace": "watcher",
"watcher_object.version": "1.0",
"watcher_object.data": {
"updated_at": None,
"uuid": "cb3d0b58-4415-4d90-b75b-1e96878730e3",
"name": "TEST",
"parameters_spec": {},
"created_at": "2016-10-18T09:52:05Z",
"display_name": "test strategy",
"deleted_at": None
},
"watcher_object.name": "StrategyPayload"
},
"uuid": "76be87bd-3422-43f9-93a0-e85a577e3061",
"audit_uuid": "10a47dd1-4874-4298-91cf-eff046dbdb8d",
"audit": {
"watcher_object.data": {
"interval": None,
"next_run_time": None,
"auto_trigger": False,
"parameters": {},
"uuid": "10a47dd1-4874-4298-91cf-eff046dbdb8d",
"name": "My Audit",
"strategy_uuid": None,
"goal_uuid": (
"f7ad87ae-4298-91cf-93a0-f35a852e3652"),
"deleted_at": None,
"scope": [],
"state": "PENDING",
"updated_at": None,
"created_at": "2016-10-18T09:52:05Z",
"audit_type": "ONESHOT"
},
"watcher_object.name": "TerseAuditPayload",
"watcher_object.namespace": "watcher",
"watcher_object.version": "1.2"
},
"deleted_at": None,
"state": "ONGOING",
"updated_at": None,
"created_at": "2016-10-18T09:52:05Z",
"state_update": {
"watcher_object.namespace": "watcher",
"watcher_object.version": "1.0",
"watcher_object.data": {
"old_state": "PENDING",
"state": "ONGOING"
},
"watcher_object.name": "ActionPlanStateUpdatePayload"
},
},
"watcher_object.name": "ActionPlanUpdatePayload"
},
payload
)
def test_send_action_plan_create(self):
action_plan = utils.get_test_action_plan(
mock.Mock(), state=objects.action_plan.State.PENDING,
audit_id=self.audit.id, strategy_id=self.strategy.id,
audit=self.audit.as_dict(), strategy=self.strategy.as_dict())
notifications.action_plan.send_create(
mock.MagicMock(), action_plan, host='node0')
self.assertEqual(2, self.m_notifier.info.call_count)
notification = self.m_notifier.info.call_args[1]
payload = notification['payload']
self.assertEqual("infra-optim:node0", self.m_notifier.publisher_id)
self.assertDictEqual(
{
"watcher_object.namespace": "watcher",
"watcher_object.version": "1.1",
"watcher_object.data": {
"global_efficacy": [],
"strategy_uuid": "cb3d0b58-4415-4d90-b75b-1e96878730e3",
"strategy": {
"watcher_object.namespace": "watcher",
"watcher_object.version": "1.0",
"watcher_object.data": {
"updated_at": None,
"uuid": "cb3d0b58-4415-4d90-b75b-1e96878730e3",
"name": "TEST",
"parameters_spec": {},
"created_at": "2016-10-18T09:52:05Z",
"display_name": "test strategy",
"deleted_at": None
},
"watcher_object.name": "StrategyPayload"
},
"uuid": "76be87bd-3422-43f9-93a0-e85a577e3061",
"audit_uuid": "10a47dd1-4874-4298-91cf-eff046dbdb8d",
"audit": {
"watcher_object.data": {
"interval": None,
"next_run_time": None,
"auto_trigger": False,
"parameters": {},
"uuid": "10a47dd1-4874-4298-91cf-eff046dbdb8d",
"name": "My Audit",
"strategy_uuid": None,
"goal_uuid": (
"f7ad87ae-4298-91cf-93a0-f35a852e3652"),
"deleted_at": None,
"scope": [],
"state": "PENDING",
"updated_at": None,
"created_at": "2016-10-18T09:52:05Z",
"audit_type": "ONESHOT"
},
"watcher_object.name": "TerseAuditPayload",
"watcher_object.namespace": "watcher",
"watcher_object.version": "1.2"
},
"deleted_at": None,
"state": "PENDING",
"updated_at": None,
"created_at": None,
},
"watcher_object.name": "ActionPlanCreatePayload"
},
payload
)
def test_send_action_plan_delete(self):
action_plan = utils.create_test_action_plan(
mock.Mock(), state=objects.action_plan.State.DELETED,
audit_id=self.audit.id, strategy_id=self.strategy.id)
notifications.action_plan.send_delete(
mock.MagicMock(), action_plan, host='node0')
# The 1st notification is because we created the audit object.
# The 2nd notification is because we created the action plan object.
self.assertEqual(3, self.m_notifier.info.call_count)
notification = self.m_notifier.info.call_args[1]
payload = notification['payload']
self.assertEqual("infra-optim:node0", self.m_notifier.publisher_id)
self.assertDictEqual(
{
"watcher_object.namespace": "watcher",
"watcher_object.version": "1.1",
"watcher_object.data": {
"global_efficacy": [],
"strategy_uuid": "cb3d0b58-4415-4d90-b75b-1e96878730e3",
"strategy": {
"watcher_object.namespace": "watcher",
"watcher_object.version": "1.0",
"watcher_object.data": {
"updated_at": None,
"uuid": "cb3d0b58-4415-4d90-b75b-1e96878730e3",
"name": "TEST",
"parameters_spec": {},
"created_at": "2016-10-18T09:52:05Z",
"display_name": "test strategy",
"deleted_at": None
},
"watcher_object.name": "StrategyPayload"
},
"uuid": "76be87bd-3422-43f9-93a0-e85a577e3061",
"audit_uuid": "10a47dd1-4874-4298-91cf-eff046dbdb8d",
"audit": {
"watcher_object.data": {
"interval": None,
"next_run_time": None,
"auto_trigger": False,
"parameters": {},
"uuid": "10a47dd1-4874-4298-91cf-eff046dbdb8d",
"name": "My Audit",
"strategy_uuid": None,
"goal_uuid": (
"f7ad87ae-4298-91cf-93a0-f35a852e3652"),
"deleted_at": None,
"scope": [],
"state": "PENDING",
"updated_at": None,
"created_at": "2016-10-18T09:52:05Z",
"audit_type": "ONESHOT"
},
"watcher_object.name": "TerseAuditPayload",
"watcher_object.namespace": "watcher",
"watcher_object.version": "1.2"
},
"deleted_at": None,
"state": "DELETED",
"updated_at": None,
"created_at": "2016-10-18T09:52:05Z",
},
"watcher_object.name": "ActionPlanDeletePayload"
},
payload
)
def test_send_action_plan_action(self):
action_plan = utils.create_test_action_plan(
mock.Mock(), state=objects.action_plan.State.ONGOING,
audit_id=self.audit.id, strategy_id=self.strategy.id,
audit=self.audit, strategy=self.strategy)
notifications.action_plan.send_action_notification(
mock.MagicMock(), action_plan, host='node0',
action='execution', phase='start')
# The 1st notification is because we created the audit object.
# The 2nd notification is because we created the action plan object.
self.assertEqual(3, self.m_notifier.info.call_count)
notification = self.m_notifier.info.call_args[1]
self.assertEqual("infra-optim:node0", self.m_notifier.publisher_id)
self.assertDictEqual(
{
"event_type": "action_plan.execution.start",
"payload": {
"watcher_object.data": {
"created_at": "2016-10-18T09:52:05Z",
"deleted_at": None,
"fault": None,
"audit_uuid": "10a47dd1-4874-4298-91cf-eff046dbdb8d",
"audit": {
"watcher_object.namespace": "watcher",
"watcher_object.name": "TerseAuditPayload",
"watcher_object.version": "1.2",
"watcher_object.data": {
"interval": None,
"next_run_time": None,
"auto_trigger": False,
"parameters": {},
"uuid": "10a47dd1-4874-4298-91cf-eff046dbdb8d",
"name": "My Audit",
"strategy_uuid": None,
"goal_uuid": (
"f7ad87ae-4298-91cf-93a0-f35a852e3652"),
"deleted_at": None,
"scope": [],
"state": "PENDING",
"updated_at": None,
"created_at": "2016-10-18T09:52:05Z",
"audit_type": "ONESHOT"
}
},
"global_efficacy": [],
"state": "ONGOING",
"strategy_uuid": (
"cb3d0b58-4415-4d90-b75b-1e96878730e3"),
"strategy": {
"watcher_object.data": {
"created_at": "2016-10-18T09:52:05Z",
"deleted_at": None,
"display_name": "test strategy",
"name": "TEST",
"parameters_spec": {},
"updated_at": None,
"uuid": "cb3d0b58-4415-4d90-b75b-1e96878730e3"
},
"watcher_object.name": "StrategyPayload",
"watcher_object.namespace": "watcher",
"watcher_object.version": "1.0"
},
"updated_at": None,
"uuid": "76be87bd-3422-43f9-93a0-e85a577e3061"
},
"watcher_object.name": "ActionPlanActionPayload",
"watcher_object.namespace": "watcher",
"watcher_object.version": "1.1"
}
},
notification
)
def test_send_action_plan_action_with_error(self):
action_plan = utils.create_test_action_plan(
mock.Mock(), state=objects.action_plan.State.ONGOING,
audit_id=self.audit.id, strategy_id=self.strategy.id,
audit=self.audit, strategy=self.strategy)
try:
# This is to load the exception in sys.exc_info()
raise exception.WatcherException("TEST")
except exception.WatcherException:
notifications.action_plan.send_action_notification(
mock.MagicMock(), action_plan, host='node0',
action='execution', priority='error', phase='error')
self.assertEqual(1, self.m_notifier.error.call_count)
notification = self.m_notifier.error.call_args[1]
self.assertEqual("infra-optim:node0", self.m_notifier.publisher_id)
self.assertDictEqual(
{
"event_type": "action_plan.execution.error",
"payload": {
"watcher_object.data": {
"created_at": "2016-10-18T09:52:05Z",
"deleted_at": None,
"fault": {
"watcher_object.data": {
"exception": "WatcherException",
"exception_message": "TEST",
"function_name": (
"test_send_action_plan_action_with_error"),
"module_name": "watcher.tests.notifications."
"test_action_plan_notification"
},
"watcher_object.name": "ExceptionPayload",
"watcher_object.namespace": "watcher",
"watcher_object.version": "1.0"
},
"audit_uuid": "10a47dd1-4874-4298-91cf-eff046dbdb8d",
"audit": {
"watcher_object.data": {
"interval": None,
"next_run_time": None,
"auto_trigger": False,
"parameters": {},
"uuid": "10a47dd1-4874-4298-91cf-eff046dbdb8d",
"name": "My Audit",
"strategy_uuid": None,
"goal_uuid": (
"f7ad87ae-4298-91cf-93a0-f35a852e3652"),
"deleted_at": None,
"scope": [],
"state": "PENDING",
"updated_at": None,
"created_at": "2016-10-18T09:52:05Z",
"audit_type": "ONESHOT"
},
"watcher_object.name": "TerseAuditPayload",
"watcher_object.namespace": "watcher",
"watcher_object.version": "1.2"
},
"global_efficacy": [],
"state": "ONGOING",
"strategy_uuid": (
"cb3d0b58-4415-4d90-b75b-1e96878730e3"),
"strategy": {
"watcher_object.data": {
"created_at": "2016-10-18T09:52:05Z",
"deleted_at": None,
"display_name": "test strategy",
"name": "TEST",
"parameters_spec": {},
"updated_at": None,
"uuid": "cb3d0b58-4415-4d90-b75b-1e96878730e3"
},
"watcher_object.name": "StrategyPayload",
"watcher_object.namespace": "watcher",
"watcher_object.version": "1.0"
},
"updated_at": None,
"uuid": "76be87bd-3422-43f9-93a0-e85a577e3061"
},
"watcher_object.name": "ActionPlanActionPayload",
"watcher_object.namespace": "watcher",
"watcher_object.version": "1.1"
}
},
notification
)
def test_send_action_plan_cancel(self):
action_plan = utils.create_test_action_plan(
mock.Mock(), state=objects.action_plan.State.ONGOING,
audit_id=self.audit.id, strategy_id=self.strategy.id,
audit=self.audit, strategy=self.strategy)
notifications.action_plan.send_cancel_notification(
mock.MagicMock(), action_plan, host='node0',
action='cancel', phase='start')
# The 1st notification is because we created the audit object.
# The 2nd notification is because we created the action plan object.
self.assertEqual(3, self.m_notifier.info.call_count)
notification = self.m_notifier.info.call_args[1]
self.assertEqual("infra-optim:node0", self.m_notifier.publisher_id)
self.assertDictEqual(
{
"event_type": "action_plan.cancel.start",
"payload": {
"watcher_object.data": {
"created_at": "2016-10-18T09:52:05Z",
"deleted_at": None,
"fault": None,
"audit_uuid": "10a47dd1-4874-4298-91cf-eff046dbdb8d",
"audit": {
"watcher_object.namespace": "watcher",
"watcher_object.name": "TerseAuditPayload",
"watcher_object.version": "1.2",
"watcher_object.data": {
"interval": None,
"next_run_time": None,
"auto_trigger": False,
"parameters": {},
"uuid": "10a47dd1-4874-4298-91cf-eff046dbdb8d",
'name': 'My Audit',
"strategy_uuid": None,
"goal_uuid": (
"f7ad87ae-4298-91cf-93a0-f35a852e3652"),
"deleted_at": None,
"scope": [],
"state": "PENDING",
"updated_at": None,
"created_at": "2016-10-18T09:52:05Z",
"audit_type": "ONESHOT"
}
},
"global_efficacy": [],
"state": "ONGOING",
"strategy_uuid": (
"cb3d0b58-4415-4d90-b75b-1e96878730e3"),
"strategy": {
"watcher_object.data": {
"created_at": "2016-10-18T09:52:05Z",
"deleted_at": None,
"display_name": "test strategy",
"name": "TEST",
"parameters_spec": {},
"updated_at": None,
"uuid": "cb3d0b58-4415-4d90-b75b-1e96878730e3"
},
"watcher_object.name": "StrategyPayload",
"watcher_object.namespace": "watcher",
"watcher_object.version": "1.0"
},
"updated_at": None,
"uuid": "76be87bd-3422-43f9-93a0-e85a577e3061"
},
"watcher_object.name": "ActionPlanCancelPayload",
"watcher_object.namespace": "watcher",
"watcher_object.version": "1.1"
}
},
notification
)
def test_send_action_plan_cancel_with_error(self):
action_plan = utils.create_test_action_plan(
mock.Mock(), state=objects.action_plan.State.ONGOING,
audit_id=self.audit.id, strategy_id=self.strategy.id,
audit=self.audit, strategy=self.strategy)
try:
# This is to load the exception in sys.exc_info()
raise exception.WatcherException("TEST")
except exception.WatcherException:
notifications.action_plan.send_cancel_notification(
mock.MagicMock(), action_plan, host='node0',
action='cancel', priority='error', phase='error')
self.assertEqual(1, self.m_notifier.error.call_count)
notification = self.m_notifier.error.call_args[1]
self.assertEqual("infra-optim:node0", self.m_notifier.publisher_id)
self.assertDictEqual(
{
"event_type": "action_plan.cancel.error",
"payload": {
"watcher_object.data": {
"created_at": "2016-10-18T09:52:05Z",
"deleted_at": None,
"fault": {
"watcher_object.data": {
"exception": "WatcherException",
"exception_message": "TEST",
"function_name": (
"test_send_action_plan_cancel_with_error"),
"module_name": "watcher.tests.notifications."
"test_action_plan_notification"
},
"watcher_object.name": "ExceptionPayload",
"watcher_object.namespace": "watcher",
"watcher_object.version": "1.0"
},
"audit_uuid": "10a47dd1-4874-4298-91cf-eff046dbdb8d",
"audit": {
"watcher_object.data": {
"interval": None,
"next_run_time": None,
"auto_trigger": False,
"parameters": {},
"uuid": "10a47dd1-4874-4298-91cf-eff046dbdb8d",
'name': 'My Audit',
"strategy_uuid": None,
"goal_uuid": (
"f7ad87ae-4298-91cf-93a0-f35a852e3652"),
"deleted_at": None,
"scope": [],
"state": "PENDING",
"updated_at": None,
"created_at": "2016-10-18T09:52:05Z",
"audit_type": "ONESHOT"
},
"watcher_object.name": "TerseAuditPayload",
"watcher_object.namespace": "watcher",
"watcher_object.version": "1.2"
},
"global_efficacy": [],
"state": "ONGOING",
"strategy_uuid": (
"cb3d0b58-4415-4d90-b75b-1e96878730e3"),
"strategy": {
"watcher_object.data": {
"created_at": "2016-10-18T09:52:05Z",
"deleted_at": None,
"display_name": "test strategy",
"name": "TEST",
"parameters_spec": {},
"updated_at": None,
"uuid": "cb3d0b58-4415-4d90-b75b-1e96878730e3"
},
"watcher_object.name": "StrategyPayload",
"watcher_object.namespace": "watcher",
"watcher_object.version": "1.0"
},
"updated_at": None,
"uuid": "76be87bd-3422-43f9-93a0-e85a577e3061"
},
"watcher_object.name": "ActionPlanCancelPayload",
"watcher_object.namespace": "watcher",
"watcher_object.version": "1.1"
}
},
notification
)
| 48.160267
| 79
| 0.440897
| 2,246
| 28,848
| 5.455476
| 0.095281
| 0.101853
| 0.025463
| 0.056802
| 0.869828
| 0.864441
| 0.848527
| 0.844691
| 0.841508
| 0.841508
| 0
| 0.081911
| 0.455768
| 28,848
| 598
| 80
| 48.240803
| 0.698535
| 0.040627
| 0
| 0.761468
| 0
| 0
| 0.27998
| 0.113012
| 0
| 0
| 0
| 0
| 0.040367
| 1
| 0.018349
| false
| 0
| 0.016514
| 0
| 0.038532
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 91d8736df0d6cf13913654495be4f39c389cdee0
| 164,209
| py
| Python
| google/appengine/api/remote_socket/remote_socket_service_pb.py
| enpi/Test
| 5fb2055c7cfd4cc91ff97471c529b041f21abeb6
| ["Apache-2.0"] | 3
| 2019-01-28T03:57:20.000Z
| 2020-02-20T01:37:33.000Z
| google/appengine/api/remote_socket/remote_socket_service_pb.py
| enpi/Test
| 5fb2055c7cfd4cc91ff97471c529b041f21abeb6
| ["Apache-2.0"] | null | null | null
| google/appengine/api/remote_socket/remote_socket_service_pb.py
| enpi/Test
| 5fb2055c7cfd4cc91ff97471c529b041f21abeb6
| ["Apache-2.0"] | 3
| 2019-01-18T11:33:56.000Z
| 2020-01-05T10:44:05.000Z
|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
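# Feature-detect extension support: newer ProtocolBuffer runtimes expose
# ExtendableProtocolMessage (used by CreateSocketReply below); older runtimes
# fall back to the plain ProtocolMessage base class.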
if hasattr(ProtocolBuffer, 'ExtendableProtocolMessage'):
_extension_runtime = True
_ExtendableProtocolMessage = ProtocolBuffer.ExtendableProtocolMessage
else:
_extension_runtime = False
_ExtendableProtocolMessage = ProtocolBuffer.ProtocolMessage
class RemoteSocketServiceError(ProtocolBuffer.ProtocolMessage):
SYSTEM_ERROR = 1
GAI_ERROR = 2
FAILURE = 4
PERMISSION_DENIED = 5
INVALID_REQUEST = 6
SOCKET_CLOSED = 7
_ErrorCode_NAMES = {
1: "SYSTEM_ERROR",
2: "GAI_ERROR",
4: "FAILURE",
5: "PERMISSION_DENIED",
6: "INVALID_REQUEST",
7: "SOCKET_CLOSED",
}
def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
ErrorCode_Name = classmethod(ErrorCode_Name)
SYS_SUCCESS = 0
SYS_EPERM = 1
SYS_ENOENT = 2
SYS_ESRCH = 3
SYS_EINTR = 4
SYS_EIO = 5
SYS_ENXIO = 6
SYS_E2BIG = 7
SYS_ENOEXEC = 8
SYS_EBADF = 9
SYS_ECHILD = 10
SYS_EAGAIN = 11
SYS_EWOULDBLOCK = 11
SYS_ENOMEM = 12
SYS_EACCES = 13
SYS_EFAULT = 14
SYS_ENOTBLK = 15
SYS_EBUSY = 16
SYS_EEXIST = 17
SYS_EXDEV = 18
SYS_ENODEV = 19
SYS_ENOTDIR = 20
SYS_EISDIR = 21
SYS_EINVAL = 22
SYS_ENFILE = 23
SYS_EMFILE = 24
SYS_ENOTTY = 25
SYS_ETXTBSY = 26
SYS_EFBIG = 27
SYS_ENOSPC = 28
SYS_ESPIPE = 29
SYS_EROFS = 30
SYS_EMLINK = 31
SYS_EPIPE = 32
SYS_EDOM = 33
SYS_ERANGE = 34
SYS_EDEADLK = 35
SYS_EDEADLOCK = 35
SYS_ENAMETOOLONG = 36
SYS_ENOLCK = 37
SYS_ENOSYS = 38
SYS_ENOTEMPTY = 39
SYS_ELOOP = 40
SYS_ENOMSG = 42
SYS_EIDRM = 43
SYS_ECHRNG = 44
SYS_EL2NSYNC = 45
SYS_EL3HLT = 46
SYS_EL3RST = 47
SYS_ELNRNG = 48
SYS_EUNATCH = 49
SYS_ENOCSI = 50
SYS_EL2HLT = 51
SYS_EBADE = 52
SYS_EBADR = 53
SYS_EXFULL = 54
SYS_ENOANO = 55
SYS_EBADRQC = 56
SYS_EBADSLT = 57
SYS_EBFONT = 59
SYS_ENOSTR = 60
SYS_ENODATA = 61
SYS_ETIME = 62
SYS_ENOSR = 63
SYS_ENONET = 64
SYS_ENOPKG = 65
SYS_EREMOTE = 66
SYS_ENOLINK = 67
SYS_EADV = 68
SYS_ESRMNT = 69
SYS_ECOMM = 70
SYS_EPROTO = 71
SYS_EMULTIHOP = 72
SYS_EDOTDOT = 73
SYS_EBADMSG = 74
SYS_EOVERFLOW = 75
SYS_ENOTUNIQ = 76
SYS_EBADFD = 77
SYS_EREMCHG = 78
SYS_ELIBACC = 79
SYS_ELIBBAD = 80
SYS_ELIBSCN = 81
SYS_ELIBMAX = 82
SYS_ELIBEXEC = 83
SYS_EILSEQ = 84
SYS_ERESTART = 85
SYS_ESTRPIPE = 86
SYS_EUSERS = 87
SYS_ENOTSOCK = 88
SYS_EDESTADDRREQ = 89
SYS_EMSGSIZE = 90
SYS_EPROTOTYPE = 91
SYS_ENOPROTOOPT = 92
SYS_EPROTONOSUPPORT = 93
SYS_ESOCKTNOSUPPORT = 94
SYS_EOPNOTSUPP = 95
SYS_ENOTSUP = 95
SYS_EPFNOSUPPORT = 96
SYS_EAFNOSUPPORT = 97
SYS_EADDRINUSE = 98
SYS_EADDRNOTAVAIL = 99
SYS_ENETDOWN = 100
SYS_ENETUNREACH = 101
SYS_ENETRESET = 102
SYS_ECONNABORTED = 103
SYS_ECONNRESET = 104
SYS_ENOBUFS = 105
SYS_EISCONN = 106
SYS_ENOTCONN = 107
SYS_ESHUTDOWN = 108
SYS_ETOOMANYREFS = 109
SYS_ETIMEDOUT = 110
SYS_ECONNREFUSED = 111
SYS_EHOSTDOWN = 112
SYS_EHOSTUNREACH = 113
SYS_EALREADY = 114
SYS_EINPROGRESS = 115
SYS_ESTALE = 116
SYS_EUCLEAN = 117
SYS_ENOTNAM = 118
SYS_ENAVAIL = 119
SYS_EISNAM = 120
SYS_EREMOTEIO = 121
SYS_EDQUOT = 122
SYS_ENOMEDIUM = 123
SYS_EMEDIUMTYPE = 124
SYS_ECANCELED = 125
SYS_ENOKEY = 126
SYS_EKEYEXPIRED = 127
SYS_EKEYREVOKED = 128
SYS_EKEYREJECTED = 129
SYS_EOWNERDEAD = 130
SYS_ENOTRECOVERABLE = 131
SYS_ERFKILL = 132
_SystemError_NAMES = {
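  # Several errno values below are aliases (11, 35 and 95 each appear twice);
  # Python keeps the last duplicate dict key, so reverse lookups return
  # SYS_EWOULDBLOCK, SYS_EDEADLOCK and SYS_ENOTSUP respectively.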
0: "SYS_SUCCESS",
1: "SYS_EPERM",
2: "SYS_ENOENT",
3: "SYS_ESRCH",
4: "SYS_EINTR",
5: "SYS_EIO",
6: "SYS_ENXIO",
7: "SYS_E2BIG",
8: "SYS_ENOEXEC",
9: "SYS_EBADF",
10: "SYS_ECHILD",
11: "SYS_EAGAIN",
11: "SYS_EWOULDBLOCK",
12: "SYS_ENOMEM",
13: "SYS_EACCES",
14: "SYS_EFAULT",
15: "SYS_ENOTBLK",
16: "SYS_EBUSY",
17: "SYS_EEXIST",
18: "SYS_EXDEV",
19: "SYS_ENODEV",
20: "SYS_ENOTDIR",
21: "SYS_EISDIR",
22: "SYS_EINVAL",
23: "SYS_ENFILE",
24: "SYS_EMFILE",
25: "SYS_ENOTTY",
26: "SYS_ETXTBSY",
27: "SYS_EFBIG",
28: "SYS_ENOSPC",
29: "SYS_ESPIPE",
30: "SYS_EROFS",
31: "SYS_EMLINK",
32: "SYS_EPIPE",
33: "SYS_EDOM",
34: "SYS_ERANGE",
35: "SYS_EDEADLK",
35: "SYS_EDEADLOCK",
36: "SYS_ENAMETOOLONG",
37: "SYS_ENOLCK",
38: "SYS_ENOSYS",
39: "SYS_ENOTEMPTY",
40: "SYS_ELOOP",
42: "SYS_ENOMSG",
43: "SYS_EIDRM",
44: "SYS_ECHRNG",
45: "SYS_EL2NSYNC",
46: "SYS_EL3HLT",
47: "SYS_EL3RST",
48: "SYS_ELNRNG",
49: "SYS_EUNATCH",
50: "SYS_ENOCSI",
51: "SYS_EL2HLT",
52: "SYS_EBADE",
53: "SYS_EBADR",
54: "SYS_EXFULL",
55: "SYS_ENOANO",
56: "SYS_EBADRQC",
57: "SYS_EBADSLT",
59: "SYS_EBFONT",
60: "SYS_ENOSTR",
61: "SYS_ENODATA",
62: "SYS_ETIME",
63: "SYS_ENOSR",
64: "SYS_ENONET",
65: "SYS_ENOPKG",
66: "SYS_EREMOTE",
67: "SYS_ENOLINK",
68: "SYS_EADV",
69: "SYS_ESRMNT",
70: "SYS_ECOMM",
71: "SYS_EPROTO",
72: "SYS_EMULTIHOP",
73: "SYS_EDOTDOT",
74: "SYS_EBADMSG",
75: "SYS_EOVERFLOW",
76: "SYS_ENOTUNIQ",
77: "SYS_EBADFD",
78: "SYS_EREMCHG",
79: "SYS_ELIBACC",
80: "SYS_ELIBBAD",
81: "SYS_ELIBSCN",
82: "SYS_ELIBMAX",
83: "SYS_ELIBEXEC",
84: "SYS_EILSEQ",
85: "SYS_ERESTART",
86: "SYS_ESTRPIPE",
87: "SYS_EUSERS",
88: "SYS_ENOTSOCK",
89: "SYS_EDESTADDRREQ",
90: "SYS_EMSGSIZE",
91: "SYS_EPROTOTYPE",
92: "SYS_ENOPROTOOPT",
93: "SYS_EPROTONOSUPPORT",
94: "SYS_ESOCKTNOSUPPORT",
95: "SYS_EOPNOTSUPP",
95: "SYS_ENOTSUP",
96: "SYS_EPFNOSUPPORT",
97: "SYS_EAFNOSUPPORT",
98: "SYS_EADDRINUSE",
99: "SYS_EADDRNOTAVAIL",
100: "SYS_ENETDOWN",
101: "SYS_ENETUNREACH",
102: "SYS_ENETRESET",
103: "SYS_ECONNABORTED",
104: "SYS_ECONNRESET",
105: "SYS_ENOBUFS",
106: "SYS_EISCONN",
107: "SYS_ENOTCONN",
108: "SYS_ESHUTDOWN",
109: "SYS_ETOOMANYREFS",
110: "SYS_ETIMEDOUT",
111: "SYS_ECONNREFUSED",
112: "SYS_EHOSTDOWN",
113: "SYS_EHOSTUNREACH",
114: "SYS_EALREADY",
115: "SYS_EINPROGRESS",
116: "SYS_ESTALE",
117: "SYS_EUCLEAN",
118: "SYS_ENOTNAM",
119: "SYS_ENAVAIL",
120: "SYS_EISNAM",
121: "SYS_EREMOTEIO",
122: "SYS_EDQUOT",
123: "SYS_ENOMEDIUM",
124: "SYS_EMEDIUMTYPE",
125: "SYS_ECANCELED",
126: "SYS_ENOKEY",
127: "SYS_EKEYEXPIRED",
128: "SYS_EKEYREVOKED",
129: "SYS_EKEYREJECTED",
130: "SYS_EOWNERDEAD",
131: "SYS_ENOTRECOVERABLE",
132: "SYS_ERFKILL",
}
def SystemError_Name(cls, x): return cls._SystemError_NAMES.get(x, "")
SystemError_Name = classmethod(SystemError_Name)
has_system_error_ = 0
system_error_ = 0
has_error_detail_ = 0
error_detail_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def system_error(self): return self.system_error_
def set_system_error(self, x):
self.has_system_error_ = 1
self.system_error_ = x
def clear_system_error(self):
if self.has_system_error_:
self.has_system_error_ = 0
self.system_error_ = 0
def has_system_error(self): return self.has_system_error_
def error_detail(self): return self.error_detail_
def set_error_detail(self, x):
self.has_error_detail_ = 1
self.error_detail_ = x
def clear_error_detail(self):
if self.has_error_detail_:
self.has_error_detail_ = 0
self.error_detail_ = ""
def has_error_detail(self): return self.has_error_detail_
def MergeFrom(self, x):
assert x is not self
if (x.has_system_error()): self.set_system_error(x.system_error())
if (x.has_error_detail()): self.set_error_detail(x.error_detail())
def Equals(self, x):
if x is self: return 1
if self.has_system_error_ != x.has_system_error_: return 0
if self.has_system_error_ and self.system_error_ != x.system_error_: return 0
if self.has_error_detail_ != x.has_error_detail_: return 0
if self.has_error_detail_ and self.error_detail_ != x.error_detail_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
if (self.has_system_error_): n += 1 + self.lengthVarInt64(self.system_error_)
if (self.has_error_detail_): n += 1 + self.lengthString(len(self.error_detail_))
return n
def ByteSizePartial(self):
n = 0
if (self.has_system_error_): n += 1 + self.lengthVarInt64(self.system_error_)
if (self.has_error_detail_): n += 1 + self.lengthString(len(self.error_detail_))
return n
def Clear(self):
self.clear_system_error()
self.clear_error_detail()
def OutputUnchecked(self, out):
if (self.has_system_error_):
out.putVarInt32(8)
out.putVarInt32(self.system_error_)
if (self.has_error_detail_):
out.putVarInt32(18)
out.putPrefixedString(self.error_detail_)
def OutputPartial(self, out):
if (self.has_system_error_):
out.putVarInt32(8)
out.putVarInt32(self.system_error_)
if (self.has_error_detail_):
out.putVarInt32(18)
out.putPrefixedString(self.error_detail_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 8:
self.set_system_error(d.getVarInt32())
continue
if tt == 18:
self.set_error_detail(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_system_error_: res+=prefix+("system_error: %s\n" % self.DebugFormatInt32(self.system_error_))
if self.has_error_detail_: res+=prefix+("error_detail: %s\n" % self.DebugFormatString(self.error_detail_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
ksystem_error = 1
kerror_detail = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "system_error",
2: "error_detail",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.RemoteSocketServiceError'
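# The sketch below (hand-written, not generated) shows the typical lifecycle of
# a RemoteSocketServiceError value: set fields, look up the errno-style
# symbolic name, then copy and compare with the generated MergeFrom()/Equals()
# helpers. It exercises only methods defined in this module.
def _example_remote_socket_service_error():
  err = RemoteSocketServiceError()
  err.set_system_error(RemoteSocketServiceError.SYS_ECONNREFUSED)
  err.set_error_detail('connection refused by backend')
  # Reverse lookup of the symbolic name via the generated classmethod.
  name = RemoteSocketServiceError.SystemError_Name(err.system_error())
  assert name == 'SYS_ECONNREFUSED'
  # Field-by-field copy and structural comparison.
  copy = RemoteSocketServiceError()
  copy.MergeFrom(err)
  assert err.Equals(copy)
  # ByteSize() counts one tag byte plus payload per present optional field.
  assert copy.ByteSize() == err.ByteSize()
  return copy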
class AddressPort(ProtocolBuffer.ProtocolMessage):
has_port_ = 0
port_ = 0
has_packed_address_ = 0
packed_address_ = ""
has_hostname_hint_ = 0
hostname_hint_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def port(self): return self.port_
def set_port(self, x):
self.has_port_ = 1
self.port_ = x
def clear_port(self):
if self.has_port_:
self.has_port_ = 0
self.port_ = 0
def has_port(self): return self.has_port_
def packed_address(self): return self.packed_address_
def set_packed_address(self, x):
self.has_packed_address_ = 1
self.packed_address_ = x
def clear_packed_address(self):
if self.has_packed_address_:
self.has_packed_address_ = 0
self.packed_address_ = ""
def has_packed_address(self): return self.has_packed_address_
def hostname_hint(self): return self.hostname_hint_
def set_hostname_hint(self, x):
self.has_hostname_hint_ = 1
self.hostname_hint_ = x
def clear_hostname_hint(self):
if self.has_hostname_hint_:
self.has_hostname_hint_ = 0
self.hostname_hint_ = ""
def has_hostname_hint(self): return self.has_hostname_hint_
def MergeFrom(self, x):
assert x is not self
if (x.has_port()): self.set_port(x.port())
if (x.has_packed_address()): self.set_packed_address(x.packed_address())
if (x.has_hostname_hint()): self.set_hostname_hint(x.hostname_hint())
def Equals(self, x):
if x is self: return 1
if self.has_port_ != x.has_port_: return 0
if self.has_port_ and self.port_ != x.port_: return 0
if self.has_packed_address_ != x.has_packed_address_: return 0
if self.has_packed_address_ and self.packed_address_ != x.packed_address_: return 0
if self.has_hostname_hint_ != x.has_hostname_hint_: return 0
if self.has_hostname_hint_ and self.hostname_hint_ != x.hostname_hint_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_port_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: port not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthVarInt64(self.port_)
if (self.has_packed_address_): n += 1 + self.lengthString(len(self.packed_address_))
if (self.has_hostname_hint_): n += 1 + self.lengthString(len(self.hostname_hint_))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_port_):
n += 1
n += self.lengthVarInt64(self.port_)
if (self.has_packed_address_): n += 1 + self.lengthString(len(self.packed_address_))
if (self.has_hostname_hint_): n += 1 + self.lengthString(len(self.hostname_hint_))
return n
def Clear(self):
self.clear_port()
self.clear_packed_address()
self.clear_hostname_hint()
def OutputUnchecked(self, out):
out.putVarInt32(8)
out.putVarInt32(self.port_)
if (self.has_packed_address_):
out.putVarInt32(18)
out.putPrefixedString(self.packed_address_)
if (self.has_hostname_hint_):
out.putVarInt32(26)
out.putPrefixedString(self.hostname_hint_)
def OutputPartial(self, out):
if (self.has_port_):
out.putVarInt32(8)
out.putVarInt32(self.port_)
if (self.has_packed_address_):
out.putVarInt32(18)
out.putPrefixedString(self.packed_address_)
if (self.has_hostname_hint_):
out.putVarInt32(26)
out.putPrefixedString(self.hostname_hint_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 8:
self.set_port(d.getVarInt32())
continue
if tt == 18:
self.set_packed_address(d.getPrefixedString())
continue
if tt == 26:
self.set_hostname_hint(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_port_: res+=prefix+("port: %s\n" % self.DebugFormatInt32(self.port_))
if self.has_packed_address_: res+=prefix+("packed_address: %s\n" % self.DebugFormatString(self.packed_address_))
if self.has_hostname_hint_: res+=prefix+("hostname_hint: %s\n" % self.DebugFormatString(self.hostname_hint_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kport = 1
kpacked_address = 2
khostname_hint = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "port",
2: "packed_address",
3: "hostname_hint",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.AddressPort'
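# A small hand-written sketch of AddressPort's required-field contract:
# IsInitialized() reports missing required fields through its debug_strs
# accumulator, using exactly the message the generated check above appends.
def _example_address_port():
  addr = AddressPort()
  problems = []
  assert not addr.IsInitialized(problems)
  assert problems == ['Required field: port not set.']
  addr.set_port(8080)                            # port is the only required field
  addr.set_hostname_hint('backend.example.com')  # hypothetical hostname
  assert addr.IsInitialized()
  return addr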
class CreateSocketRequest(ProtocolBuffer.ProtocolMessage):
IPv4 = 1
IPv6 = 2
_SocketFamily_NAMES = {
1: "IPv4",
2: "IPv6",
}
def SocketFamily_Name(cls, x): return cls._SocketFamily_NAMES.get(x, "")
SocketFamily_Name = classmethod(SocketFamily_Name)
TCP = 1
UDP = 2
_SocketProtocol_NAMES = {
1: "TCP",
2: "UDP",
}
def SocketProtocol_Name(cls, x): return cls._SocketProtocol_NAMES.get(x, "")
SocketProtocol_Name = classmethod(SocketProtocol_Name)
has_family_ = 0
family_ = 0
has_protocol_ = 0
protocol_ = 0
has_proxy_external_ip_ = 0
proxy_external_ip_ = None
has_listen_backlog_ = 0
listen_backlog_ = 0
has_remote_ip_ = 0
remote_ip_ = None
has_app_id_ = 0
app_id_ = ""
def __init__(self, contents=None):
self.socket_options_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def family(self): return self.family_
def set_family(self, x):
self.has_family_ = 1
self.family_ = x
def clear_family(self):
if self.has_family_:
self.has_family_ = 0
self.family_ = 0
def has_family(self): return self.has_family_
def protocol(self): return self.protocol_
def set_protocol(self, x):
self.has_protocol_ = 1
self.protocol_ = x
def clear_protocol(self):
if self.has_protocol_:
self.has_protocol_ = 0
self.protocol_ = 0
def has_protocol(self): return self.has_protocol_
def socket_options_size(self): return len(self.socket_options_)
def socket_options_list(self): return self.socket_options_
def socket_options(self, i):
return self.socket_options_[i]
def mutable_socket_options(self, i):
return self.socket_options_[i]
def add_socket_options(self):
x = SocketOption()
self.socket_options_.append(x)
return x
def clear_socket_options(self):
self.socket_options_ = []
def proxy_external_ip(self):
if self.proxy_external_ip_ is None:
self.lazy_init_lock_.acquire()
try:
if self.proxy_external_ip_ is None: self.proxy_external_ip_ = AddressPort()
finally:
self.lazy_init_lock_.release()
return self.proxy_external_ip_
def mutable_proxy_external_ip(self): self.has_proxy_external_ip_ = 1; return self.proxy_external_ip()
def clear_proxy_external_ip(self):
if self.has_proxy_external_ip_:
self.has_proxy_external_ip_ = 0;
if self.proxy_external_ip_ is not None: self.proxy_external_ip_.Clear()
def has_proxy_external_ip(self): return self.has_proxy_external_ip_
def listen_backlog(self): return self.listen_backlog_
def set_listen_backlog(self, x):
self.has_listen_backlog_ = 1
self.listen_backlog_ = x
def clear_listen_backlog(self):
if self.has_listen_backlog_:
self.has_listen_backlog_ = 0
self.listen_backlog_ = 0
def has_listen_backlog(self): return self.has_listen_backlog_
def remote_ip(self):
if self.remote_ip_ is None:
self.lazy_init_lock_.acquire()
try:
if self.remote_ip_ is None: self.remote_ip_ = AddressPort()
finally:
self.lazy_init_lock_.release()
return self.remote_ip_
def mutable_remote_ip(self): self.has_remote_ip_ = 1; return self.remote_ip()
def clear_remote_ip(self):
if self.has_remote_ip_:
self.has_remote_ip_ = 0;
if self.remote_ip_ is not None: self.remote_ip_.Clear()
def has_remote_ip(self): return self.has_remote_ip_
def app_id(self): return self.app_id_
def set_app_id(self, x):
self.has_app_id_ = 1
self.app_id_ = x
def clear_app_id(self):
if self.has_app_id_:
self.has_app_id_ = 0
self.app_id_ = ""
def has_app_id(self): return self.has_app_id_
def MergeFrom(self, x):
assert x is not self
if (x.has_family()): self.set_family(x.family())
if (x.has_protocol()): self.set_protocol(x.protocol())
for i in xrange(x.socket_options_size()): self.add_socket_options().CopyFrom(x.socket_options(i))
if (x.has_proxy_external_ip()): self.mutable_proxy_external_ip().MergeFrom(x.proxy_external_ip())
if (x.has_listen_backlog()): self.set_listen_backlog(x.listen_backlog())
if (x.has_remote_ip()): self.mutable_remote_ip().MergeFrom(x.remote_ip())
if (x.has_app_id()): self.set_app_id(x.app_id())
def Equals(self, x):
if x is self: return 1
if self.has_family_ != x.has_family_: return 0
if self.has_family_ and self.family_ != x.family_: return 0
if self.has_protocol_ != x.has_protocol_: return 0
if self.has_protocol_ and self.protocol_ != x.protocol_: return 0
if len(self.socket_options_) != len(x.socket_options_): return 0
for e1, e2 in zip(self.socket_options_, x.socket_options_):
if e1 != e2: return 0
if self.has_proxy_external_ip_ != x.has_proxy_external_ip_: return 0
if self.has_proxy_external_ip_ and self.proxy_external_ip_ != x.proxy_external_ip_: return 0
if self.has_listen_backlog_ != x.has_listen_backlog_: return 0
if self.has_listen_backlog_ and self.listen_backlog_ != x.listen_backlog_: return 0
if self.has_remote_ip_ != x.has_remote_ip_: return 0
if self.has_remote_ip_ and self.remote_ip_ != x.remote_ip_: return 0
if self.has_app_id_ != x.has_app_id_: return 0
if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_family_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: family not set.')
if (not self.has_protocol_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: protocol not set.')
for p in self.socket_options_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_proxy_external_ip_ and not self.proxy_external_ip_.IsInitialized(debug_strs)): initialized = 0
if (self.has_remote_ip_ and not self.remote_ip_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
n += self.lengthVarInt64(self.family_)
n += self.lengthVarInt64(self.protocol_)
n += 1 * len(self.socket_options_)
for i in xrange(len(self.socket_options_)): n += self.lengthString(self.socket_options_[i].ByteSize())
if (self.has_proxy_external_ip_): n += 1 + self.lengthString(self.proxy_external_ip_.ByteSize())
if (self.has_listen_backlog_): n += 1 + self.lengthVarInt64(self.listen_backlog_)
if (self.has_remote_ip_): n += 1 + self.lengthString(self.remote_ip_.ByteSize())
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_family_):
n += 1
n += self.lengthVarInt64(self.family_)
if (self.has_protocol_):
n += 1
n += self.lengthVarInt64(self.protocol_)
n += 1 * len(self.socket_options_)
for i in xrange(len(self.socket_options_)): n += self.lengthString(self.socket_options_[i].ByteSizePartial())
if (self.has_proxy_external_ip_): n += 1 + self.lengthString(self.proxy_external_ip_.ByteSizePartial())
if (self.has_listen_backlog_): n += 1 + self.lengthVarInt64(self.listen_backlog_)
if (self.has_remote_ip_): n += 1 + self.lengthString(self.remote_ip_.ByteSizePartial())
if (self.has_app_id_): n += 1 + self.lengthString(len(self.app_id_))
return n
def Clear(self):
self.clear_family()
self.clear_protocol()
self.clear_socket_options()
self.clear_proxy_external_ip()
self.clear_listen_backlog()
self.clear_remote_ip()
self.clear_app_id()
def OutputUnchecked(self, out):
out.putVarInt32(8)
out.putVarInt32(self.family_)
out.putVarInt32(16)
out.putVarInt32(self.protocol_)
for i in xrange(len(self.socket_options_)):
out.putVarInt32(26)
out.putVarInt32(self.socket_options_[i].ByteSize())
self.socket_options_[i].OutputUnchecked(out)
if (self.has_proxy_external_ip_):
out.putVarInt32(34)
out.putVarInt32(self.proxy_external_ip_.ByteSize())
self.proxy_external_ip_.OutputUnchecked(out)
if (self.has_listen_backlog_):
out.putVarInt32(40)
out.putVarInt32(self.listen_backlog_)
if (self.has_remote_ip_):
out.putVarInt32(50)
out.putVarInt32(self.remote_ip_.ByteSize())
self.remote_ip_.OutputUnchecked(out)
if (self.has_app_id_):
out.putVarInt32(74)
out.putPrefixedString(self.app_id_)
def OutputPartial(self, out):
if (self.has_family_):
out.putVarInt32(8)
out.putVarInt32(self.family_)
if (self.has_protocol_):
out.putVarInt32(16)
out.putVarInt32(self.protocol_)
for i in xrange(len(self.socket_options_)):
out.putVarInt32(26)
out.putVarInt32(self.socket_options_[i].ByteSizePartial())
self.socket_options_[i].OutputPartial(out)
if (self.has_proxy_external_ip_):
out.putVarInt32(34)
out.putVarInt32(self.proxy_external_ip_.ByteSizePartial())
self.proxy_external_ip_.OutputPartial(out)
if (self.has_listen_backlog_):
out.putVarInt32(40)
out.putVarInt32(self.listen_backlog_)
if (self.has_remote_ip_):
out.putVarInt32(50)
out.putVarInt32(self.remote_ip_.ByteSizePartial())
self.remote_ip_.OutputPartial(out)
if (self.has_app_id_):
out.putVarInt32(74)
out.putPrefixedString(self.app_id_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 8:
self.set_family(d.getVarInt32())
continue
if tt == 16:
self.set_protocol(d.getVarInt32())
continue
if tt == 26:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_socket_options().TryMerge(tmp)
continue
if tt == 34:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_proxy_external_ip().TryMerge(tmp)
continue
if tt == 40:
self.set_listen_backlog(d.getVarInt32())
continue
if tt == 50:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_remote_ip().TryMerge(tmp)
continue
if tt == 74:
self.set_app_id(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_family_: res+=prefix+("family: %s\n" % self.DebugFormatInt32(self.family_))
if self.has_protocol_: res+=prefix+("protocol: %s\n" % self.DebugFormatInt32(self.protocol_))
cnt=0
for e in self.socket_options_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("socket_options%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_proxy_external_ip_:
res+=prefix+"proxy_external_ip <\n"
res+=self.proxy_external_ip_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_listen_backlog_: res+=prefix+("listen_backlog: %s\n" % self.DebugFormatInt32(self.listen_backlog_))
if self.has_remote_ip_:
res+=prefix+"remote_ip <\n"
res+=self.remote_ip_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kfamily = 1
kprotocol = 2
ksocket_options = 3
kproxy_external_ip = 4
klisten_backlog = 5
kremote_ip = 6
kapp_id = 9
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "family",
2: "protocol",
3: "socket_options",
4: "proxy_external_ip",
5: "listen_backlog",
6: "remote_ip",
9: "app_id",
}, 9)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.STRING,
4: ProtocolBuffer.Encoder.STRING,
5: ProtocolBuffer.Encoder.NUMERIC,
6: ProtocolBuffer.Encoder.STRING,
9: ProtocolBuffer.Encoder.STRING,
}, 9, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.CreateSocketRequest'
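# Hand-written sketch of assembling a CreateSocketRequest: the required enums
# are set directly, repeated submessages come from add_socket_options(), and
# mutable_remote_ip() lazily allocates the nested AddressPort under
# lazy_init_lock_ while flipping its presence bit. SocketOption is defined
# later in this module, which is fine because it is resolved at call time.
def _example_create_socket_request():
  req = CreateSocketRequest()
  req.set_family(CreateSocketRequest.IPv4)
  req.set_protocol(CreateSocketRequest.TCP)
  opt = req.add_socket_options()               # appends and returns a SocketOption
  opt.set_level(SocketOption.SOCKET_SOL_TCP)
  opt.set_option(SocketOption.SOCKET_TCP_NODELAY)
  opt.set_value('\x01\x00\x00\x00')            # raw option bytes (illustrative)
  remote = req.mutable_remote_ip()             # lazily constructed AddressPort
  remote.set_port(443)
  remote.set_hostname_hint('example.com')      # hypothetical peer
  assert req.socket_options_size() == 1
  assert req.IsInitialized()                   # family, protocol and port all set
  return req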
class CreateSocketReply(_ExtendableProtocolMessage):
has_socket_descriptor_ = 0
socket_descriptor_ = ""
has_server_address_ = 0
server_address_ = None
has_proxy_external_ip_ = 0
proxy_external_ip_ = None
def __init__(self, contents=None):
if _extension_runtime:
self._extension_fields = {}
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def socket_descriptor(self): return self.socket_descriptor_
def set_socket_descriptor(self, x):
self.has_socket_descriptor_ = 1
self.socket_descriptor_ = x
def clear_socket_descriptor(self):
if self.has_socket_descriptor_:
self.has_socket_descriptor_ = 0
self.socket_descriptor_ = ""
def has_socket_descriptor(self): return self.has_socket_descriptor_
def server_address(self):
if self.server_address_ is None:
self.lazy_init_lock_.acquire()
try:
if self.server_address_ is None: self.server_address_ = AddressPort()
finally:
self.lazy_init_lock_.release()
return self.server_address_
def mutable_server_address(self): self.has_server_address_ = 1; return self.server_address()
def clear_server_address(self):
if self.has_server_address_:
self.has_server_address_ = 0;
if self.server_address_ is not None: self.server_address_.Clear()
def has_server_address(self): return self.has_server_address_
def proxy_external_ip(self):
if self.proxy_external_ip_ is None:
self.lazy_init_lock_.acquire()
try:
if self.proxy_external_ip_ is None: self.proxy_external_ip_ = AddressPort()
finally:
self.lazy_init_lock_.release()
return self.proxy_external_ip_
def mutable_proxy_external_ip(self): self.has_proxy_external_ip_ = 1; return self.proxy_external_ip()
def clear_proxy_external_ip(self):
if self.has_proxy_external_ip_:
self.has_proxy_external_ip_ = 0;
if self.proxy_external_ip_ is not None: self.proxy_external_ip_.Clear()
def has_proxy_external_ip(self): return self.has_proxy_external_ip_
def MergeFrom(self, x):
assert x is not self
if (x.has_socket_descriptor()): self.set_socket_descriptor(x.socket_descriptor())
if (x.has_server_address()): self.mutable_server_address().MergeFrom(x.server_address())
if (x.has_proxy_external_ip()): self.mutable_proxy_external_ip().MergeFrom(x.proxy_external_ip())
if _extension_runtime: self._MergeExtensionFields(x)
def Equals(self, x):
if x is self: return 1
if self.has_socket_descriptor_ != x.has_socket_descriptor_: return 0
if self.has_socket_descriptor_ and self.socket_descriptor_ != x.socket_descriptor_: return 0
if self.has_server_address_ != x.has_server_address_: return 0
if self.has_server_address_ and self.server_address_ != x.server_address_: return 0
if self.has_proxy_external_ip_ != x.has_proxy_external_ip_: return 0
if self.has_proxy_external_ip_ and self.proxy_external_ip_ != x.proxy_external_ip_: return 0
if _extension_runtime and not self._ExtensionEquals(x): return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_server_address_ and not self.server_address_.IsInitialized(debug_strs)): initialized = 0
if (self.has_proxy_external_ip_ and not self.proxy_external_ip_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_socket_descriptor_): n += 1 + self.lengthString(len(self.socket_descriptor_))
if (self.has_server_address_): n += 1 + self.lengthString(self.server_address_.ByteSize())
if (self.has_proxy_external_ip_): n += 1 + self.lengthString(self.proxy_external_ip_.ByteSize())
if _extension_runtime:
n += self._ExtensionByteSize(False)
return n
def ByteSizePartial(self):
n = 0
if (self.has_socket_descriptor_): n += 1 + self.lengthString(len(self.socket_descriptor_))
if (self.has_server_address_): n += 1 + self.lengthString(self.server_address_.ByteSizePartial())
if (self.has_proxy_external_ip_): n += 1 + self.lengthString(self.proxy_external_ip_.ByteSizePartial())
if _extension_runtime:
n += self._ExtensionByteSize(True)
return n
def Clear(self):
self.clear_socket_descriptor()
self.clear_server_address()
self.clear_proxy_external_ip()
if _extension_runtime: self._extension_fields.clear()
def OutputUnchecked(self, out):
if _extension_runtime:
extensions = self._ListExtensions()
extension_index = 0
if (self.has_socket_descriptor_):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
if (self.has_server_address_):
out.putVarInt32(26)
out.putVarInt32(self.server_address_.ByteSize())
self.server_address_.OutputUnchecked(out)
if (self.has_proxy_external_ip_):
out.putVarInt32(34)
out.putVarInt32(self.proxy_external_ip_.ByteSize())
self.proxy_external_ip_.OutputUnchecked(out)
if _extension_runtime:
extension_index = self._OutputExtensionFields(out, False, extensions, extension_index, 536870912)
def OutputPartial(self, out):
if _extension_runtime:
extensions = self._ListExtensions()
extension_index = 0
if (self.has_socket_descriptor_):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
if (self.has_server_address_):
out.putVarInt32(26)
out.putVarInt32(self.server_address_.ByteSizePartial())
self.server_address_.OutputPartial(out)
if (self.has_proxy_external_ip_):
out.putVarInt32(34)
out.putVarInt32(self.proxy_external_ip_.ByteSizePartial())
self.proxy_external_ip_.OutputPartial(out)
if _extension_runtime:
extension_index = self._OutputExtensionFields(out, True, extensions, extension_index, 536870912)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_socket_descriptor(d.getPrefixedString())
continue
if tt == 26:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_server_address().TryMerge(tmp)
continue
if tt == 34:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_proxy_external_ip().TryMerge(tmp)
continue
if _extension_runtime:
if (1000 <= tt):
self._ParseOneExtensionField(tt, d)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_socket_descriptor_: res+=prefix+("socket_descriptor: %s\n" % self.DebugFormatString(self.socket_descriptor_))
if self.has_server_address_:
res+=prefix+"server_address <\n"
res+=self.server_address_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_proxy_external_ip_:
res+=prefix+"proxy_external_ip <\n"
res+=self.proxy_external_ip_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if _extension_runtime:
res+=self._ExtensionDebugString(prefix, printElemNumber)
return res
if _extension_runtime:
_extensions_by_field_number = {}
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
ksocket_descriptor = 1
kserver_address = 3
kproxy_external_ip = 4
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "socket_descriptor",
3: "server_address",
4: "proxy_external_ip",
}, 4)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
4: ProtocolBuffer.Encoder.STRING,
}, 4, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.CreateSocketReply'
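# Hand-written sketch of CreateSocketReply, the one message here built on
# _ExtendableProtocolMessage: when the runtime supports extensions, instances
# also carry an _extension_fields dict; the regular fields behave the same
# either way.
def _example_create_socket_reply():
  reply = CreateSocketReply()
  reply.set_socket_descriptor('sd-0123')       # hypothetical descriptor token
  reply.mutable_server_address().set_port(80)
  assert reply.has_socket_descriptor() and reply.has_server_address()
  if _extension_runtime:
    assert reply._extension_fields == {}       # no extensions set yet
  return reply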
class BindRequest(ProtocolBuffer.ProtocolMessage):
has_socket_descriptor_ = 0
socket_descriptor_ = ""
has_proxy_external_ip_ = 0
def __init__(self, contents=None):
self.proxy_external_ip_ = AddressPort()
if contents is not None: self.MergeFromString(contents)
def socket_descriptor(self): return self.socket_descriptor_
def set_socket_descriptor(self, x):
self.has_socket_descriptor_ = 1
self.socket_descriptor_ = x
def clear_socket_descriptor(self):
if self.has_socket_descriptor_:
self.has_socket_descriptor_ = 0
self.socket_descriptor_ = ""
def has_socket_descriptor(self): return self.has_socket_descriptor_
def proxy_external_ip(self): return self.proxy_external_ip_
def mutable_proxy_external_ip(self): self.has_proxy_external_ip_ = 1; return self.proxy_external_ip_
def clear_proxy_external_ip(self): self.has_proxy_external_ip_ = 0; self.proxy_external_ip_.Clear()
def has_proxy_external_ip(self): return self.has_proxy_external_ip_
def MergeFrom(self, x):
assert x is not self
if (x.has_socket_descriptor()): self.set_socket_descriptor(x.socket_descriptor())
if (x.has_proxy_external_ip()): self.mutable_proxy_external_ip().MergeFrom(x.proxy_external_ip())
def Equals(self, x):
if x is self: return 1
if self.has_socket_descriptor_ != x.has_socket_descriptor_: return 0
if self.has_socket_descriptor_ and self.socket_descriptor_ != x.socket_descriptor_: return 0
if self.has_proxy_external_ip_ != x.has_proxy_external_ip_: return 0
if self.has_proxy_external_ip_ and self.proxy_external_ip_ != x.proxy_external_ip_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_socket_descriptor_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: socket_descriptor not set.')
if (not self.has_proxy_external_ip_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: proxy_external_ip not set.')
elif not self.proxy_external_ip_.IsInitialized(debug_strs): initialized = 0
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.socket_descriptor_))
n += self.lengthString(self.proxy_external_ip_.ByteSize())
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_socket_descriptor_):
n += 1
n += self.lengthString(len(self.socket_descriptor_))
if (self.has_proxy_external_ip_):
n += 1
n += self.lengthString(self.proxy_external_ip_.ByteSizePartial())
return n
def Clear(self):
self.clear_socket_descriptor()
self.clear_proxy_external_ip()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
out.putVarInt32(18)
out.putVarInt32(self.proxy_external_ip_.ByteSize())
self.proxy_external_ip_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_socket_descriptor_):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
if (self.has_proxy_external_ip_):
out.putVarInt32(18)
out.putVarInt32(self.proxy_external_ip_.ByteSizePartial())
self.proxy_external_ip_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_socket_descriptor(d.getPrefixedString())
continue
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_proxy_external_ip().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_socket_descriptor_: res+=prefix+("socket_descriptor: %s\n" % self.DebugFormatString(self.socket_descriptor_))
if self.has_proxy_external_ip_:
res+=prefix+"proxy_external_ip <\n"
res+=self.proxy_external_ip_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
ksocket_descriptor = 1
kproxy_external_ip = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "socket_descriptor",
2: "proxy_external_ip",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.BindRequest'
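# Note on the class above: unlike the other messages here, BindRequest builds
# proxy_external_ip_ eagerly in __init__ (no lazy_init_lock_), since the field
# is required and will always be needed.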
class BindReply(ProtocolBuffer.ProtocolMessage):
has_proxy_external_ip_ = 0
proxy_external_ip_ = None
def __init__(self, contents=None):
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def proxy_external_ip(self):
if self.proxy_external_ip_ is None:
self.lazy_init_lock_.acquire()
try:
if self.proxy_external_ip_ is None: self.proxy_external_ip_ = AddressPort()
finally:
self.lazy_init_lock_.release()
return self.proxy_external_ip_
def mutable_proxy_external_ip(self): self.has_proxy_external_ip_ = 1; return self.proxy_external_ip()
def clear_proxy_external_ip(self):
if self.has_proxy_external_ip_:
self.has_proxy_external_ip_ = 0;
if self.proxy_external_ip_ is not None: self.proxy_external_ip_.Clear()
def has_proxy_external_ip(self): return self.has_proxy_external_ip_
def MergeFrom(self, x):
assert x is not self
if (x.has_proxy_external_ip()): self.mutable_proxy_external_ip().MergeFrom(x.proxy_external_ip())
def Equals(self, x):
if x is self: return 1
if self.has_proxy_external_ip_ != x.has_proxy_external_ip_: return 0
if self.has_proxy_external_ip_ and self.proxy_external_ip_ != x.proxy_external_ip_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_proxy_external_ip_ and not self.proxy_external_ip_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_proxy_external_ip_): n += 1 + self.lengthString(self.proxy_external_ip_.ByteSize())
return n
def ByteSizePartial(self):
n = 0
if (self.has_proxy_external_ip_): n += 1 + self.lengthString(self.proxy_external_ip_.ByteSizePartial())
return n
def Clear(self):
self.clear_proxy_external_ip()
def OutputUnchecked(self, out):
if (self.has_proxy_external_ip_):
out.putVarInt32(10)
out.putVarInt32(self.proxy_external_ip_.ByteSize())
self.proxy_external_ip_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_proxy_external_ip_):
out.putVarInt32(10)
out.putVarInt32(self.proxy_external_ip_.ByteSizePartial())
self.proxy_external_ip_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_proxy_external_ip().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_proxy_external_ip_:
res+=prefix+"proxy_external_ip <\n"
res+=self.proxy_external_ip_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kproxy_external_ip = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "proxy_external_ip",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.BindReply'
class GetSocketNameRequest(ProtocolBuffer.ProtocolMessage):
has_socket_descriptor_ = 0
socket_descriptor_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def socket_descriptor(self): return self.socket_descriptor_
def set_socket_descriptor(self, x):
self.has_socket_descriptor_ = 1
self.socket_descriptor_ = x
def clear_socket_descriptor(self):
if self.has_socket_descriptor_:
self.has_socket_descriptor_ = 0
self.socket_descriptor_ = ""
def has_socket_descriptor(self): return self.has_socket_descriptor_
def MergeFrom(self, x):
assert x is not self
if (x.has_socket_descriptor()): self.set_socket_descriptor(x.socket_descriptor())
def Equals(self, x):
if x is self: return 1
if self.has_socket_descriptor_ != x.has_socket_descriptor_: return 0
if self.has_socket_descriptor_ and self.socket_descriptor_ != x.socket_descriptor_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_socket_descriptor_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: socket_descriptor not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.socket_descriptor_))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_socket_descriptor_):
n += 1
n += self.lengthString(len(self.socket_descriptor_))
return n
def Clear(self):
self.clear_socket_descriptor()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
def OutputPartial(self, out):
if (self.has_socket_descriptor_):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_socket_descriptor(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_socket_descriptor_: res+=prefix+("socket_descriptor: %s\n" % self.DebugFormatString(self.socket_descriptor_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
ksocket_descriptor = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "socket_descriptor",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.GetSocketNameRequest'
class GetSocketNameReply(ProtocolBuffer.ProtocolMessage):
has_proxy_external_ip_ = 0
proxy_external_ip_ = None
def __init__(self, contents=None):
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def proxy_external_ip(self):
if self.proxy_external_ip_ is None:
self.lazy_init_lock_.acquire()
try:
if self.proxy_external_ip_ is None: self.proxy_external_ip_ = AddressPort()
finally:
self.lazy_init_lock_.release()
return self.proxy_external_ip_
def mutable_proxy_external_ip(self): self.has_proxy_external_ip_ = 1; return self.proxy_external_ip()
def clear_proxy_external_ip(self):
if self.has_proxy_external_ip_:
self.has_proxy_external_ip_ = 0;
if self.proxy_external_ip_ is not None: self.proxy_external_ip_.Clear()
def has_proxy_external_ip(self): return self.has_proxy_external_ip_
def MergeFrom(self, x):
assert x is not self
if (x.has_proxy_external_ip()): self.mutable_proxy_external_ip().MergeFrom(x.proxy_external_ip())
def Equals(self, x):
if x is self: return 1
if self.has_proxy_external_ip_ != x.has_proxy_external_ip_: return 0
if self.has_proxy_external_ip_ and self.proxy_external_ip_ != x.proxy_external_ip_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_proxy_external_ip_ and not self.proxy_external_ip_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_proxy_external_ip_): n += 1 + self.lengthString(self.proxy_external_ip_.ByteSize())
return n
def ByteSizePartial(self):
n = 0
if (self.has_proxy_external_ip_): n += 1 + self.lengthString(self.proxy_external_ip_.ByteSizePartial())
return n
def Clear(self):
self.clear_proxy_external_ip()
def OutputUnchecked(self, out):
if (self.has_proxy_external_ip_):
out.putVarInt32(18)
out.putVarInt32(self.proxy_external_ip_.ByteSize())
self.proxy_external_ip_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_proxy_external_ip_):
out.putVarInt32(18)
out.putVarInt32(self.proxy_external_ip_.ByteSizePartial())
self.proxy_external_ip_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_proxy_external_ip().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_proxy_external_ip_:
res+=prefix+"proxy_external_ip <\n"
res+=self.proxy_external_ip_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kproxy_external_ip = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
2: "proxy_external_ip",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.GetSocketNameReply'
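# Note on the class above: GetSocketNameReply (like GetPeerNameReply below)
# carries its address at field number 2 (wire tag 18); field number 1 is
# simply unused in these replies.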
class GetPeerNameRequest(ProtocolBuffer.ProtocolMessage):
has_socket_descriptor_ = 0
socket_descriptor_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def socket_descriptor(self): return self.socket_descriptor_
def set_socket_descriptor(self, x):
self.has_socket_descriptor_ = 1
self.socket_descriptor_ = x
def clear_socket_descriptor(self):
if self.has_socket_descriptor_:
self.has_socket_descriptor_ = 0
self.socket_descriptor_ = ""
def has_socket_descriptor(self): return self.has_socket_descriptor_
def MergeFrom(self, x):
assert x is not self
if (x.has_socket_descriptor()): self.set_socket_descriptor(x.socket_descriptor())
def Equals(self, x):
if x is self: return 1
if self.has_socket_descriptor_ != x.has_socket_descriptor_: return 0
if self.has_socket_descriptor_ and self.socket_descriptor_ != x.socket_descriptor_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_socket_descriptor_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: socket_descriptor not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.socket_descriptor_))
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_socket_descriptor_):
n += 1
n += self.lengthString(len(self.socket_descriptor_))
return n
def Clear(self):
self.clear_socket_descriptor()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
def OutputPartial(self, out):
if (self.has_socket_descriptor_):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_socket_descriptor(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_socket_descriptor_: res+=prefix+("socket_descriptor: %s\n" % self.DebugFormatString(self.socket_descriptor_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
ksocket_descriptor = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "socket_descriptor",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.GetPeerNameRequest'
class GetPeerNameReply(ProtocolBuffer.ProtocolMessage):
has_peer_ip_ = 0
peer_ip_ = None
def __init__(self, contents=None):
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def peer_ip(self):
if self.peer_ip_ is None:
self.lazy_init_lock_.acquire()
try:
if self.peer_ip_ is None: self.peer_ip_ = AddressPort()
finally:
self.lazy_init_lock_.release()
return self.peer_ip_
def mutable_peer_ip(self): self.has_peer_ip_ = 1; return self.peer_ip()
def clear_peer_ip(self):
if self.has_peer_ip_:
self.has_peer_ip_ = 0;
if self.peer_ip_ is not None: self.peer_ip_.Clear()
def has_peer_ip(self): return self.has_peer_ip_
def MergeFrom(self, x):
assert x is not self
if (x.has_peer_ip()): self.mutable_peer_ip().MergeFrom(x.peer_ip())
def Equals(self, x):
if x is self: return 1
if self.has_peer_ip_ != x.has_peer_ip_: return 0
if self.has_peer_ip_ and self.peer_ip_ != x.peer_ip_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_peer_ip_ and not self.peer_ip_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_peer_ip_): n += 1 + self.lengthString(self.peer_ip_.ByteSize())
return n
def ByteSizePartial(self):
n = 0
if (self.has_peer_ip_): n += 1 + self.lengthString(self.peer_ip_.ByteSizePartial())
return n
def Clear(self):
self.clear_peer_ip()
def OutputUnchecked(self, out):
if (self.has_peer_ip_):
out.putVarInt32(18)
out.putVarInt32(self.peer_ip_.ByteSize())
self.peer_ip_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_peer_ip_):
out.putVarInt32(18)
out.putVarInt32(self.peer_ip_.ByteSizePartial())
self.peer_ip_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_peer_ip().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_peer_ip_:
res+=prefix+"peer_ip <\n"
res+=self.peer_ip_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kpeer_ip = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
2: "peer_ip",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.GetPeerNameReply'
class SocketOption(ProtocolBuffer.ProtocolMessage):
SOCKET_SOL_IP = 0
SOCKET_SOL_SOCKET = 1
SOCKET_SOL_TCP = 6
SOCKET_SOL_UDP = 17
_SocketOptionLevel_NAMES = {
0: "SOCKET_SOL_IP",
1: "SOCKET_SOL_SOCKET",
6: "SOCKET_SOL_TCP",
17: "SOCKET_SOL_UDP",
}
def SocketOptionLevel_Name(cls, x): return cls._SocketOptionLevel_NAMES.get(x, "")
SocketOptionLevel_Name = classmethod(SocketOptionLevel_Name)
SOCKET_SO_DEBUG = 1
SOCKET_SO_REUSEADDR = 2
SOCKET_SO_TYPE = 3
SOCKET_SO_ERROR = 4
SOCKET_SO_DONTROUTE = 5
SOCKET_SO_BROADCAST = 6
SOCKET_SO_SNDBUF = 7
SOCKET_SO_RCVBUF = 8
SOCKET_SO_KEEPALIVE = 9
SOCKET_SO_OOBINLINE = 10
SOCKET_SO_LINGER = 13
SOCKET_SO_RCVTIMEO = 20
SOCKET_SO_SNDTIMEO = 21
SOCKET_IP_TOS = 1
SOCKET_IP_TTL = 2
SOCKET_IP_HDRINCL = 3
SOCKET_IP_OPTIONS = 4
SOCKET_TCP_NODELAY = 1
SOCKET_TCP_MAXSEG = 2
SOCKET_TCP_CORK = 3
SOCKET_TCP_KEEPIDLE = 4
SOCKET_TCP_KEEPINTVL = 5
SOCKET_TCP_KEEPCNT = 6
SOCKET_TCP_SYNCNT = 7
SOCKET_TCP_LINGER2 = 8
SOCKET_TCP_DEFER_ACCEPT = 9
SOCKET_TCP_WINDOW_CLAMP = 10
SOCKET_TCP_INFO = 11
SOCKET_TCP_QUICKACK = 12
_SocketOptionName_NAMES = {
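  # Option numbers repeat across the SOL_SOCKET, SOL_IP and SOL_TCP groups, so
  # this flat dict keeps only the last duplicate key for each number; see the
  # collision sketch after this class.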
1: "SOCKET_SO_DEBUG",
2: "SOCKET_SO_REUSEADDR",
3: "SOCKET_SO_TYPE",
4: "SOCKET_SO_ERROR",
5: "SOCKET_SO_DONTROUTE",
6: "SOCKET_SO_BROADCAST",
7: "SOCKET_SO_SNDBUF",
8: "SOCKET_SO_RCVBUF",
9: "SOCKET_SO_KEEPALIVE",
10: "SOCKET_SO_OOBINLINE",
13: "SOCKET_SO_LINGER",
20: "SOCKET_SO_RCVTIMEO",
21: "SOCKET_SO_SNDTIMEO",
1: "SOCKET_IP_TOS",
2: "SOCKET_IP_TTL",
3: "SOCKET_IP_HDRINCL",
4: "SOCKET_IP_OPTIONS",
1: "SOCKET_TCP_NODELAY",
2: "SOCKET_TCP_MAXSEG",
3: "SOCKET_TCP_CORK",
4: "SOCKET_TCP_KEEPIDLE",
5: "SOCKET_TCP_KEEPINTVL",
6: "SOCKET_TCP_KEEPCNT",
7: "SOCKET_TCP_SYNCNT",
8: "SOCKET_TCP_LINGER2",
9: "SOCKET_TCP_DEFER_ACCEPT",
10: "SOCKET_TCP_WINDOW_CLAMP",
11: "SOCKET_TCP_INFO",
12: "SOCKET_TCP_QUICKACK",
}
def SocketOptionName_Name(cls, x): return cls._SocketOptionName_NAMES.get(x, "")
SocketOptionName_Name = classmethod(SocketOptionName_Name)
has_level_ = 0
level_ = 0
has_option_ = 0
option_ = 0
has_value_ = 0
value_ = ""
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def level(self): return self.level_
def set_level(self, x):
self.has_level_ = 1
self.level_ = x
def clear_level(self):
if self.has_level_:
self.has_level_ = 0
self.level_ = 0
def has_level(self): return self.has_level_
def option(self): return self.option_
def set_option(self, x):
self.has_option_ = 1
self.option_ = x
def clear_option(self):
if self.has_option_:
self.has_option_ = 0
self.option_ = 0
def has_option(self): return self.has_option_
def value(self): return self.value_
def set_value(self, x):
self.has_value_ = 1
self.value_ = x
def clear_value(self):
if self.has_value_:
self.has_value_ = 0
self.value_ = ""
def has_value(self): return self.has_value_
def MergeFrom(self, x):
assert x is not self
if (x.has_level()): self.set_level(x.level())
if (x.has_option()): self.set_option(x.option())
if (x.has_value()): self.set_value(x.value())
def Equals(self, x):
if x is self: return 1
if self.has_level_ != x.has_level_: return 0
if self.has_level_ and self.level_ != x.level_: return 0
if self.has_option_ != x.has_option_: return 0
if self.has_option_ and self.option_ != x.option_: return 0
if self.has_value_ != x.has_value_: return 0
if self.has_value_ and self.value_ != x.value_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_level_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: level not set.')
if (not self.has_option_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: option not set.')
if (not self.has_value_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: value not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthVarInt64(self.level_)
n += self.lengthVarInt64(self.option_)
n += self.lengthString(len(self.value_))
return n + 3
def ByteSizePartial(self):
n = 0
if (self.has_level_):
n += 1
n += self.lengthVarInt64(self.level_)
if (self.has_option_):
n += 1
n += self.lengthVarInt64(self.option_)
if (self.has_value_):
n += 1
n += self.lengthString(len(self.value_))
return n
def Clear(self):
self.clear_level()
self.clear_option()
self.clear_value()
def OutputUnchecked(self, out):
out.putVarInt32(8)
out.putVarInt32(self.level_)
out.putVarInt32(16)
out.putVarInt32(self.option_)
out.putVarInt32(26)
out.putPrefixedString(self.value_)
def OutputPartial(self, out):
if (self.has_level_):
out.putVarInt32(8)
out.putVarInt32(self.level_)
if (self.has_option_):
out.putVarInt32(16)
out.putVarInt32(self.option_)
if (self.has_value_):
out.putVarInt32(26)
out.putPrefixedString(self.value_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 8:
self.set_level(d.getVarInt32())
continue
if tt == 16:
self.set_option(d.getVarInt32())
continue
if tt == 26:
self.set_value(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_level_: res+=prefix+("level: %s\n" % self.DebugFormatInt32(self.level_))
if self.has_option_: res+=prefix+("option: %s\n" % self.DebugFormatInt32(self.option_))
if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
klevel = 1
koption = 2
kvalue = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "level",
2: "option",
3: "value",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.STRING,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.SocketOption'
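# SetSocketOptionsRequest pairs a required socket_descriptor (tag 1, string)
# with a repeated list of SocketOption submessages (tag 2) to apply to that
# socket; IsInitialized() also requires each nested option to be complete.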
class SetSocketOptionsRequest(ProtocolBuffer.ProtocolMessage):
has_socket_descriptor_ = 0
socket_descriptor_ = ""
def __init__(self, contents=None):
self.options_ = []
if contents is not None: self.MergeFromString(contents)
def socket_descriptor(self): return self.socket_descriptor_
def set_socket_descriptor(self, x):
self.has_socket_descriptor_ = 1
self.socket_descriptor_ = x
def clear_socket_descriptor(self):
if self.has_socket_descriptor_:
self.has_socket_descriptor_ = 0
self.socket_descriptor_ = ""
def has_socket_descriptor(self): return self.has_socket_descriptor_
def options_size(self): return len(self.options_)
def options_list(self): return self.options_
def options(self, i):
return self.options_[i]
def mutable_options(self, i):
return self.options_[i]
def add_options(self):
x = SocketOption()
self.options_.append(x)
return x
def clear_options(self):
self.options_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_socket_descriptor()): self.set_socket_descriptor(x.socket_descriptor())
for i in xrange(x.options_size()): self.add_options().CopyFrom(x.options(i))
def Equals(self, x):
if x is self: return 1
if self.has_socket_descriptor_ != x.has_socket_descriptor_: return 0
if self.has_socket_descriptor_ and self.socket_descriptor_ != x.socket_descriptor_: return 0
if len(self.options_) != len(x.options_): return 0
for e1, e2 in zip(self.options_, x.options_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_socket_descriptor_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: socket_descriptor not set.')
for p in self.options_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.socket_descriptor_))
n += 1 * len(self.options_)
for i in xrange(len(self.options_)): n += self.lengthString(self.options_[i].ByteSize())
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_socket_descriptor_):
n += 1
n += self.lengthString(len(self.socket_descriptor_))
n += 1 * len(self.options_)
for i in xrange(len(self.options_)): n += self.lengthString(self.options_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_socket_descriptor()
self.clear_options()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
for i in xrange(len(self.options_)):
out.putVarInt32(18)
out.putVarInt32(self.options_[i].ByteSize())
self.options_[i].OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_socket_descriptor_):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
for i in xrange(len(self.options_)):
out.putVarInt32(18)
out.putVarInt32(self.options_[i].ByteSizePartial())
self.options_[i].OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_socket_descriptor(d.getPrefixedString())
continue
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_options().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_socket_descriptor_: res+=prefix+("socket_descriptor: %s\n" % self.DebugFormatString(self.socket_descriptor_))
cnt=0
for e in self.options_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("options%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
ksocket_descriptor = 1
koptions = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "socket_descriptor",
2: "options",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.SetSocketOptionsRequest'
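# SetSocketOptionsReply is an empty acknowledgement: no fields, so ByteSize()
# is always 0 and TryMerge() only skips over unknown data.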
class SetSocketOptionsReply(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.SetSocketOptionsReply'
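# GetSocketOptionsRequest is wire-identical to SetSocketOptionsRequest: a
# required socket_descriptor plus repeated SocketOption entries, which
# presumably name the level/option pairs to read back.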
class GetSocketOptionsRequest(ProtocolBuffer.ProtocolMessage):
has_socket_descriptor_ = 0
socket_descriptor_ = ""
def __init__(self, contents=None):
self.options_ = []
if contents is not None: self.MergeFromString(contents)
def socket_descriptor(self): return self.socket_descriptor_
def set_socket_descriptor(self, x):
self.has_socket_descriptor_ = 1
self.socket_descriptor_ = x
def clear_socket_descriptor(self):
if self.has_socket_descriptor_:
self.has_socket_descriptor_ = 0
self.socket_descriptor_ = ""
def has_socket_descriptor(self): return self.has_socket_descriptor_
def options_size(self): return len(self.options_)
def options_list(self): return self.options_
def options(self, i):
return self.options_[i]
def mutable_options(self, i):
return self.options_[i]
def add_options(self):
x = SocketOption()
self.options_.append(x)
return x
def clear_options(self):
self.options_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_socket_descriptor()): self.set_socket_descriptor(x.socket_descriptor())
for i in xrange(x.options_size()): self.add_options().CopyFrom(x.options(i))
def Equals(self, x):
if x is self: return 1
if self.has_socket_descriptor_ != x.has_socket_descriptor_: return 0
if self.has_socket_descriptor_ and self.socket_descriptor_ != x.socket_descriptor_: return 0
if len(self.options_) != len(x.options_): return 0
for e1, e2 in zip(self.options_, x.options_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_socket_descriptor_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: socket_descriptor not set.')
for p in self.options_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.socket_descriptor_))
n += 1 * len(self.options_)
for i in xrange(len(self.options_)): n += self.lengthString(self.options_[i].ByteSize())
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_socket_descriptor_):
n += 1
n += self.lengthString(len(self.socket_descriptor_))
n += 1 * len(self.options_)
for i in xrange(len(self.options_)): n += self.lengthString(self.options_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_socket_descriptor()
self.clear_options()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
for i in xrange(len(self.options_)):
out.putVarInt32(18)
out.putVarInt32(self.options_[i].ByteSize())
self.options_[i].OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_socket_descriptor_):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
for i in xrange(len(self.options_)):
out.putVarInt32(18)
out.putVarInt32(self.options_[i].ByteSizePartial())
self.options_[i].OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_socket_descriptor(d.getPrefixedString())
continue
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_options().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_socket_descriptor_: res+=prefix+("socket_descriptor: %s\n" % self.DebugFormatString(self.socket_descriptor_))
cnt=0
for e in self.options_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("options%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
ksocket_descriptor = 1
koptions = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "socket_descriptor",
2: "options",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.GetSocketOptionsRequest'
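# GetSocketOptionsReply returns the repeated SocketOption results. The options
# field keeps tag 2 (matching the request), so field number 1 is deliberately
# absent from the _TEXT/_TYPES tables below.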
class GetSocketOptionsReply(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.options_ = []
if contents is not None: self.MergeFromString(contents)
def options_size(self): return len(self.options_)
def options_list(self): return self.options_
def options(self, i):
return self.options_[i]
def mutable_options(self, i):
return self.options_[i]
def add_options(self):
x = SocketOption()
self.options_.append(x)
return x
def clear_options(self):
self.options_ = []
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.options_size()): self.add_options().CopyFrom(x.options(i))
def Equals(self, x):
if x is self: return 1
if len(self.options_) != len(x.options_): return 0
for e1, e2 in zip(self.options_, x.options_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.options_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += 1 * len(self.options_)
for i in xrange(len(self.options_)): n += self.lengthString(self.options_[i].ByteSize())
return n
def ByteSizePartial(self):
n = 0
n += 1 * len(self.options_)
for i in xrange(len(self.options_)): n += self.lengthString(self.options_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_options()
def OutputUnchecked(self, out):
for i in xrange(len(self.options_)):
out.putVarInt32(18)
out.putVarInt32(self.options_[i].ByteSize())
self.options_[i].OutputUnchecked(out)
def OutputPartial(self, out):
for i in xrange(len(self.options_)):
out.putVarInt32(18)
out.putVarInt32(self.options_[i].ByteSizePartial())
self.options_[i].OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_options().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.options_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("options%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
koptions = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
2: "options",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.GetSocketOptionsReply'
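# ConnectRequest: required socket_descriptor (tag 1) and remote_ip (tag 2, an
# AddressPort submessage defined earlier in this module), plus optional
# timeout_seconds (tag 3, double, default -1.0). A minimal construction
# sketch, with illustrative values and assuming the usual ProtocolMessage
# base-class helpers (IsInitialized, Encode):
#
#   req = ConnectRequest()
#   req.set_socket_descriptor('opaque-descriptor')
#   req.mutable_remote_ip()        # fill in the AddressPort fields here
#   req.set_timeout_seconds(30.0)
#   if req.IsInitialized():
#     wire_bytes = req.Encode()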
class ConnectRequest(ProtocolBuffer.ProtocolMessage):
has_socket_descriptor_ = 0
socket_descriptor_ = ""
has_remote_ip_ = 0
has_timeout_seconds_ = 0
timeout_seconds_ = -1.0
def __init__(self, contents=None):
self.remote_ip_ = AddressPort()
if contents is not None: self.MergeFromString(contents)
def socket_descriptor(self): return self.socket_descriptor_
def set_socket_descriptor(self, x):
self.has_socket_descriptor_ = 1
self.socket_descriptor_ = x
def clear_socket_descriptor(self):
if self.has_socket_descriptor_:
self.has_socket_descriptor_ = 0
self.socket_descriptor_ = ""
def has_socket_descriptor(self): return self.has_socket_descriptor_
def remote_ip(self): return self.remote_ip_
def mutable_remote_ip(self): self.has_remote_ip_ = 1; return self.remote_ip_
  def clear_remote_ip(self): self.has_remote_ip_ = 0; self.remote_ip_.Clear()
def has_remote_ip(self): return self.has_remote_ip_
def timeout_seconds(self): return self.timeout_seconds_
def set_timeout_seconds(self, x):
self.has_timeout_seconds_ = 1
self.timeout_seconds_ = x
def clear_timeout_seconds(self):
if self.has_timeout_seconds_:
self.has_timeout_seconds_ = 0
self.timeout_seconds_ = -1.0
def has_timeout_seconds(self): return self.has_timeout_seconds_
def MergeFrom(self, x):
assert x is not self
if (x.has_socket_descriptor()): self.set_socket_descriptor(x.socket_descriptor())
if (x.has_remote_ip()): self.mutable_remote_ip().MergeFrom(x.remote_ip())
if (x.has_timeout_seconds()): self.set_timeout_seconds(x.timeout_seconds())
def Equals(self, x):
if x is self: return 1
if self.has_socket_descriptor_ != x.has_socket_descriptor_: return 0
if self.has_socket_descriptor_ and self.socket_descriptor_ != x.socket_descriptor_: return 0
if self.has_remote_ip_ != x.has_remote_ip_: return 0
if self.has_remote_ip_ and self.remote_ip_ != x.remote_ip_: return 0
if self.has_timeout_seconds_ != x.has_timeout_seconds_: return 0
if self.has_timeout_seconds_ and self.timeout_seconds_ != x.timeout_seconds_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_socket_descriptor_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: socket_descriptor not set.')
if (not self.has_remote_ip_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: remote_ip not set.')
elif not self.remote_ip_.IsInitialized(debug_strs): initialized = 0
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.socket_descriptor_))
n += self.lengthString(self.remote_ip_.ByteSize())
if (self.has_timeout_seconds_): n += 9
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_socket_descriptor_):
n += 1
n += self.lengthString(len(self.socket_descriptor_))
if (self.has_remote_ip_):
n += 1
n += self.lengthString(self.remote_ip_.ByteSizePartial())
if (self.has_timeout_seconds_): n += 9
return n
def Clear(self):
self.clear_socket_descriptor()
self.clear_remote_ip()
self.clear_timeout_seconds()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
out.putVarInt32(18)
out.putVarInt32(self.remote_ip_.ByteSize())
self.remote_ip_.OutputUnchecked(out)
if (self.has_timeout_seconds_):
out.putVarInt32(25)
out.putDouble(self.timeout_seconds_)
def OutputPartial(self, out):
if (self.has_socket_descriptor_):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
if (self.has_remote_ip_):
out.putVarInt32(18)
out.putVarInt32(self.remote_ip_.ByteSizePartial())
self.remote_ip_.OutputPartial(out)
if (self.has_timeout_seconds_):
out.putVarInt32(25)
out.putDouble(self.timeout_seconds_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_socket_descriptor(d.getPrefixedString())
continue
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_remote_ip().TryMerge(tmp)
continue
if tt == 25:
self.set_timeout_seconds(d.getDouble())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_socket_descriptor_: res+=prefix+("socket_descriptor: %s\n" % self.DebugFormatString(self.socket_descriptor_))
if self.has_remote_ip_:
res+=prefix+"remote_ip <\n"
res+=self.remote_ip_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_timeout_seconds_: res+=prefix+("timeout_seconds: %s\n" % self.DebugFormat(self.timeout_seconds_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
ksocket_descriptor = 1
kremote_ip = 2
ktimeout_seconds = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "socket_descriptor",
2: "remote_ip",
3: "timeout_seconds",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.DOUBLE,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.ConnectRequest'
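# ConnectReply carries an optional proxy_external_ip (tag 1, AddressPort). The
# submessage is created lazily under lazy_init_lock_, and the class also
# round-trips extension fields when _extension_runtime is enabled.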
class ConnectReply(_ExtendableProtocolMessage):
has_proxy_external_ip_ = 0
proxy_external_ip_ = None
def __init__(self, contents=None):
if _extension_runtime:
self._extension_fields = {}
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def proxy_external_ip(self):
if self.proxy_external_ip_ is None:
self.lazy_init_lock_.acquire()
try:
if self.proxy_external_ip_ is None: self.proxy_external_ip_ = AddressPort()
finally:
self.lazy_init_lock_.release()
return self.proxy_external_ip_
def mutable_proxy_external_ip(self): self.has_proxy_external_ip_ = 1; return self.proxy_external_ip()
def clear_proxy_external_ip(self):
if self.has_proxy_external_ip_:
      self.has_proxy_external_ip_ = 0
if self.proxy_external_ip_ is not None: self.proxy_external_ip_.Clear()
def has_proxy_external_ip(self): return self.has_proxy_external_ip_
def MergeFrom(self, x):
assert x is not self
if (x.has_proxy_external_ip()): self.mutable_proxy_external_ip().MergeFrom(x.proxy_external_ip())
if _extension_runtime: self._MergeExtensionFields(x)
def Equals(self, x):
if x is self: return 1
if self.has_proxy_external_ip_ != x.has_proxy_external_ip_: return 0
if self.has_proxy_external_ip_ and self.proxy_external_ip_ != x.proxy_external_ip_: return 0
if _extension_runtime and not self._ExtensionEquals(x): return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_proxy_external_ip_ and not self.proxy_external_ip_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_proxy_external_ip_): n += 1 + self.lengthString(self.proxy_external_ip_.ByteSize())
if _extension_runtime:
n += self._ExtensionByteSize(False)
return n
def ByteSizePartial(self):
n = 0
if (self.has_proxy_external_ip_): n += 1 + self.lengthString(self.proxy_external_ip_.ByteSizePartial())
if _extension_runtime:
n += self._ExtensionByteSize(True)
return n
def Clear(self):
self.clear_proxy_external_ip()
if _extension_runtime: self._extension_fields.clear()
def OutputUnchecked(self, out):
if _extension_runtime:
extensions = self._ListExtensions()
extension_index = 0
if (self.has_proxy_external_ip_):
out.putVarInt32(10)
out.putVarInt32(self.proxy_external_ip_.ByteSize())
self.proxy_external_ip_.OutputUnchecked(out)
if _extension_runtime:
extension_index = self._OutputExtensionFields(out, False, extensions, extension_index, 536870912)
def OutputPartial(self, out):
if _extension_runtime:
extensions = self._ListExtensions()
extension_index = 0
if (self.has_proxy_external_ip_):
out.putVarInt32(10)
out.putVarInt32(self.proxy_external_ip_.ByteSizePartial())
self.proxy_external_ip_.OutputPartial(out)
if _extension_runtime:
extension_index = self._OutputExtensionFields(out, True, extensions, extension_index, 536870912)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_proxy_external_ip().TryMerge(tmp)
continue
if _extension_runtime:
if (1000 <= tt):
self._ParseOneExtensionField(tt, d)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_proxy_external_ip_:
res+=prefix+"proxy_external_ip <\n"
res+=self.proxy_external_ip_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if _extension_runtime:
res+=self._ExtensionDebugString(prefix, printElemNumber)
return res
if _extension_runtime:
_extensions_by_field_number = {}
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kproxy_external_ip = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "proxy_external_ip",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.ConnectReply'
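# ListenRequest: required socket_descriptor (tag 1) and backlog (tag 2,
# varint), the same pair of arguments as listen(2).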
class ListenRequest(ProtocolBuffer.ProtocolMessage):
has_socket_descriptor_ = 0
socket_descriptor_ = ""
has_backlog_ = 0
backlog_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def socket_descriptor(self): return self.socket_descriptor_
def set_socket_descriptor(self, x):
self.has_socket_descriptor_ = 1
self.socket_descriptor_ = x
def clear_socket_descriptor(self):
if self.has_socket_descriptor_:
self.has_socket_descriptor_ = 0
self.socket_descriptor_ = ""
def has_socket_descriptor(self): return self.has_socket_descriptor_
def backlog(self): return self.backlog_
def set_backlog(self, x):
self.has_backlog_ = 1
self.backlog_ = x
def clear_backlog(self):
if self.has_backlog_:
self.has_backlog_ = 0
self.backlog_ = 0
def has_backlog(self): return self.has_backlog_
def MergeFrom(self, x):
assert x is not self
if (x.has_socket_descriptor()): self.set_socket_descriptor(x.socket_descriptor())
if (x.has_backlog()): self.set_backlog(x.backlog())
def Equals(self, x):
if x is self: return 1
if self.has_socket_descriptor_ != x.has_socket_descriptor_: return 0
if self.has_socket_descriptor_ and self.socket_descriptor_ != x.socket_descriptor_: return 0
if self.has_backlog_ != x.has_backlog_: return 0
if self.has_backlog_ and self.backlog_ != x.backlog_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_socket_descriptor_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: socket_descriptor not set.')
if (not self.has_backlog_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: backlog not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.socket_descriptor_))
n += self.lengthVarInt64(self.backlog_)
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_socket_descriptor_):
n += 1
n += self.lengthString(len(self.socket_descriptor_))
if (self.has_backlog_):
n += 1
n += self.lengthVarInt64(self.backlog_)
return n
def Clear(self):
self.clear_socket_descriptor()
self.clear_backlog()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
out.putVarInt32(16)
out.putVarInt32(self.backlog_)
def OutputPartial(self, out):
if (self.has_socket_descriptor_):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
if (self.has_backlog_):
out.putVarInt32(16)
out.putVarInt32(self.backlog_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_socket_descriptor(d.getPrefixedString())
continue
if tt == 16:
self.set_backlog(d.getVarInt32())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_socket_descriptor_: res+=prefix+("socket_descriptor: %s\n" % self.DebugFormatString(self.socket_descriptor_))
if self.has_backlog_: res+=prefix+("backlog: %s\n" % self.DebugFormatInt32(self.backlog_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
ksocket_descriptor = 1
kbacklog = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "socket_descriptor",
2: "backlog",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.ListenRequest'
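# ListenReply is another empty acknowledgement message.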
class ListenReply(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.ListenReply'
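# AcceptRequest: required socket_descriptor plus optional timeout_seconds.
# The timeout is field 2 encoded as a double, hence wire tag 17
# ((2 << 3) | 1, the 64-bit wire type).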
class AcceptRequest(ProtocolBuffer.ProtocolMessage):
has_socket_descriptor_ = 0
socket_descriptor_ = ""
has_timeout_seconds_ = 0
timeout_seconds_ = -1.0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def socket_descriptor(self): return self.socket_descriptor_
def set_socket_descriptor(self, x):
self.has_socket_descriptor_ = 1
self.socket_descriptor_ = x
def clear_socket_descriptor(self):
if self.has_socket_descriptor_:
self.has_socket_descriptor_ = 0
self.socket_descriptor_ = ""
def has_socket_descriptor(self): return self.has_socket_descriptor_
def timeout_seconds(self): return self.timeout_seconds_
def set_timeout_seconds(self, x):
self.has_timeout_seconds_ = 1
self.timeout_seconds_ = x
def clear_timeout_seconds(self):
if self.has_timeout_seconds_:
self.has_timeout_seconds_ = 0
self.timeout_seconds_ = -1.0
def has_timeout_seconds(self): return self.has_timeout_seconds_
def MergeFrom(self, x):
assert x is not self
if (x.has_socket_descriptor()): self.set_socket_descriptor(x.socket_descriptor())
if (x.has_timeout_seconds()): self.set_timeout_seconds(x.timeout_seconds())
def Equals(self, x):
if x is self: return 1
if self.has_socket_descriptor_ != x.has_socket_descriptor_: return 0
if self.has_socket_descriptor_ and self.socket_descriptor_ != x.socket_descriptor_: return 0
if self.has_timeout_seconds_ != x.has_timeout_seconds_: return 0
if self.has_timeout_seconds_ and self.timeout_seconds_ != x.timeout_seconds_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_socket_descriptor_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: socket_descriptor not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.socket_descriptor_))
if (self.has_timeout_seconds_): n += 9
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_socket_descriptor_):
n += 1
n += self.lengthString(len(self.socket_descriptor_))
if (self.has_timeout_seconds_): n += 9
return n
def Clear(self):
self.clear_socket_descriptor()
self.clear_timeout_seconds()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
if (self.has_timeout_seconds_):
out.putVarInt32(17)
out.putDouble(self.timeout_seconds_)
def OutputPartial(self, out):
if (self.has_socket_descriptor_):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
if (self.has_timeout_seconds_):
out.putVarInt32(17)
out.putDouble(self.timeout_seconds_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_socket_descriptor(d.getPrefixedString())
continue
if tt == 17:
self.set_timeout_seconds(d.getDouble())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_socket_descriptor_: res+=prefix+("socket_descriptor: %s\n" % self.DebugFormatString(self.socket_descriptor_))
if self.has_timeout_seconds_: res+=prefix+("timeout_seconds: %s\n" % self.DebugFormat(self.timeout_seconds_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
ksocket_descriptor = 1
ktimeout_seconds = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "socket_descriptor",
2: "timeout_seconds",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.DOUBLE,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.AcceptRequest'
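# AcceptReply: optional new_socket_descriptor (tag 2) for the accepted
# connection and a lazily created remote_address (tag 3, AddressPort); field
# number 1 is unused.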
class AcceptReply(ProtocolBuffer.ProtocolMessage):
has_new_socket_descriptor_ = 0
new_socket_descriptor_ = ""
has_remote_address_ = 0
remote_address_ = None
def __init__(self, contents=None):
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def new_socket_descriptor(self): return self.new_socket_descriptor_
def set_new_socket_descriptor(self, x):
self.has_new_socket_descriptor_ = 1
self.new_socket_descriptor_ = x
def clear_new_socket_descriptor(self):
if self.has_new_socket_descriptor_:
self.has_new_socket_descriptor_ = 0
self.new_socket_descriptor_ = ""
def has_new_socket_descriptor(self): return self.has_new_socket_descriptor_
def remote_address(self):
if self.remote_address_ is None:
self.lazy_init_lock_.acquire()
try:
if self.remote_address_ is None: self.remote_address_ = AddressPort()
finally:
self.lazy_init_lock_.release()
return self.remote_address_
def mutable_remote_address(self): self.has_remote_address_ = 1; return self.remote_address()
def clear_remote_address(self):
if self.has_remote_address_:
      self.has_remote_address_ = 0
if self.remote_address_ is not None: self.remote_address_.Clear()
def has_remote_address(self): return self.has_remote_address_
def MergeFrom(self, x):
assert x is not self
if (x.has_new_socket_descriptor()): self.set_new_socket_descriptor(x.new_socket_descriptor())
if (x.has_remote_address()): self.mutable_remote_address().MergeFrom(x.remote_address())
def Equals(self, x):
if x is self: return 1
if self.has_new_socket_descriptor_ != x.has_new_socket_descriptor_: return 0
if self.has_new_socket_descriptor_ and self.new_socket_descriptor_ != x.new_socket_descriptor_: return 0
if self.has_remote_address_ != x.has_remote_address_: return 0
if self.has_remote_address_ and self.remote_address_ != x.remote_address_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_remote_address_ and not self.remote_address_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_new_socket_descriptor_): n += 1 + self.lengthString(len(self.new_socket_descriptor_))
if (self.has_remote_address_): n += 1 + self.lengthString(self.remote_address_.ByteSize())
return n
def ByteSizePartial(self):
n = 0
if (self.has_new_socket_descriptor_): n += 1 + self.lengthString(len(self.new_socket_descriptor_))
if (self.has_remote_address_): n += 1 + self.lengthString(self.remote_address_.ByteSizePartial())
return n
def Clear(self):
self.clear_new_socket_descriptor()
self.clear_remote_address()
def OutputUnchecked(self, out):
if (self.has_new_socket_descriptor_):
out.putVarInt32(18)
out.putPrefixedString(self.new_socket_descriptor_)
if (self.has_remote_address_):
out.putVarInt32(26)
out.putVarInt32(self.remote_address_.ByteSize())
self.remote_address_.OutputUnchecked(out)
def OutputPartial(self, out):
if (self.has_new_socket_descriptor_):
out.putVarInt32(18)
out.putPrefixedString(self.new_socket_descriptor_)
if (self.has_remote_address_):
out.putVarInt32(26)
out.putVarInt32(self.remote_address_.ByteSizePartial())
self.remote_address_.OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 18:
self.set_new_socket_descriptor(d.getPrefixedString())
continue
if tt == 26:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_remote_address().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_new_socket_descriptor_: res+=prefix+("new_socket_descriptor: %s\n" % self.DebugFormatString(self.new_socket_descriptor_))
if self.has_remote_address_:
res+=prefix+"remote_address <\n"
res+=self.remote_address_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
knew_socket_descriptor = 2
kremote_address = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
2: "new_socket_descriptor",
3: "remote_address",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.AcceptReply'
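# ShutDownRequest: the SOCKET_SHUT_* constants below are named after the POSIX
# shutdown(2) modes (note they start at 1, not 0). socket_descriptor, how, and
# send_offset are all required.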
class ShutDownRequest(ProtocolBuffer.ProtocolMessage):
SOCKET_SHUT_RD = 1
SOCKET_SHUT_WR = 2
SOCKET_SHUT_RDWR = 3
_How_NAMES = {
1: "SOCKET_SHUT_RD",
2: "SOCKET_SHUT_WR",
3: "SOCKET_SHUT_RDWR",
}
def How_Name(cls, x): return cls._How_NAMES.get(x, "")
How_Name = classmethod(How_Name)
has_socket_descriptor_ = 0
socket_descriptor_ = ""
has_how_ = 0
how_ = 0
has_send_offset_ = 0
send_offset_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def socket_descriptor(self): return self.socket_descriptor_
def set_socket_descriptor(self, x):
self.has_socket_descriptor_ = 1
self.socket_descriptor_ = x
def clear_socket_descriptor(self):
if self.has_socket_descriptor_:
self.has_socket_descriptor_ = 0
self.socket_descriptor_ = ""
def has_socket_descriptor(self): return self.has_socket_descriptor_
def how(self): return self.how_
def set_how(self, x):
self.has_how_ = 1
self.how_ = x
def clear_how(self):
if self.has_how_:
self.has_how_ = 0
self.how_ = 0
def has_how(self): return self.has_how_
def send_offset(self): return self.send_offset_
def set_send_offset(self, x):
self.has_send_offset_ = 1
self.send_offset_ = x
def clear_send_offset(self):
if self.has_send_offset_:
self.has_send_offset_ = 0
self.send_offset_ = 0
def has_send_offset(self): return self.has_send_offset_
def MergeFrom(self, x):
assert x is not self
if (x.has_socket_descriptor()): self.set_socket_descriptor(x.socket_descriptor())
if (x.has_how()): self.set_how(x.how())
if (x.has_send_offset()): self.set_send_offset(x.send_offset())
def Equals(self, x):
if x is self: return 1
if self.has_socket_descriptor_ != x.has_socket_descriptor_: return 0
if self.has_socket_descriptor_ and self.socket_descriptor_ != x.socket_descriptor_: return 0
if self.has_how_ != x.has_how_: return 0
if self.has_how_ and self.how_ != x.how_: return 0
if self.has_send_offset_ != x.has_send_offset_: return 0
if self.has_send_offset_ and self.send_offset_ != x.send_offset_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_socket_descriptor_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: socket_descriptor not set.')
if (not self.has_how_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: how not set.')
if (not self.has_send_offset_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: send_offset not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.socket_descriptor_))
n += self.lengthVarInt64(self.how_)
n += self.lengthVarInt64(self.send_offset_)
return n + 3
def ByteSizePartial(self):
n = 0
if (self.has_socket_descriptor_):
n += 1
n += self.lengthString(len(self.socket_descriptor_))
if (self.has_how_):
n += 1
n += self.lengthVarInt64(self.how_)
if (self.has_send_offset_):
n += 1
n += self.lengthVarInt64(self.send_offset_)
return n
def Clear(self):
self.clear_socket_descriptor()
self.clear_how()
self.clear_send_offset()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
out.putVarInt32(16)
out.putVarInt32(self.how_)
out.putVarInt32(24)
out.putVarInt64(self.send_offset_)
def OutputPartial(self, out):
if (self.has_socket_descriptor_):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
if (self.has_how_):
out.putVarInt32(16)
out.putVarInt32(self.how_)
if (self.has_send_offset_):
out.putVarInt32(24)
out.putVarInt64(self.send_offset_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_socket_descriptor(d.getPrefixedString())
continue
if tt == 16:
self.set_how(d.getVarInt32())
continue
if tt == 24:
self.set_send_offset(d.getVarInt64())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_socket_descriptor_: res+=prefix+("socket_descriptor: %s\n" % self.DebugFormatString(self.socket_descriptor_))
if self.has_how_: res+=prefix+("how: %s\n" % self.DebugFormatInt32(self.how_))
if self.has_send_offset_: res+=prefix+("send_offset: %s\n" % self.DebugFormatInt64(self.send_offset_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
ksocket_descriptor = 1
khow = 2
ksend_offset = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "socket_descriptor",
2: "how",
3: "send_offset",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.NUMERIC,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.ShutDownRequest'
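# ShutDownReply is an empty acknowledgement message.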
class ShutDownReply(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.ShutDownReply'
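# CloseRequest: required socket_descriptor plus optional send_offset (tag 2,
# varint, default -1).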
class CloseRequest(ProtocolBuffer.ProtocolMessage):
has_socket_descriptor_ = 0
socket_descriptor_ = ""
has_send_offset_ = 0
send_offset_ = -1
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def socket_descriptor(self): return self.socket_descriptor_
def set_socket_descriptor(self, x):
self.has_socket_descriptor_ = 1
self.socket_descriptor_ = x
def clear_socket_descriptor(self):
if self.has_socket_descriptor_:
self.has_socket_descriptor_ = 0
self.socket_descriptor_ = ""
def has_socket_descriptor(self): return self.has_socket_descriptor_
def send_offset(self): return self.send_offset_
def set_send_offset(self, x):
self.has_send_offset_ = 1
self.send_offset_ = x
def clear_send_offset(self):
if self.has_send_offset_:
self.has_send_offset_ = 0
self.send_offset_ = -1
def has_send_offset(self): return self.has_send_offset_
def MergeFrom(self, x):
assert x is not self
if (x.has_socket_descriptor()): self.set_socket_descriptor(x.socket_descriptor())
if (x.has_send_offset()): self.set_send_offset(x.send_offset())
def Equals(self, x):
if x is self: return 1
if self.has_socket_descriptor_ != x.has_socket_descriptor_: return 0
if self.has_socket_descriptor_ and self.socket_descriptor_ != x.socket_descriptor_: return 0
if self.has_send_offset_ != x.has_send_offset_: return 0
if self.has_send_offset_ and self.send_offset_ != x.send_offset_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_socket_descriptor_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: socket_descriptor not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.socket_descriptor_))
if (self.has_send_offset_): n += 1 + self.lengthVarInt64(self.send_offset_)
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_socket_descriptor_):
n += 1
n += self.lengthString(len(self.socket_descriptor_))
if (self.has_send_offset_): n += 1 + self.lengthVarInt64(self.send_offset_)
return n
def Clear(self):
self.clear_socket_descriptor()
self.clear_send_offset()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
if (self.has_send_offset_):
out.putVarInt32(16)
out.putVarInt64(self.send_offset_)
def OutputPartial(self, out):
if (self.has_socket_descriptor_):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
if (self.has_send_offset_):
out.putVarInt32(16)
out.putVarInt64(self.send_offset_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_socket_descriptor(d.getPrefixedString())
continue
if tt == 16:
self.set_send_offset(d.getVarInt64())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_socket_descriptor_: res+=prefix+("socket_descriptor: %s\n" % self.DebugFormatString(self.socket_descriptor_))
if self.has_send_offset_: res+=prefix+("send_offset: %s\n" % self.DebugFormatInt64(self.send_offset_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
ksocket_descriptor = 1
ksend_offset = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "socket_descriptor",
2: "send_offset",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.CloseRequest'
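# CloseReply, like the other *Reply acknowledgements here, carries no fields.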
class CloseReply(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def MergeFrom(self, x):
assert x is not self
def Equals(self, x):
if x is self: return 1
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
return n
def ByteSizePartial(self):
n = 0
return n
def Clear(self):
pass
def OutputUnchecked(self, out):
pass
def OutputPartial(self, out):
pass
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
}, 0)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
}, 0, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.CloseReply'
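# SendRequest: required socket_descriptor, data, and stream_offset; optional
# flags (tag 4), send_to (tag 5, a lazily created AddressPort, presumably for
# datagram-style sends), and timeout_seconds (tag 6, double, wire tag 49).
# A minimal stream-style sketch with illustrative values, assuming Encode()
# comes from the ProtocolMessage base class:
#
#   req = SendRequest()
#   req.set_socket_descriptor('opaque-descriptor')
#   req.set_data('payload')
#   req.set_stream_offset(0)
#   wire_bytes = req.Encode()  # raises if a required field were missing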
class SendRequest(ProtocolBuffer.ProtocolMessage):
has_socket_descriptor_ = 0
socket_descriptor_ = ""
has_data_ = 0
data_ = ""
has_stream_offset_ = 0
stream_offset_ = 0
has_flags_ = 0
flags_ = 0
has_send_to_ = 0
send_to_ = None
has_timeout_seconds_ = 0
timeout_seconds_ = -1.0
def __init__(self, contents=None):
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def socket_descriptor(self): return self.socket_descriptor_
def set_socket_descriptor(self, x):
self.has_socket_descriptor_ = 1
self.socket_descriptor_ = x
def clear_socket_descriptor(self):
if self.has_socket_descriptor_:
self.has_socket_descriptor_ = 0
self.socket_descriptor_ = ""
def has_socket_descriptor(self): return self.has_socket_descriptor_
def data(self): return self.data_
def set_data(self, x):
self.has_data_ = 1
self.data_ = x
def clear_data(self):
if self.has_data_:
self.has_data_ = 0
self.data_ = ""
def has_data(self): return self.has_data_
def stream_offset(self): return self.stream_offset_
def set_stream_offset(self, x):
self.has_stream_offset_ = 1
self.stream_offset_ = x
def clear_stream_offset(self):
if self.has_stream_offset_:
self.has_stream_offset_ = 0
self.stream_offset_ = 0
def has_stream_offset(self): return self.has_stream_offset_
def flags(self): return self.flags_
def set_flags(self, x):
self.has_flags_ = 1
self.flags_ = x
def clear_flags(self):
if self.has_flags_:
self.has_flags_ = 0
self.flags_ = 0
def has_flags(self): return self.has_flags_
def send_to(self):
if self.send_to_ is None:
self.lazy_init_lock_.acquire()
try:
if self.send_to_ is None: self.send_to_ = AddressPort()
finally:
self.lazy_init_lock_.release()
return self.send_to_
def mutable_send_to(self): self.has_send_to_ = 1; return self.send_to()
def clear_send_to(self):
if self.has_send_to_:
      self.has_send_to_ = 0
if self.send_to_ is not None: self.send_to_.Clear()
def has_send_to(self): return self.has_send_to_
def timeout_seconds(self): return self.timeout_seconds_
def set_timeout_seconds(self, x):
self.has_timeout_seconds_ = 1
self.timeout_seconds_ = x
def clear_timeout_seconds(self):
if self.has_timeout_seconds_:
self.has_timeout_seconds_ = 0
self.timeout_seconds_ = -1.0
def has_timeout_seconds(self): return self.has_timeout_seconds_
def MergeFrom(self, x):
assert x is not self
if (x.has_socket_descriptor()): self.set_socket_descriptor(x.socket_descriptor())
if (x.has_data()): self.set_data(x.data())
if (x.has_stream_offset()): self.set_stream_offset(x.stream_offset())
if (x.has_flags()): self.set_flags(x.flags())
if (x.has_send_to()): self.mutable_send_to().MergeFrom(x.send_to())
if (x.has_timeout_seconds()): self.set_timeout_seconds(x.timeout_seconds())
def Equals(self, x):
if x is self: return 1
if self.has_socket_descriptor_ != x.has_socket_descriptor_: return 0
if self.has_socket_descriptor_ and self.socket_descriptor_ != x.socket_descriptor_: return 0
if self.has_data_ != x.has_data_: return 0
if self.has_data_ and self.data_ != x.data_: return 0
if self.has_stream_offset_ != x.has_stream_offset_: return 0
if self.has_stream_offset_ and self.stream_offset_ != x.stream_offset_: return 0
if self.has_flags_ != x.has_flags_: return 0
if self.has_flags_ and self.flags_ != x.flags_: return 0
if self.has_send_to_ != x.has_send_to_: return 0
if self.has_send_to_ and self.send_to_ != x.send_to_: return 0
if self.has_timeout_seconds_ != x.has_timeout_seconds_: return 0
if self.has_timeout_seconds_ and self.timeout_seconds_ != x.timeout_seconds_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_socket_descriptor_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: socket_descriptor not set.')
if (not self.has_data_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: data not set.')
if (not self.has_stream_offset_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: stream_offset not set.')
if (self.has_send_to_ and not self.send_to_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.socket_descriptor_))
n += self.lengthString(len(self.data_))
n += self.lengthVarInt64(self.stream_offset_)
if (self.has_flags_): n += 1 + self.lengthVarInt64(self.flags_)
if (self.has_send_to_): n += 1 + self.lengthString(self.send_to_.ByteSize())
if (self.has_timeout_seconds_): n += 9
return n + 3
def ByteSizePartial(self):
n = 0
if (self.has_socket_descriptor_):
n += 1
n += self.lengthString(len(self.socket_descriptor_))
if (self.has_data_):
n += 1
n += self.lengthString(len(self.data_))
if (self.has_stream_offset_):
n += 1
n += self.lengthVarInt64(self.stream_offset_)
if (self.has_flags_): n += 1 + self.lengthVarInt64(self.flags_)
if (self.has_send_to_): n += 1 + self.lengthString(self.send_to_.ByteSizePartial())
if (self.has_timeout_seconds_): n += 9
return n
def Clear(self):
self.clear_socket_descriptor()
self.clear_data()
self.clear_stream_offset()
self.clear_flags()
self.clear_send_to()
self.clear_timeout_seconds()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
out.putVarInt32(18)
out.putPrefixedString(self.data_)
out.putVarInt32(24)
out.putVarInt64(self.stream_offset_)
if (self.has_flags_):
out.putVarInt32(32)
out.putVarInt32(self.flags_)
if (self.has_send_to_):
out.putVarInt32(42)
out.putVarInt32(self.send_to_.ByteSize())
self.send_to_.OutputUnchecked(out)
if (self.has_timeout_seconds_):
out.putVarInt32(49)
out.putDouble(self.timeout_seconds_)
def OutputPartial(self, out):
if (self.has_socket_descriptor_):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
if (self.has_data_):
out.putVarInt32(18)
out.putPrefixedString(self.data_)
if (self.has_stream_offset_):
out.putVarInt32(24)
out.putVarInt64(self.stream_offset_)
if (self.has_flags_):
out.putVarInt32(32)
out.putVarInt32(self.flags_)
if (self.has_send_to_):
out.putVarInt32(42)
out.putVarInt32(self.send_to_.ByteSizePartial())
self.send_to_.OutputPartial(out)
if (self.has_timeout_seconds_):
out.putVarInt32(49)
out.putDouble(self.timeout_seconds_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_socket_descriptor(d.getPrefixedString())
continue
if tt == 18:
self.set_data(d.getPrefixedString())
continue
if tt == 24:
self.set_stream_offset(d.getVarInt64())
continue
if tt == 32:
self.set_flags(d.getVarInt32())
continue
if tt == 42:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_send_to().TryMerge(tmp)
continue
if tt == 49:
self.set_timeout_seconds(d.getDouble())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_socket_descriptor_: res+=prefix+("socket_descriptor: %s\n" % self.DebugFormatString(self.socket_descriptor_))
if self.has_data_: res+=prefix+("data: %s\n" % self.DebugFormatString(self.data_))
if self.has_stream_offset_: res+=prefix+("stream_offset: %s\n" % self.DebugFormatInt64(self.stream_offset_))
if self.has_flags_: res+=prefix+("flags: %s\n" % self.DebugFormatInt32(self.flags_))
if self.has_send_to_:
res+=prefix+"send_to <\n"
res+=self.send_to_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_timeout_seconds_: res+=prefix+("timeout_seconds: %s\n" % self.DebugFormat(self.timeout_seconds_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
ksocket_descriptor = 1
kdata = 2
kstream_offset = 3
kflags = 4
ksend_to = 5
ktimeout_seconds = 6
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "socket_descriptor",
2: "data",
3: "stream_offset",
4: "flags",
5: "send_to",
6: "timeout_seconds",
}, 6)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.NUMERIC,
4: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.STRING,
6: ProtocolBuffer.Encoder.DOUBLE,
}, 6, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.SendRequest'
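# SendReply: optional data_sent (tag 1, varint), presumably the number of
# bytes the service accepted from SendRequest.data.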
class SendReply(ProtocolBuffer.ProtocolMessage):
has_data_sent_ = 0
data_sent_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def data_sent(self): return self.data_sent_
def set_data_sent(self, x):
self.has_data_sent_ = 1
self.data_sent_ = x
def clear_data_sent(self):
if self.has_data_sent_:
self.has_data_sent_ = 0
self.data_sent_ = 0
def has_data_sent(self): return self.has_data_sent_
def MergeFrom(self, x):
assert x is not self
if (x.has_data_sent()): self.set_data_sent(x.data_sent())
def Equals(self, x):
if x is self: return 1
if self.has_data_sent_ != x.has_data_sent_: return 0
if self.has_data_sent_ and self.data_sent_ != x.data_sent_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
if (self.has_data_sent_): n += 1 + self.lengthVarInt64(self.data_sent_)
return n
def ByteSizePartial(self):
n = 0
if (self.has_data_sent_): n += 1 + self.lengthVarInt64(self.data_sent_)
return n
def Clear(self):
self.clear_data_sent()
def OutputUnchecked(self, out):
if (self.has_data_sent_):
out.putVarInt32(8)
out.putVarInt32(self.data_sent_)
def OutputPartial(self, out):
if (self.has_data_sent_):
out.putVarInt32(8)
out.putVarInt32(self.data_sent_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 8:
self.set_data_sent(d.getVarInt32())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_data_sent_: res+=prefix+("data_sent: %s\n" % self.DebugFormatInt32(self.data_sent_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kdata_sent = 1
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "data_sent",
}, 1)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.NUMERIC,
}, 1, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.SendReply'
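# ReceiveRequest: the MSG_OOB/MSG_PEEK constants mirror the recv(2) flag
# names. socket_descriptor and data_size are required; flags (tag 3) and
# timeout_seconds (field 5, wire tag 41, double) are optional, leaving field
# number 4 unused.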
class ReceiveRequest(ProtocolBuffer.ProtocolMessage):
MSG_OOB = 1
MSG_PEEK = 2
_Flags_NAMES = {
1: "MSG_OOB",
2: "MSG_PEEK",
}
def Flags_Name(cls, x): return cls._Flags_NAMES.get(x, "")
Flags_Name = classmethod(Flags_Name)
has_socket_descriptor_ = 0
socket_descriptor_ = ""
has_data_size_ = 0
data_size_ = 0
has_flags_ = 0
flags_ = 0
has_timeout_seconds_ = 0
timeout_seconds_ = -1.0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def socket_descriptor(self): return self.socket_descriptor_
def set_socket_descriptor(self, x):
self.has_socket_descriptor_ = 1
self.socket_descriptor_ = x
def clear_socket_descriptor(self):
if self.has_socket_descriptor_:
self.has_socket_descriptor_ = 0
self.socket_descriptor_ = ""
def has_socket_descriptor(self): return self.has_socket_descriptor_
def data_size(self): return self.data_size_
def set_data_size(self, x):
self.has_data_size_ = 1
self.data_size_ = x
def clear_data_size(self):
if self.has_data_size_:
self.has_data_size_ = 0
self.data_size_ = 0
def has_data_size(self): return self.has_data_size_
def flags(self): return self.flags_
def set_flags(self, x):
self.has_flags_ = 1
self.flags_ = x
def clear_flags(self):
if self.has_flags_:
self.has_flags_ = 0
self.flags_ = 0
def has_flags(self): return self.has_flags_
def timeout_seconds(self): return self.timeout_seconds_
def set_timeout_seconds(self, x):
self.has_timeout_seconds_ = 1
self.timeout_seconds_ = x
def clear_timeout_seconds(self):
if self.has_timeout_seconds_:
self.has_timeout_seconds_ = 0
self.timeout_seconds_ = -1.0
def has_timeout_seconds(self): return self.has_timeout_seconds_
def MergeFrom(self, x):
assert x is not self
if (x.has_socket_descriptor()): self.set_socket_descriptor(x.socket_descriptor())
if (x.has_data_size()): self.set_data_size(x.data_size())
if (x.has_flags()): self.set_flags(x.flags())
if (x.has_timeout_seconds()): self.set_timeout_seconds(x.timeout_seconds())
def Equals(self, x):
if x is self: return 1
if self.has_socket_descriptor_ != x.has_socket_descriptor_: return 0
if self.has_socket_descriptor_ and self.socket_descriptor_ != x.socket_descriptor_: return 0
if self.has_data_size_ != x.has_data_size_: return 0
if self.has_data_size_ and self.data_size_ != x.data_size_: return 0
if self.has_flags_ != x.has_flags_: return 0
if self.has_flags_ and self.flags_ != x.flags_: return 0
if self.has_timeout_seconds_ != x.has_timeout_seconds_: return 0
if self.has_timeout_seconds_ and self.timeout_seconds_ != x.timeout_seconds_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_socket_descriptor_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: socket_descriptor not set.')
if (not self.has_data_size_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: data_size not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.socket_descriptor_))
n += self.lengthVarInt64(self.data_size_)
if (self.has_flags_): n += 1 + self.lengthVarInt64(self.flags_)
if (self.has_timeout_seconds_): n += 9
return n + 2
def ByteSizePartial(self):
n = 0
if (self.has_socket_descriptor_):
n += 1
n += self.lengthString(len(self.socket_descriptor_))
if (self.has_data_size_):
n += 1
n += self.lengthVarInt64(self.data_size_)
if (self.has_flags_): n += 1 + self.lengthVarInt64(self.flags_)
if (self.has_timeout_seconds_): n += 9
return n
def Clear(self):
self.clear_socket_descriptor()
self.clear_data_size()
self.clear_flags()
self.clear_timeout_seconds()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
out.putVarInt32(16)
out.putVarInt32(self.data_size_)
if (self.has_flags_):
out.putVarInt32(24)
out.putVarInt32(self.flags_)
if (self.has_timeout_seconds_):
out.putVarInt32(41)
out.putDouble(self.timeout_seconds_)
def OutputPartial(self, out):
if (self.has_socket_descriptor_):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
if (self.has_data_size_):
out.putVarInt32(16)
out.putVarInt32(self.data_size_)
if (self.has_flags_):
out.putVarInt32(24)
out.putVarInt32(self.flags_)
if (self.has_timeout_seconds_):
out.putVarInt32(41)
out.putDouble(self.timeout_seconds_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_socket_descriptor(d.getPrefixedString())
continue
if tt == 16:
self.set_data_size(d.getVarInt32())
continue
if tt == 24:
self.set_flags(d.getVarInt32())
continue
if tt == 41:
self.set_timeout_seconds(d.getDouble())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_socket_descriptor_: res+=prefix+("socket_descriptor: %s\n" % self.DebugFormatString(self.socket_descriptor_))
if self.has_data_size_: res+=prefix+("data_size: %s\n" % self.DebugFormatInt32(self.data_size_))
if self.has_flags_: res+=prefix+("flags: %s\n" % self.DebugFormatInt32(self.flags_))
if self.has_timeout_seconds_: res+=prefix+("timeout_seconds: %s\n" % self.DebugFormat(self.timeout_seconds_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
ksocket_descriptor = 1
kdata_size = 2
kflags = 3
ktimeout_seconds = 5
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "socket_descriptor",
2: "data_size",
3: "flags",
5: "timeout_seconds",
}, 5)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.NUMERIC,
5: ProtocolBuffer.Encoder.DOUBLE,
}, 5, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.ReceiveRequest'
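# Editor's sketch, not generated output: building a ReceiveRequest. The message
# declares socket_descriptor and data_size as required (see IsInitialized
# above), while flags and timeout_seconds stay optional; MSG_PEEK is one of the
# Flags constants defined on the class.
def _example_receive_request():
  req = ReceiveRequest()
  req.set_socket_descriptor('sock-1')   # opaque descriptor; placeholder value
  req.set_data_size(4096)               # upper bound on bytes to receive
  req.set_flags(ReceiveRequest.MSG_PEEK)
  errors = []
  assert req.IsInitialized(errors), errors
  return req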
class ReceiveReply(ProtocolBuffer.ProtocolMessage):
has_stream_offset_ = 0
stream_offset_ = 0
has_data_ = 0
data_ = ""
has_received_from_ = 0
received_from_ = None
has_buffer_size_ = 0
buffer_size_ = 0
def __init__(self, contents=None):
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def stream_offset(self): return self.stream_offset_
def set_stream_offset(self, x):
self.has_stream_offset_ = 1
self.stream_offset_ = x
def clear_stream_offset(self):
if self.has_stream_offset_:
self.has_stream_offset_ = 0
self.stream_offset_ = 0
def has_stream_offset(self): return self.has_stream_offset_
def data(self): return self.data_
def set_data(self, x):
self.has_data_ = 1
self.data_ = x
def clear_data(self):
if self.has_data_:
self.has_data_ = 0
self.data_ = ""
def has_data(self): return self.has_data_
def received_from(self):
if self.received_from_ is None:
self.lazy_init_lock_.acquire()
try:
if self.received_from_ is None: self.received_from_ = AddressPort()
finally:
self.lazy_init_lock_.release()
return self.received_from_
def mutable_received_from(self): self.has_received_from_ = 1; return self.received_from()
def clear_received_from(self):
if self.has_received_from_:
self.has_received_from_ = 0
if self.received_from_ is not None: self.received_from_.Clear()
def has_received_from(self): return self.has_received_from_
def buffer_size(self): return self.buffer_size_
def set_buffer_size(self, x):
self.has_buffer_size_ = 1
self.buffer_size_ = x
def clear_buffer_size(self):
if self.has_buffer_size_:
self.has_buffer_size_ = 0
self.buffer_size_ = 0
def has_buffer_size(self): return self.has_buffer_size_
def MergeFrom(self, x):
assert x is not self
if (x.has_stream_offset()): self.set_stream_offset(x.stream_offset())
if (x.has_data()): self.set_data(x.data())
if (x.has_received_from()): self.mutable_received_from().MergeFrom(x.received_from())
if (x.has_buffer_size()): self.set_buffer_size(x.buffer_size())
def Equals(self, x):
if x is self: return 1
if self.has_stream_offset_ != x.has_stream_offset_: return 0
if self.has_stream_offset_ and self.stream_offset_ != x.stream_offset_: return 0
if self.has_data_ != x.has_data_: return 0
if self.has_data_ and self.data_ != x.data_: return 0
if self.has_received_from_ != x.has_received_from_: return 0
if self.has_received_from_ and self.received_from_ != x.received_from_: return 0
if self.has_buffer_size_ != x.has_buffer_size_: return 0
if self.has_buffer_size_ and self.buffer_size_ != x.buffer_size_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (self.has_received_from_ and not self.received_from_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
if (self.has_stream_offset_): n += 1 + self.lengthVarInt64(self.stream_offset_)
if (self.has_data_): n += 1 + self.lengthString(len(self.data_))
if (self.has_received_from_): n += 1 + self.lengthString(self.received_from_.ByteSize())
if (self.has_buffer_size_): n += 1 + self.lengthVarInt64(self.buffer_size_)
return n
def ByteSizePartial(self):
n = 0
if (self.has_stream_offset_): n += 1 + self.lengthVarInt64(self.stream_offset_)
if (self.has_data_): n += 1 + self.lengthString(len(self.data_))
if (self.has_received_from_): n += 1 + self.lengthString(self.received_from_.ByteSizePartial())
if (self.has_buffer_size_): n += 1 + self.lengthVarInt64(self.buffer_size_)
return n
def Clear(self):
self.clear_stream_offset()
self.clear_data()
self.clear_received_from()
self.clear_buffer_size()
def OutputUnchecked(self, out):
if (self.has_stream_offset_):
out.putVarInt32(16)
out.putVarInt64(self.stream_offset_)
if (self.has_data_):
out.putVarInt32(26)
out.putPrefixedString(self.data_)
if (self.has_received_from_):
out.putVarInt32(34)
out.putVarInt32(self.received_from_.ByteSize())
self.received_from_.OutputUnchecked(out)
if (self.has_buffer_size_):
out.putVarInt32(40)
out.putVarInt32(self.buffer_size_)
def OutputPartial(self, out):
if (self.has_stream_offset_):
out.putVarInt32(16)
out.putVarInt64(self.stream_offset_)
if (self.has_data_):
out.putVarInt32(26)
out.putPrefixedString(self.data_)
if (self.has_received_from_):
out.putVarInt32(34)
out.putVarInt32(self.received_from_.ByteSizePartial())
self.received_from_.OutputPartial(out)
if (self.has_buffer_size_):
out.putVarInt32(40)
out.putVarInt32(self.buffer_size_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 16:
self.set_stream_offset(d.getVarInt64())
continue
if tt == 26:
self.set_data(d.getPrefixedString())
continue
if tt == 34:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_received_from().TryMerge(tmp)
continue
if tt == 40:
self.set_buffer_size(d.getVarInt32())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_stream_offset_: res+=prefix+("stream_offset: %s\n" % self.DebugFormatInt64(self.stream_offset_))
if self.has_data_: res+=prefix+("data: %s\n" % self.DebugFormatString(self.data_))
if self.has_received_from_:
res+=prefix+"received_from <\n"
res+=self.received_from_.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
if self.has_buffer_size_: res+=prefix+("buffer_size: %s\n" % self.DebugFormatInt32(self.buffer_size_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kstream_offset = 2
kdata = 3
kreceived_from = 4
kbuffer_size = 5
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
2: "stream_offset",
3: "data",
4: "received_from",
5: "buffer_size",
}, 5)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.STRING,
4: ProtocolBuffer.Encoder.STRING,
5: ProtocolBuffer.Encoder.NUMERIC,
}, 5, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.ReceiveReply'
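# Editor's sketch, not generated output: the received_from submessage is built
# lazily under lazy_init_lock_, and mutable_received_from() both creates it and
# marks it present. AddressPort is defined earlier in this module; its port and
# packed_address setters are assumed from the same generated accessor pattern.
def _example_receive_reply_source():
  reply = ReceiveReply()
  addr = reply.mutable_received_from()  # sets has_received_from_ and returns it
  addr.set_port(8080)
  addr.set_packed_address('\x7f\x00\x00\x01')  # 127.0.0.1, packed; placeholder
  return reply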
class PollEvent(ProtocolBuffer.ProtocolMessage):
SOCKET_POLLNONE = 0
SOCKET_POLLIN = 1
SOCKET_POLLPRI = 2
SOCKET_POLLOUT = 4
SOCKET_POLLERR = 8
SOCKET_POLLHUP = 16
SOCKET_POLLNVAL = 32
SOCKET_POLLRDNORM = 64
SOCKET_POLLRDBAND = 128
SOCKET_POLLWRNORM = 256
SOCKET_POLLWRBAND = 512
SOCKET_POLLMSG = 1024
SOCKET_POLLREMOVE = 4096
SOCKET_POLLRDHUP = 8192
_PollEventFlag_NAMES = {
0: "SOCKET_POLLNONE",
1: "SOCKET_POLLIN",
2: "SOCKET_POLLPRI",
4: "SOCKET_POLLOUT",
8: "SOCKET_POLLERR",
16: "SOCKET_POLLHUP",
32: "SOCKET_POLLNVAL",
64: "SOCKET_POLLRDNORM",
128: "SOCKET_POLLRDBAND",
256: "SOCKET_POLLWRNORM",
512: "SOCKET_POLLWRBAND",
1024: "SOCKET_POLLMSG",
4096: "SOCKET_POLLREMOVE",
8192: "SOCKET_POLLRDHUP",
}
def PollEventFlag_Name(cls, x): return cls._PollEventFlag_NAMES.get(x, "")
PollEventFlag_Name = classmethod(PollEventFlag_Name)
has_socket_descriptor_ = 0
socket_descriptor_ = ""
has_requested_events_ = 0
requested_events_ = 0
has_observed_events_ = 0
observed_events_ = 0
def __init__(self, contents=None):
if contents is not None: self.MergeFromString(contents)
def socket_descriptor(self): return self.socket_descriptor_
def set_socket_descriptor(self, x):
self.has_socket_descriptor_ = 1
self.socket_descriptor_ = x
def clear_socket_descriptor(self):
if self.has_socket_descriptor_:
self.has_socket_descriptor_ = 0
self.socket_descriptor_ = ""
def has_socket_descriptor(self): return self.has_socket_descriptor_
def requested_events(self): return self.requested_events_
def set_requested_events(self, x):
self.has_requested_events_ = 1
self.requested_events_ = x
def clear_requested_events(self):
if self.has_requested_events_:
self.has_requested_events_ = 0
self.requested_events_ = 0
def has_requested_events(self): return self.has_requested_events_
def observed_events(self): return self.observed_events_
def set_observed_events(self, x):
self.has_observed_events_ = 1
self.observed_events_ = x
def clear_observed_events(self):
if self.has_observed_events_:
self.has_observed_events_ = 0
self.observed_events_ = 0
def has_observed_events(self): return self.has_observed_events_
def MergeFrom(self, x):
assert x is not self
if (x.has_socket_descriptor()): self.set_socket_descriptor(x.socket_descriptor())
if (x.has_requested_events()): self.set_requested_events(x.requested_events())
if (x.has_observed_events()): self.set_observed_events(x.observed_events())
def Equals(self, x):
if x is self: return 1
if self.has_socket_descriptor_ != x.has_socket_descriptor_: return 0
if self.has_socket_descriptor_ and self.socket_descriptor_ != x.socket_descriptor_: return 0
if self.has_requested_events_ != x.has_requested_events_: return 0
if self.has_requested_events_ and self.requested_events_ != x.requested_events_: return 0
if self.has_observed_events_ != x.has_observed_events_: return 0
if self.has_observed_events_ and self.observed_events_ != x.observed_events_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_socket_descriptor_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: socket_descriptor not set.')
if (not self.has_requested_events_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: requested_events not set.')
if (not self.has_observed_events_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: observed_events not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.socket_descriptor_))
n += self.lengthVarInt64(self.requested_events_)
n += self.lengthVarInt64(self.observed_events_)
return n + 3
def ByteSizePartial(self):
n = 0
if (self.has_socket_descriptor_):
n += 1
n += self.lengthString(len(self.socket_descriptor_))
if (self.has_requested_events_):
n += 1
n += self.lengthVarInt64(self.requested_events_)
if (self.has_observed_events_):
n += 1
n += self.lengthVarInt64(self.observed_events_)
return n
def Clear(self):
self.clear_socket_descriptor()
self.clear_requested_events()
self.clear_observed_events()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
out.putVarInt32(16)
out.putVarInt32(self.requested_events_)
out.putVarInt32(24)
out.putVarInt32(self.observed_events_)
def OutputPartial(self, out):
if (self.has_socket_descriptor_):
out.putVarInt32(10)
out.putPrefixedString(self.socket_descriptor_)
if (self.has_requested_events_):
out.putVarInt32(16)
out.putVarInt32(self.requested_events_)
if (self.has_observed_events_):
out.putVarInt32(24)
out.putVarInt32(self.observed_events_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_socket_descriptor(d.getPrefixedString())
continue
if tt == 16:
self.set_requested_events(d.getVarInt32())
continue
if tt == 24:
self.set_observed_events(d.getVarInt32())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_socket_descriptor_: res+=prefix+("socket_descriptor: %s\n" % self.DebugFormatString(self.socket_descriptor_))
if self.has_requested_events_: res+=prefix+("requested_events: %s\n" % self.DebugFormatInt32(self.requested_events_))
if self.has_observed_events_: res+=prefix+("observed_events: %s\n" % self.DebugFormatInt32(self.observed_events_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
ksocket_descriptor = 1
krequested_events = 2
kobserved_events = 3
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "socket_descriptor",
2: "requested_events",
3: "observed_events",
}, 3)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
3: ProtocolBuffer.Encoder.NUMERIC,
}, 3, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.PollEvent'
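# Editor's sketch, not generated output: requested_events is a bitmask over the
# SOCKET_POLL* constants above, so conditions are OR'ed together on the way in
# and AND-tested on the way out.
def _example_poll_event():
  ev = PollEvent()
  ev.set_socket_descriptor('sock-1')    # placeholder descriptor
  ev.set_requested_events(PollEvent.SOCKET_POLLIN | PollEvent.SOCKET_POLLERR)
  readable = bool(ev.observed_events() & PollEvent.SOCKET_POLLIN)
  return ev, readable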
class PollRequest(ProtocolBuffer.ProtocolMessage):
has_timeout_seconds_ = 0
timeout_seconds_ = -1.0
def __init__(self, contents=None):
self.events_ = []
if contents is not None: self.MergeFromString(contents)
def events_size(self): return len(self.events_)
def events_list(self): return self.events_
def events(self, i):
return self.events_[i]
def mutable_events(self, i):
return self.events_[i]
def add_events(self):
x = PollEvent()
self.events_.append(x)
return x
def clear_events(self):
self.events_ = []
def timeout_seconds(self): return self.timeout_seconds_
def set_timeout_seconds(self, x):
self.has_timeout_seconds_ = 1
self.timeout_seconds_ = x
def clear_timeout_seconds(self):
if self.has_timeout_seconds_:
self.has_timeout_seconds_ = 0
self.timeout_seconds_ = -1.0
def has_timeout_seconds(self): return self.has_timeout_seconds_
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.events_size()): self.add_events().CopyFrom(x.events(i))
if (x.has_timeout_seconds()): self.set_timeout_seconds(x.timeout_seconds())
def Equals(self, x):
if x is self: return 1
if len(self.events_) != len(x.events_): return 0
for e1, e2 in zip(self.events_, x.events_):
if e1 != e2: return 0
if self.has_timeout_seconds_ != x.has_timeout_seconds_: return 0
if self.has_timeout_seconds_ and self.timeout_seconds_ != x.timeout_seconds_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.events_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += 1 * len(self.events_)
for i in xrange(len(self.events_)): n += self.lengthString(self.events_[i].ByteSize())
if (self.has_timeout_seconds_): n += 9
return n
def ByteSizePartial(self):
n = 0
n += 1 * len(self.events_)
for i in xrange(len(self.events_)): n += self.lengthString(self.events_[i].ByteSizePartial())
if (self.has_timeout_seconds_): n += 9
return n
def Clear(self):
self.clear_events()
self.clear_timeout_seconds()
def OutputUnchecked(self, out):
for i in xrange(len(self.events_)):
out.putVarInt32(10)
out.putVarInt32(self.events_[i].ByteSize())
self.events_[i].OutputUnchecked(out)
if (self.has_timeout_seconds_):
out.putVarInt32(17)
out.putDouble(self.timeout_seconds_)
def OutputPartial(self, out):
for i in xrange(len(self.events_)):
out.putVarInt32(10)
out.putVarInt32(self.events_[i].ByteSizePartial())
self.events_[i].OutputPartial(out)
if (self.has_timeout_seconds_):
out.putVarInt32(17)
out.putDouble(self.timeout_seconds_)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_events().TryMerge(tmp)
continue
if tt == 17:
self.set_timeout_seconds(d.getDouble())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.events_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("events%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
if self.has_timeout_seconds_: res+=prefix+("timeout_seconds: %s\n" % self.DebugFormat(self.timeout_seconds_))
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kevents = 1
ktimeout_seconds = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "events",
2: "timeout_seconds",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.DOUBLE,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.PollRequest'
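# Editor's sketch, not generated output: add_events() appends a fresh PollEvent
# and returns it for in-place population, the repeated-message pattern used
# throughout this module. The timeout default of -1.0 presumably means "no
# timeout"; 5.0 here is an arbitrary example value.
def _example_poll_request(descriptors):
  req = PollRequest()
  for sd in descriptors:
    ev = req.add_events()
    ev.set_socket_descriptor(sd)
    ev.set_requested_events(PollEvent.SOCKET_POLLIN)
  req.set_timeout_seconds(5.0)
  return req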
class PollReply(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.events_ = []
if contents is not None: self.MergeFromString(contents)
def events_size(self): return len(self.events_)
def events_list(self): return self.events_
def events(self, i):
return self.events_[i]
def mutable_events(self, i):
return self.events_[i]
def add_events(self):
x = PollEvent()
self.events_.append(x)
return x
def clear_events(self):
self.events_ = []
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.events_size()): self.add_events().CopyFrom(x.events(i))
def Equals(self, x):
if x is self: return 1
if len(self.events_) != len(x.events_): return 0
for e1, e2 in zip(self.events_, x.events_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.events_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += 1 * len(self.events_)
for i in xrange(len(self.events_)): n += self.lengthString(self.events_[i].ByteSize())
return n
def ByteSizePartial(self):
n = 0
n += 1 * len(self.events_)
for i in xrange(len(self.events_)): n += self.lengthString(self.events_[i].ByteSizePartial())
return n
def Clear(self):
self.clear_events()
def OutputUnchecked(self, out):
for i in xrange(len(self.events_)):
out.putVarInt32(18)
out.putVarInt32(self.events_[i].ByteSize())
self.events_[i].OutputUnchecked(out)
def OutputPartial(self, out):
for i in xrange(len(self.events_)):
out.putVarInt32(18)
out.putVarInt32(self.events_[i].ByteSizePartial())
self.events_[i].OutputPartial(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_events().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.events_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("events%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kevents = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
2: "events",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.STRING,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.PollReply'
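# Editor's sketch, not generated output: scanning a parsed PollReply for the
# descriptors whose observed_events carry an error or hangup bit.
def _example_poll_reply_errors(reply):
  bad = PollEvent.SOCKET_POLLERR | PollEvent.SOCKET_POLLHUP
  return [ev.socket_descriptor() for ev in reply.events_list()
          if ev.observed_events() & bad]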
class ResolveRequest(ProtocolBuffer.ProtocolMessage):
has_name_ = 0
name_ = ""
def __init__(self, contents=None):
self.address_families_ = []
if contents is not None: self.MergeFromString(contents)
def name(self): return self.name_
def set_name(self, x):
self.has_name_ = 1
self.name_ = x
def clear_name(self):
if self.has_name_:
self.has_name_ = 0
self.name_ = ""
def has_name(self): return self.has_name_
def address_families_size(self): return len(self.address_families_)
def address_families_list(self): return self.address_families_
def address_families(self, i):
return self.address_families_[i]
def set_address_families(self, i, x):
self.address_families_[i] = x
def add_address_families(self, x):
self.address_families_.append(x)
def clear_address_families(self):
self.address_families_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_name()): self.set_name(x.name())
for i in xrange(x.address_families_size()): self.add_address_families(x.address_families(i))
def Equals(self, x):
if x is self: return 1
if self.has_name_ != x.has_name_: return 0
if self.has_name_ and self.name_ != x.name_: return 0
if len(self.address_families_) != len(x.address_families_): return 0
for e1, e2 in zip(self.address_families_, x.address_families_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_name_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: name not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.name_))
n += 1 * len(self.address_families_)
for i in xrange(len(self.address_families_)): n += self.lengthVarInt64(self.address_families_[i])
return n + 1
def ByteSizePartial(self):
n = 0
if (self.has_name_):
n += 1
n += self.lengthString(len(self.name_))
n += 1 * len(self.address_families_)
for i in xrange(len(self.address_families_)): n += self.lengthVarInt64(self.address_families_[i])
return n
def Clear(self):
self.clear_name()
self.clear_address_families()
def OutputUnchecked(self, out):
out.putVarInt32(10)
out.putPrefixedString(self.name_)
for i in xrange(len(self.address_families_)):
out.putVarInt32(16)
out.putVarInt32(self.address_families_[i])
def OutputPartial(self, out):
if (self.has_name_):
out.putVarInt32(10)
out.putPrefixedString(self.name_)
for i in xrange(len(self.address_families_)):
out.putVarInt32(16)
out.putVarInt32(self.address_families_[i])
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
self.set_name(d.getPrefixedString())
continue
if tt == 16:
self.add_address_families(d.getVarInt32())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
cnt=0
for e in self.address_families_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("address_families%s: %s\n" % (elm, self.DebugFormatInt32(e)))
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kname = 1
kaddress_families = 2
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
1: "name",
2: "address_families",
}, 2)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
1: ProtocolBuffer.Encoder.STRING,
2: ProtocolBuffer.Encoder.NUMERIC,
}, 2, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.ResolveRequest'
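# Editor's sketch, not generated output: ResolveRequest pairs one required
# hostname with a repeated scalar field, where add_address_families() appends a
# numeric value directly. The 2/10 constants are AF_INET/AF_INET6 as on Linux
# (an assumption; socket.AF_INET from the standard library would be equivalent
# there).
def _example_resolve_request(hostname):
  req = ResolveRequest()
  req.set_name(hostname)
  req.add_address_families(2)    # AF_INET (Linux value)
  req.add_address_families(10)   # AF_INET6 (Linux value)
  return req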
class ResolveReply(ProtocolBuffer.ProtocolMessage):
SOCKET_EAI_ADDRFAMILY = 1
SOCKET_EAI_AGAIN = 2
SOCKET_EAI_BADFLAGS = 3
SOCKET_EAI_FAIL = 4
SOCKET_EAI_FAMILY = 5
SOCKET_EAI_MEMORY = 6
SOCKET_EAI_NODATA = 7
SOCKET_EAI_NONAME = 8
SOCKET_EAI_SERVICE = 9
SOCKET_EAI_SOCKTYPE = 10
SOCKET_EAI_SYSTEM = 11
SOCKET_EAI_BADHINTS = 12
SOCKET_EAI_PROTOCOL = 13
SOCKET_EAI_OVERFLOW = 14
SOCKET_EAI_MAX = 15
_ErrorCode_NAMES = {
1: "SOCKET_EAI_ADDRFAMILY",
2: "SOCKET_EAI_AGAIN",
3: "SOCKET_EAI_BADFLAGS",
4: "SOCKET_EAI_FAIL",
5: "SOCKET_EAI_FAMILY",
6: "SOCKET_EAI_MEMORY",
7: "SOCKET_EAI_NODATA",
8: "SOCKET_EAI_NONAME",
9: "SOCKET_EAI_SERVICE",
10: "SOCKET_EAI_SOCKTYPE",
11: "SOCKET_EAI_SYSTEM",
12: "SOCKET_EAI_BADHINTS",
13: "SOCKET_EAI_PROTOCOL",
14: "SOCKET_EAI_OVERFLOW",
15: "SOCKET_EAI_MAX",
}
def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
ErrorCode_Name = classmethod(ErrorCode_Name)
has_canonical_name_ = 0
canonical_name_ = ""
def __init__(self, contents=None):
self.packed_address_ = []
self.aliases_ = []
if contents is not None: self.MergeFromString(contents)
def packed_address_size(self): return len(self.packed_address_)
def packed_address_list(self): return self.packed_address_
def packed_address(self, i):
return self.packed_address_[i]
def set_packed_address(self, i, x):
self.packed_address_[i] = x
def add_packed_address(self, x):
self.packed_address_.append(x)
def clear_packed_address(self):
self.packed_address_ = []
def canonical_name(self): return self.canonical_name_
def set_canonical_name(self, x):
self.has_canonical_name_ = 1
self.canonical_name_ = x
def clear_canonical_name(self):
if self.has_canonical_name_:
self.has_canonical_name_ = 0
self.canonical_name_ = ""
def has_canonical_name(self): return self.has_canonical_name_
def aliases_size(self): return len(self.aliases_)
def aliases_list(self): return self.aliases_
def aliases(self, i):
return self.aliases_[i]
def set_aliases(self, i, x):
self.aliases_[i] = x
def add_aliases(self, x):
self.aliases_.append(x)
def clear_aliases(self):
self.aliases_ = []
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.packed_address_size()): self.add_packed_address(x.packed_address(i))
if (x.has_canonical_name()): self.set_canonical_name(x.canonical_name())
for i in xrange(x.aliases_size()): self.add_aliases(x.aliases(i))
def Equals(self, x):
if x is self: return 1
if len(self.packed_address_) != len(x.packed_address_): return 0
for e1, e2 in zip(self.packed_address_, x.packed_address_):
if e1 != e2: return 0
if self.has_canonical_name_ != x.has_canonical_name_: return 0
if self.has_canonical_name_ and self.canonical_name_ != x.canonical_name_: return 0
if len(self.aliases_) != len(x.aliases_): return 0
for e1, e2 in zip(self.aliases_, x.aliases_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
return initialized
def ByteSize(self):
n = 0
n += 1 * len(self.packed_address_)
for i in xrange(len(self.packed_address_)): n += self.lengthString(len(self.packed_address_[i]))
if (self.has_canonical_name_): n += 1 + self.lengthString(len(self.canonical_name_))
n += 1 * len(self.aliases_)
for i in xrange(len(self.aliases_)): n += self.lengthString(len(self.aliases_[i]))
return n
def ByteSizePartial(self):
n = 0
n += 1 * len(self.packed_address_)
for i in xrange(len(self.packed_address_)): n += self.lengthString(len(self.packed_address_[i]))
if (self.has_canonical_name_): n += 1 + self.lengthString(len(self.canonical_name_))
n += 1 * len(self.aliases_)
for i in xrange(len(self.aliases_)): n += self.lengthString(len(self.aliases_[i]))
return n
def Clear(self):
self.clear_packed_address()
self.clear_canonical_name()
self.clear_aliases()
def OutputUnchecked(self, out):
for i in xrange(len(self.packed_address_)):
out.putVarInt32(18)
out.putPrefixedString(self.packed_address_[i])
if (self.has_canonical_name_):
out.putVarInt32(26)
out.putPrefixedString(self.canonical_name_)
for i in xrange(len(self.aliases_)):
out.putVarInt32(34)
out.putPrefixedString(self.aliases_[i])
def OutputPartial(self, out):
for i in xrange(len(self.packed_address_)):
out.putVarInt32(18)
out.putPrefixedString(self.packed_address_[i])
if (self.has_canonical_name_):
out.putVarInt32(26)
out.putPrefixedString(self.canonical_name_)
for i in xrange(len(self.aliases_)):
out.putVarInt32(34)
out.putPrefixedString(self.aliases_[i])
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 18:
self.add_packed_address(d.getPrefixedString())
continue
if tt == 26:
self.set_canonical_name(d.getPrefixedString())
continue
if tt == 34:
self.add_aliases(d.getPrefixedString())
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.packed_address_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("packed_address%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
if self.has_canonical_name_: res+=prefix+("canonical_name: %s\n" % self.DebugFormatString(self.canonical_name_))
cnt=0
for e in self.aliases_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("aliases%s: %s\n" % (elm, self.DebugFormatString(e)))
cnt+=1
return res
def _BuildTagLookupTable(sparse, maxtag, default=None):
return tuple([sparse.get(i, default) for i in xrange(0, 1+maxtag)])
kpacked_address = 2
kcanonical_name = 3
kaliases = 4
_TEXT = _BuildTagLookupTable({
0: "ErrorCode",
2: "packed_address",
3: "canonical_name",
4: "aliases",
}, 4)
_TYPES = _BuildTagLookupTable({
0: ProtocolBuffer.Encoder.NUMERIC,
2: ProtocolBuffer.Encoder.STRING,
3: ProtocolBuffer.Encoder.STRING,
4: ProtocolBuffer.Encoder.STRING,
}, 4, ProtocolBuffer.Encoder.MAX_TYPE)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
_PROTO_DESCRIPTOR_NAME = 'apphosting.ResolveReply'
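# Editor's sketch, not generated output: summarizing a parsed ResolveReply.
# packed_address entries are raw address bytes, and ErrorCode_Name() maps the
# SOCKET_EAI_* numeric codes back to their names for diagnostics.
def _example_resolve_reply_summary(reply):
  return {
      'canonical_name': reply.canonical_name() if reply.has_canonical_name() else None,
      'addresses': list(reply.packed_address_list()),
      'aliases': list(reply.aliases_list()),
  }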
if _extension_runtime:
pass
__all__ = ['RemoteSocketServiceError','AddressPort','CreateSocketRequest','CreateSocketReply','BindRequest','BindReply','GetSocketNameRequest','GetSocketNameReply','GetPeerNameRequest','GetPeerNameReply','SocketOption','SetSocketOptionsRequest','SetSocketOptionsReply','GetSocketOptionsRequest','GetSocketOptionsReply','ConnectRequest','ConnectReply','ListenRequest','ListenReply','AcceptRequest','AcceptReply','ShutDownRequest','ShutDownReply','CloseRequest','CloseReply','SendRequest','SendReply','ReceiveRequest','ReceiveReply','PollEvent','PollRequest','PollReply','ResolveRequest','ResolveReply']
# ---------------------------------------------------------------------------
# Next file in this dump: python/powermeter_api/api/heatsink_api.py
# Repository: thracesystems/powermeter-api (license: Apache-2.0)
# ---------------------------------------------------------------------------
# coding: utf-8
"""
PowerMeter API
API # noqa: E501
The version of the OpenAPI document: 2021.4.1
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from powermeter_api.api_client import ApiClient
from powermeter_api.exceptions import ( # noqa: F401
ApiTypeError,
ApiValueError
)
class HeatsinkApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def haetsink_library_clone(self, heatsinkmasterid, **kwargs): # noqa: E501
"""haetsink_library_clone # noqa: E501
Clone a heat sink # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.haetsink_library_clone(heatsinkmasterid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkmasterid: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: HeatsinkMaster
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.haetsink_library_clone_with_http_info(heatsinkmasterid, **kwargs) # noqa: E501
def haetsink_library_clone_with_http_info(self, heatsinkmasterid, **kwargs): # noqa: E501
"""haetsink_library_clone # noqa: E501
Clone a heat sink # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.haetsink_library_clone_with_http_info(heatsinkmasterid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkmasterid: (required)
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(HeatsinkMaster, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'heatsinkmasterid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method haetsink_library_clone" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'heatsinkmasterid' is set
if self.api_client.client_side_validation and ('heatsinkmasterid' not in local_var_params or # noqa: E501
local_var_params['heatsinkmasterid'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `heatsinkmasterid` when calling `haetsink_library_clone`") # noqa: E501
collection_formats = {}
path_params = {}
if 'heatsinkmasterid' in local_var_params:
path_params['heatsinkmasterid'] = local_var_params['heatsinkmasterid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Basic'] # noqa: E501
return self.api_client.call_api(
'/heatsink/library/{heatsinkmasterid}/clone/', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='HeatsinkMaster', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
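# Editor's sketch, not generated output: wiring up the `api` instance that the
# docstring examples above assume. Configuration and its host/username/password
# attributes follow the usual openapi-generator python layout and are an
# assumption here; the URL and credentials are placeholders.
#
#     from powermeter_api.configuration import Configuration
#     config = Configuration()
#     config.host = 'https://powermeter.example.invalid'
#     config.username = 'user'
#     config.password = 'secret'
#     api = HeatsinkApi(ApiClient(config))
#     master = api.haetsink_library_clone(42)   # clone heat sink master id 42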
def heatsink_commit_create(self, heatsinkid, data, **kwargs): # noqa: E501
"""heatsink_commit_create # noqa: E501
Commit a version # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_commit_create(heatsinkid, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkid: (required)
:param HeatsinkVersion data: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: HeatsinkVersion
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.heatsink_commit_create_with_http_info(heatsinkid, data, **kwargs) # noqa: E501
def heatsink_commit_create_with_http_info(self, heatsinkid, data, **kwargs): # noqa: E501
"""heatsink_commit_create # noqa: E501
Commit a version # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_commit_create_with_http_info(heatsinkid, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkid: (required)
:param HeatsinkVersion data: (required)
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(HeatsinkVersion, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'heatsinkid',
'data'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method heatsink_commit_create" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'heatsinkid' is set
if self.api_client.client_side_validation and ('heatsinkid' not in local_var_params or # noqa: E501
local_var_params['heatsinkid'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `heatsinkid` when calling `heatsink_commit_create`") # noqa: E501
# verify the required parameter 'data' is set
if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501
local_var_params['data'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `data` when calling `heatsink_commit_create`") # noqa: E501
collection_formats = {}
path_params = {}
if 'heatsinkid' in local_var_params:
path_params['heatsinkid'] = local_var_params['heatsinkid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in local_var_params:
body_params = local_var_params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Basic'] # noqa: E501
return self.api_client.call_api(
'/heatsink/{heatsinkid}/commit/', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='HeatsinkVersion', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def heatsink_library_create(self, data, **kwargs): # noqa: E501
"""heatsink_library_create # noqa: E501
Create a heatsink # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_library_create(data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param HeatsinkMaster data: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: HeatsinkMaster
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.heatsink_library_create_with_http_info(data, **kwargs) # noqa: E501
def heatsink_library_create_with_http_info(self, data, **kwargs): # noqa: E501
"""heatsink_library_create # noqa: E501
Create a heatsink # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_library_create_with_http_info(data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param HeatsinkMaster data: (required)
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(HeatsinkMaster, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'data'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method heatsink_library_create" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'data' is set
if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501
local_var_params['data'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `data` when calling `heatsink_library_create`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in local_var_params:
body_params = local_var_params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Basic'] # noqa: E501
return self.api_client.call_api(
'/heatsink/library/', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='HeatsinkMaster', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
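# Editor's sketch, not generated output: a create call needs a HeatsinkMaster
# body. The models import path and the `name` keyword are assumptions based on
# the standard generated-models layout, not confirmed by this file.
#
#     from powermeter_api.models import HeatsinkMaster
#     master = api.heatsink_library_create(HeatsinkMaster(name='hs-40mm'))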
def heatsink_library_delete(self, heatsinkmasterid, **kwargs): # noqa: E501
"""heatsink_library_delete # noqa: E501
Delete a heatsink # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_library_delete(heatsinkmasterid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkmasterid: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.heatsink_library_delete_with_http_info(heatsinkmasterid, **kwargs) # noqa: E501
def heatsink_library_delete_with_http_info(self, heatsinkmasterid, **kwargs): # noqa: E501
"""heatsink_library_delete # noqa: E501
Delete a heatsink # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_library_delete_with_http_info(heatsinkmasterid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkmasterid: (required)
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'heatsinkmasterid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method heatsink_library_delete" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'heatsinkmasterid' is set
if self.api_client.client_side_validation and ('heatsinkmasterid' not in local_var_params or # noqa: E501
local_var_params['heatsinkmasterid'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `heatsinkmasterid` when calling `heatsink_library_delete`") # noqa: E501
collection_formats = {}
path_params = {}
if 'heatsinkmasterid' in local_var_params:
path_params['heatsinkmasterid'] = local_var_params['heatsinkmasterid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Basic'] # noqa: E501
return self.api_client.call_api(
'/heatsink/library/{heatsinkmasterid}/', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
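# Editor's sketch, not generated output: delete returns None, so the
# *_with_http_info variant is the way to confirm the HTTP status. The expected
# success codes below are an assumption, not taken from this file.
#
#     body, status, headers = api.heatsink_library_delete_with_http_info(42)
#     assert status in (200, 204)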
def heatsink_library_list(self, **kwargs): # noqa: E501
"""heatsink_library_list # noqa: E501
Get list of heat sinks # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_library_list(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: list[HeatsinkMaster]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.heatsink_library_list_with_http_info(**kwargs) # noqa: E501
def heatsink_library_list_with_http_info(self, **kwargs): # noqa: E501
"""heatsink_library_list # noqa: E501
Get list of heat sinks # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_library_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(list[HeatsinkMaster], status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method heatsink_library_list" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Basic'] # noqa: E501
return self.api_client.call_api(
'/heatsink/library/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[HeatsinkMaster]', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
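# Editor's sketch, not generated output: synchronous versus asynchronous use of
# the list call, matching the async_req contract documented above.
#
#     masters = api.heatsink_library_list()               # blocks; list[HeatsinkMaster]
#     thread = api.heatsink_library_list(async_req=True)  # returns the request thread
#     masters = thread.get()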
def heatsink_library_read(self, heatsinkmasterid, **kwargs): # noqa: E501
"""heatsink_library_read # noqa: E501
Get a heatsink # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_library_read(heatsinkmasterid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkmasterid: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: HeatsinkMaster
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.heatsink_library_read_with_http_info(heatsinkmasterid, **kwargs) # noqa: E501
def heatsink_library_read_with_http_info(self, heatsinkmasterid, **kwargs): # noqa: E501
"""heatsink_library_read # noqa: E501
Get a heatsink # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_library_read_with_http_info(heatsinkmasterid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkmasterid: (required)
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number provided, it will be total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(HeatsinkMaster, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'heatsinkmasterid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method heatsink_library_read" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'heatsinkmasterid' is set
if self.api_client.client_side_validation and ('heatsinkmasterid' not in local_var_params or # noqa: E501
local_var_params['heatsinkmasterid'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `heatsinkmasterid` when calling `heatsink_library_read`") # noqa: E501
collection_formats = {}
path_params = {}
if 'heatsinkmasterid' in local_var_params:
path_params['heatsinkmasterid'] = local_var_params['heatsinkmasterid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Basic'] # noqa: E501
return self.api_client.call_api(
'/heatsink/library/{heatsinkmasterid}/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='HeatsinkMaster', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
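# Editor's sketch, not generated output: _preload_content=False returns the raw
# urllib3 response instead of a deserialized HeatsinkMaster, per the parameter
# documentation above.
#
#     raw = api.heatsink_library_read(42, _preload_content=False)
#     payload = raw.data   # undecoded response bytes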
def heatsink_library_update(self, heatsinkmasterid, data, **kwargs): # noqa: E501
"""heatsink_library_update # noqa: E501
Update a heatsink # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_library_update(heatsinkmasterid, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkmasterid: (required)
:param HeatsinkMaster data: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: HeatsinkMaster
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.heatsink_library_update_with_http_info(heatsinkmasterid, data, **kwargs) # noqa: E501
def heatsink_library_update_with_http_info(self, heatsinkmasterid, data, **kwargs): # noqa: E501
"""heatsink_library_update # noqa: E501
Update a heatsink # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_library_update_with_http_info(heatsinkmasterid, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkmasterid: (required)
:param HeatsinkMaster data: (required)
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(HeatsinkMaster, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'heatsinkmasterid',
'data'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method heatsink_library_update" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'heatsinkmasterid' is set
if self.api_client.client_side_validation and ('heatsinkmasterid' not in local_var_params or # noqa: E501
local_var_params['heatsinkmasterid'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `heatsinkmasterid` when calling `heatsink_library_update`") # noqa: E501
# verify the required parameter 'data' is set
if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501
local_var_params['data'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `data` when calling `heatsink_library_update`") # noqa: E501
collection_formats = {}
path_params = {}
if 'heatsinkmasterid' in local_var_params:
path_params['heatsinkmasterid'] = local_var_params['heatsinkmasterid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in local_var_params:
body_params = local_var_params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Basic'] # noqa: E501
return self.api_client.call_api(
'/heatsink/library/{heatsinkmasterid}/', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='HeatsinkMaster', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def heatsink_library_update_list(self, **kwargs): # noqa: E501
"""heatsink_library_update_list # noqa: E501
Returns an array of [current, last update] times # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_library_update_list(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: UpdateData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.heatsink_library_update_list_with_http_info(**kwargs) # noqa: E501
def heatsink_library_update_list_with_http_info(self, **kwargs): # noqa: E501
"""heatsink_library_update_list # noqa: E501
Returns an array of [current, last update] times # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_library_update_list_with_http_info(async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(UpdateData, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method heatsink_library_update_list" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Basic'] # noqa: E501
return self.api_client.call_api(
'/heatsink/library/update/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UpdateData', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def heatsink_library_version_list(self, heatsinkmasterid, **kwargs): # noqa: E501
"""heatsink_library_version_list # noqa: E501
Get Heatsink versions # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_library_version_list(heatsinkmasterid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkmasterid: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: HeatsinkVersion
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.heatsink_library_version_list_with_http_info(heatsinkmasterid, **kwargs) # noqa: E501
def heatsink_library_version_list_with_http_info(self, heatsinkmasterid, **kwargs): # noqa: E501
"""heatsink_library_version_list # noqa: E501
Get Heatsink versions # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_library_version_list_with_http_info(heatsinkmasterid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkmasterid: (required)
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(HeatsinkVersion, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'heatsinkmasterid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method heatsink_library_version_list" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'heatsinkmasterid' is set
if self.api_client.client_side_validation and ('heatsinkmasterid' not in local_var_params or # noqa: E501
local_var_params['heatsinkmasterid'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `heatsinkmasterid` when calling `heatsink_library_version_list`") # noqa: E501
collection_formats = {}
path_params = {}
if 'heatsinkmasterid' in local_var_params:
path_params['heatsinkmasterid'] = local_var_params['heatsinkmasterid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Basic'] # noqa: E501
return self.api_client.call_api(
'/heatsink/library/{heatsinkmasterid}/version/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='HeatsinkVersion', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def heatsink_permissions_list(self, heatsinkid, **kwargs): # noqa: E501
"""heatsink_permissions_list # noqa: E501
Get list of permissions # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_permissions_list(heatsinkid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkid: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.heatsink_permissions_list_with_http_info(heatsinkid, **kwargs) # noqa: E501
def heatsink_permissions_list_with_http_info(self, heatsinkid, **kwargs): # noqa: E501
"""heatsink_permissions_list # noqa: E501
Get list of permissions # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_permissions_list_with_http_info(heatsinkid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkid: (required)
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'heatsinkid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method heatsink_permissions_list" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'heatsinkid' is set
if self.api_client.client_side_validation and ('heatsinkid' not in local_var_params or # noqa: E501
local_var_params['heatsinkid'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `heatsinkid` when calling `heatsink_permissions_list`") # noqa: E501
collection_formats = {}
path_params = {}
if 'heatsinkid' in local_var_params:
path_params['heatsinkid'] = local_var_params['heatsinkid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Basic'] # noqa: E501
return self.api_client.call_api(
'/heatsink/{heatsinkid}/permissions/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def heatsink_permissions_update(self, heatsinkid, **kwargs): # noqa: E501
"""heatsink_permissions_update # noqa: E501
Update list of permissions # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_permissions_update(heatsinkid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkid: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.heatsink_permissions_update_with_http_info(heatsinkid, **kwargs) # noqa: E501
def heatsink_permissions_update_with_http_info(self, heatsinkid, **kwargs): # noqa: E501
"""heatsink_permissions_update # noqa: E501
Update list of permissions # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_permissions_update_with_http_info(heatsinkid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkid: (required)
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: None
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'heatsinkid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method heatsink_permissions_update" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'heatsinkid' is set
if self.api_client.client_side_validation and ('heatsinkid' not in local_var_params or # noqa: E501
local_var_params['heatsinkid'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `heatsinkid` when calling `heatsink_permissions_update`") # noqa: E501
collection_formats = {}
path_params = {}
if 'heatsinkid' in local_var_params:
path_params['heatsinkid'] = local_var_params['heatsinkid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Basic'] # noqa: E501
return self.api_client.call_api(
'/heatsink/{heatsinkid}/permissions/', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def heatsink_read(self, heatsinkid, **kwargs): # noqa: E501
"""heatsink_read # noqa: E501
Get a heatsink # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_read(heatsinkid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkid: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Heatsink
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.heatsink_read_with_http_info(heatsinkid, **kwargs) # noqa: E501
def heatsink_read_with_http_info(self, heatsinkid, **kwargs): # noqa: E501
"""heatsink_read # noqa: E501
Get a heatsink # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_read_with_http_info(heatsinkid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkid: (required)
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(Heatsink, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'heatsinkid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method heatsink_read" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'heatsinkid' is set
if self.api_client.client_side_validation and ('heatsinkid' not in local_var_params or # noqa: E501
local_var_params['heatsinkid'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `heatsinkid` when calling `heatsink_read`") # noqa: E501
collection_formats = {}
path_params = {}
if 'heatsinkid' in local_var_params:
path_params['heatsinkid'] = local_var_params['heatsinkid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Basic'] # noqa: E501
return self.api_client.call_api(
'/heatsink/{heatsinkid}/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Heatsink', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def heatsink_restore_create(self, heatsinkid, **kwargs): # noqa: E501
"""heatsink_restore_create # noqa: E501
Restore a heatsink version as current version # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_restore_create(heatsinkid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkid: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Heatsink
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.heatsink_restore_create_with_http_info(heatsinkid, **kwargs) # noqa: E501
def heatsink_restore_create_with_http_info(self, heatsinkid, **kwargs): # noqa: E501
"""heatsink_restore_create # noqa: E501
Restore a heatsink version as current version # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_restore_create_with_http_info(heatsinkid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkid: (required)
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(Heatsink, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'heatsinkid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method heatsink_restore_create" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'heatsinkid' is set
if self.api_client.client_side_validation and ('heatsinkid' not in local_var_params or # noqa: E501
local_var_params['heatsinkid'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `heatsinkid` when calling `heatsink_restore_create`") # noqa: E501
collection_formats = {}
path_params = {}
if 'heatsinkid' in local_var_params:
path_params['heatsinkid'] = local_var_params['heatsinkid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Basic'] # noqa: E501
return self.api_client.call_api(
'/heatsink/{heatsinkid}/restore/', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Heatsink', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def heatsink_update(self, heatsinkid, data, **kwargs): # noqa: E501
"""heatsink_update # noqa: E501
Update a heatsink # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_update(heatsinkid, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkid: (required)
:param Heatsink data: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: Heatsink
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.heatsink_update_with_http_info(heatsinkid, data, **kwargs) # noqa: E501
def heatsink_update_with_http_info(self, heatsinkid, data, **kwargs): # noqa: E501
"""heatsink_update # noqa: E501
Update a heatsink # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_update_with_http_info(heatsinkid, data, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkid: (required)
:param Heatsink data: (required)
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(Heatsink, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'heatsinkid',
'data'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method heatsink_update" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'heatsinkid' is set
if self.api_client.client_side_validation and ('heatsinkid' not in local_var_params or # noqa: E501
local_var_params['heatsinkid'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `heatsinkid` when calling `heatsink_update`") # noqa: E501
# verify the required parameter 'data' is set
if self.api_client.client_side_validation and ('data' not in local_var_params or # noqa: E501
local_var_params['data'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `data` when calling `heatsink_update`") # noqa: E501
collection_formats = {}
path_params = {}
if 'heatsinkid' in local_var_params:
path_params['heatsinkid'] = local_var_params['heatsinkid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'data' in local_var_params:
body_params = local_var_params['data']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.select_header_content_type( # noqa: E501
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Basic'] # noqa: E501
return self.api_client.call_api(
'/heatsink/{heatsinkid}/', 'PUT',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='Heatsink', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def heatsink_update_list(self, heatsinkid, **kwargs): # noqa: E501
"""heatsink_update_list # noqa: E501
Returns an array of [current, last update] times # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_update_list(heatsinkid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkid: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: UpdateData
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.heatsink_update_list_with_http_info(heatsinkid, **kwargs) # noqa: E501
def heatsink_update_list_with_http_info(self, heatsinkid, **kwargs): # noqa: E501
"""heatsink_update_list # noqa: E501
Returns an array of [current, last update] times # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_update_list_with_http_info(heatsinkid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkid: (required)
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(UpdateData, status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'heatsinkid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method heatsink_update_list" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'heatsinkid' is set
if self.api_client.client_side_validation and ('heatsinkid' not in local_var_params or # noqa: E501
local_var_params['heatsinkid'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `heatsinkid` when calling `heatsink_update_list`") # noqa: E501
collection_formats = {}
path_params = {}
if 'heatsinkid' in local_var_params:
path_params['heatsinkid'] = local_var_params['heatsinkid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Basic'] # noqa: E501
return self.api_client.call_api(
'/heatsink/{heatsinkid}/update/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='UpdateData', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
def heatsink_version_list(self, heatsinkid, **kwargs): # noqa: E501
"""heatsink_version_list # noqa: E501
Get Heatsink versions # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_version_list(heatsinkid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkid: (required)
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: list[HeatsinkVersion]
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
return self.heatsink_version_list_with_http_info(heatsinkid, **kwargs) # noqa: E501
def heatsink_version_list_with_http_info(self, heatsinkid, **kwargs): # noqa: E501
"""heatsink_version_list # noqa: E501
Get Heatsink versions # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.heatsink_version_list_with_http_info(heatsinkid, async_req=True)
>>> result = thread.get()
:param async_req bool: execute request asynchronously
:param int heatsinkid: (required)
:param _return_http_data_only: response data without HTTP status code
and headers
:param _preload_content: if False, the urllib3.HTTPResponse object will
be returned without reading/decoding response
data. Default is True.
:param _request_timeout: timeout setting for this request. If one
number is provided, it will be the total request
timeout. It can also be a pair (tuple) of
(connection, read) timeouts.
:return: tuple(list[HeatsinkVersion], status_code(int), headers(HTTPHeaderDict))
If the method is called asynchronously,
returns the request thread.
"""
local_var_params = locals()
all_params = [
'heatsinkid'
]
all_params.extend(
[
'async_req',
'_return_http_data_only',
'_preload_content',
'_request_timeout'
]
)
for key, val in six.iteritems(local_var_params['kwargs']):
if key not in all_params:
raise ApiTypeError(
"Got an unexpected keyword argument '%s'"
" to method heatsink_version_list" % key
)
local_var_params[key] = val
del local_var_params['kwargs']
# verify the required parameter 'heatsinkid' is set
if self.api_client.client_side_validation and ('heatsinkid' not in local_var_params or # noqa: E501
local_var_params['heatsinkid'] is None): # noqa: E501
raise ApiValueError("Missing the required parameter `heatsinkid` when calling `heatsink_version_list`") # noqa: E501
collection_formats = {}
path_params = {}
if 'heatsinkid' in local_var_params:
path_params['heatsinkid'] = local_var_params['heatsinkid'] # noqa: E501
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['Basic'] # noqa: E501
return self.api_client.call_api(
'/heatsink/{heatsinkid}/version/', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='list[HeatsinkVersion]', # noqa: E501
auth_settings=auth_settings,
async_req=local_var_params.get('async_req'),
_return_http_data_only=local_var_params.get('_return_http_data_only'), # noqa: E501
_preload_content=local_var_params.get('_preload_content', True),
_request_timeout=local_var_params.get('_request_timeout'),
collection_formats=collection_formats)
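# Usage sketch (non-generated; the class name `HeatsinkApi` and the
# Configuration fields are assumptions based on swagger-codegen conventions,
# since the package entry points are not shown in this file):
#
#     config = Configuration()
#     config.username = "user"          # 'Basic' auth, per auth_settings above
#     config.password = "secret"
#     api = HeatsinkApi(ApiClient(config))
#
#     hs = api.heatsink_read(42)                      # synchronous: Heatsink model
#     thread = api.heatsink_read(42, async_req=True)  # asynchronous: a thread
#     hs = thread.get()                               # block for the result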
--- dataset record: Archieved FP/CARLA Simulation/Controller/Controller_2D.py · fadamsyah/final-project · Python · MIT · 3 stars ---
# This code is written for a Python 3 environment
import numpy as np
from numba import njit, float64, int64
from numba.experimental import jitclass
# Stanley Controller & PID Controller with Feed Forward Term
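# Summary of the control laws implemented below (added for readability):
#   longitudinal: u = ff(v_ref) + kp*ev + ki*sum(ev*dt) + kd*d(ev)/dt, saturated
#   lateral (Stanley): delta = ff(curvature) + e_yaw + arctan(ks*e_lat/(kv + v)), saturated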
#########################################################################################
# VERSION 1
spec_v1 = [('_kp', float64), ('_ki', float64), ('_kd', float64), ('_ff_params', float64[:]),
('_sat_long_max', float64), ('_sat_long_min', float64),
('_ev', float64), ('_ev_last', float64), ('_ev_sum', float64),
('_ev_sum_max', float64), ('_ev_sum_min', float64),
('_ks', float64), ('_kv', float64), ('_l', float64),
('_dead_band', float64), ('_sat_lat_max', float64),
('_sat_lat_min', float64), ('_e_lat', float64), ('_e_yaw', float64),
('_waypoints', float64[:, :]), ('_closest_idx', int64),]
@jitclass(spec_v1)
class Controller_v1(object):
def __init__(self, kp, ki, kd, feed_forward_params, sat_long,\
ks, kv, length, lateral_dead_band, sat_lat,\
waypoints):
# In this version, the integral term will be clamped based on the
# saturation value and the feed-forward term
# The parameters of the longitudinal controller
self._kp = kp
self._ki = ki
self._kd = kd
self._ff_params = feed_forward_params
self._sat_long_max = max(sat_long[0], sat_long[1])
self._sat_long_min = min(sat_long[0], sat_long[1])
self._ev = 0.
self._ev_last = 0.
self._ev_sum = 0.
self._ev_sum_max = 0. # This value will be updated in each iteration
self._ev_sum_min = 0. # This value will be updated in each iteration
# The parameters of the lateral controller
self._ks = ks
self._kv = kv
self._l = length
self._dead_band = lateral_dead_band
self._sat_lat_max = np.fmax(sat_lat[0], sat_lat[1])
self._sat_lat_min = np.fmin(sat_lat[0], sat_lat[1])
self._e_lat = 0.
self._e_yaw = 0.
# Waypoints (n, 5) -> x, y, yaw, v, curvature
self._waypoints = waypoints
self._closest_idx = 0
def update_waypoints(self, new_waypoints):
self._waypoints = new_waypoints
def reset_integral_derivative(self):
self._ev_sum = 0.0
self._ev_last = 0.0
def get_error(self):
return self._ev, self._e_lat, self._e_yaw
def get_closest_index(self):
return self._closest_idx
def get_instantaneous_setpoint(self):
return self._waypoints[self._closest_idx]
def _update_error(self, x, y, v, yaw):
# Waypoints (n, 5) -> x, y, yaw, v, curvature
# Find the closest waypoint
self._closest_idx = np.argmin(np.sum(np.square(self._waypoints[:, :2] - np.array([x, y])), axis=-1))
# Find the yaw error
self._e_yaw = self._waypoints[self._closest_idx, 2] - yaw
self._e_yaw = (self._e_yaw + np.pi) % (2 * np.pi) - np.pi # Wrap the angle to [-pi, pi)
# Find the lateral or crosstrack error
if self._closest_idx == 0:
idx = 1
else:
idx = self._closest_idx
y2 = self._waypoints[idx, 1]
x2 = self._waypoints[idx, 0]
y1 = self._waypoints[idx - 1, 1]
x1 = self._waypoints[idx - 1, 0]
dy = y2 - y1
dx = x2 - x1
c = dx*y1 - dy*x1
self._e_lat = (dy*x + c - dx*y) \
/ (np.sqrt(dx**2 + dy**2) + 10**(-32))
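# (Added note) This is the signed point-to-line distance: for the segment
# through (x1, y1) and (x2, y2), the line is dy*X - dx*Y + c = 0 with
# c = dx*y1 - dy*x1, so e_lat = (dy*x - dx*y + c) / ||(dx, dy)||; the tiny
# epsilon guards against duplicate consecutive waypoints (dx = dy = 0).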
# Find the velocity error
self._ev = self._waypoints[self._closest_idx, 3] - v
def _feed_forward_longitudinal(self, v):
if v < 0.:
return 0.
else:
return self._ff_params[0] * (1. - np.exp(- self._ff_params[1] * v))
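# (Added note) The feed-forward term models the steady-state throttle needed
# to hold speed v as a saturating exponential a*(1 - exp(-b*v)), with
# a = _ff_params[0] and b = _ff_params[1]; negative setpoints get no feed-forward.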
'''
def _feed_forward_lateral(self):
temp = self._l * self._waypoints[self._closest_idx, -1]
if np.abs(temp) > 1.:
temp = np.sign(temp)
return np.fmax(np.fmin(np.arcsin(temp), self._sat_lat_max), self._sat_lat_min) # From -pi/2 to pi/2
'''
def _feed_forward_lateral(self):
temp = self._l * self._waypoints[self._closest_idx, -1]
return np.fmax(np.fmin(np.arctan(temp), self._sat_lat_max), self._sat_lat_min) # From -pi/2 to pi/2
def calculate_control_signal(self, dt, x, y, v, yaw):
# Waypoints (n, 5) -> x, y, yaw, v, curvature
self._update_error(x, y, v, yaw)
# Longitudinal control
ff_long = self._feed_forward_longitudinal(self._waypoints[self._closest_idx, 3])
ev_dot = (self._ev - self._ev_last) / dt
self._ev_sum_max = np.fmax((self._sat_long_max - ff_long)/self._ki, 0.)
self._ev_sum_min = np.fmin((self._sat_long_min - ff_long)/self._ki, 0.)
self._ev_sum = self._ev_sum + self._ev * dt
self._ev_sum = np.fmax(np.fmin(self._ev_sum, self._ev_sum_max), self._ev_sum_min)
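# (Added note) Anti-windup by clamping: the integral sum is limited so that
# ff_long + ki*ev_sum alone can never exceed the actuator saturation bounds,
# which keeps the integrator from winding up while the output is saturated.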
cs_long = ff_long +\
self._kp * self._ev +\
self._ki * self._ev_sum +\
self._kd * ev_dot
cs_long = np.fmax(np.fmin(cs_long, self._sat_long_max), self._sat_long_min)
self._ev_last = self._ev
# Lateral control
#temp = 0.0
#if np.abs(self._e_lat) > self._dead_band:
# temp = self._e_lat
a = self._feed_forward_lateral()
b = self._e_yaw
#c = np.arctan(self._ks * temp / (self._kv + v))
c = np.arctan(self._ks * self._e_lat / (self._kv + v))
d = a + b + c
cs_lat = max(min(d, self._sat_lat_max), self._sat_lat_min)
return cs_long, cs_lat
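# (Added note) The calls below are a numba warm-up: invoking every method once
# with dummy (random) waypoints forces the jitclass to compile ahead of the
# real-time loop, so the first control step in simulation is not slowed by JIT.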
print("Compilling the Controller_v1 ...")
controller = Controller_v1(0.5, 0.1, 0.1, np.array([1., 2.]),
np.array([-1., 1.]), 2.0, 0.1, 2.5,
0.01, np.array([-np.pi/3., np.pi/3.]),
np.random.randn(100, 5))
controller.update_waypoints(np.random.randn(100, 5))
controller.reset_integral_derivative()
_ = controller.get_error()
_ = controller.get_instantaneous_setpoint()
controller._update_error(0., 0., 1.0, 0.)
_ = controller._feed_forward_longitudinal(2.5)
_ = controller._feed_forward_lateral()
_ = controller.calculate_control_signal(0.01, 0., 0., 1.0, 0.)
print("The Controller_v1 class has been compiled !")
#########################################################################################
#########################################################################################
# VERSION 2
spec_v2 = [('_kp', float64), ('_ki', float64), ('_kd', float64), ('_ff_params', float64[:]),
('_sat_long_max', float64), ('_sat_long_min', float64), ('_sat_state', int64),
('_ev', float64), ('_ev_last', float64), ('_ev_sum', float64),
('_ks', float64), ('_kv', float64), ('_l', float64),
('_dead_band', float64), ('_sat_lat_max', float64),
('_sat_lat_min', float64), ('_e_lat', float64), ('_e_yaw', float64),
('_waypoints', float64[:, :]), ('_closest_idx', int64),]
@jitclass(spec_v2)
class Controller_v2(object):
def __init__(self, kp, ki, kd, feed_forward_params, sat_long,\
ks, kv, length, lateral_dead_band, sat_lat,\
waypoints):
# In this version, integral anti-windup is done by conditional integration:
# the error sum stops accumulating while the output is saturated (see
# _sat_state and calculate_control_signal below)
# The parameters of the longitudinal controller
self._kp = kp
self._ki = ki
self._kd = kd
self._ff_params = feed_forward_params
self._sat_long_max = max(sat_long[0], sat_long[1])
self._sat_long_min = min(sat_long[0], sat_long[1])
self._sat_state = 0 # The saturation state --> 0: No saturation || 1: cs > max || -1: cs < min
self._ev = 0.
self._ev_last = 0.
self._ev_sum = 0.
# The parameters of the lateral controller
self._ks = ks
self._kv = kv
self._l = length
self._dead_band = lateral_dead_band
self._sat_lat_max = max(sat_lat[0], sat_lat[1])
self._sat_lat_min = min(sat_lat[0], sat_lat[1])
self._e_lat = 0.
self._e_yaw = 0.
# Waypoints (n, 5) -> x, y, yaw, v, curvature
self._waypoints = waypoints
self._closest_idx = 0
def update_waypoints(self, new_waypoints):
self._waypoints = new_waypoints
def reset_integral_derivative(self):
self._ev_sum = 0.0
self._ev_last = 0.0
def get_error(self):
return self._ev, self._e_lat, self._e_yaw
def get_closest_index(self):
return self._closest_idx
def get_instantaneous_setpoint(self):
return self._waypoints[self._closest_idx]
def _update_error(self, x, y, v, yaw):
# Waypoints (n, 5) -> x, y, yaw, v, curvature
# Find the closest waypoint
self._closest_idx = np.argmin(np.sum(np.square(self._waypoints[:, :2] - np.array([x, y])), axis=-1))
# Find the yaw error
self._e_yaw = self._waypoints[self._closest_idx, 2] - yaw
self._e_yaw = (self._e_yaw + np.pi) % (2 * np.pi) - np.pi # Wrap the angle to [-pi, pi)
# Find the lateral or crosstrack error
if self._closest_idx == 0:
idx = 1
else:
idx = self._closest_idx
y2 = self._waypoints[idx, 1]
x2 = self._waypoints[idx, 0]
y1 = self._waypoints[idx - 1, 1]
x1 = self._waypoints[idx - 1, 0]
dy = y2 - y1
dx = x2 - x1
c = dx*y1 - dy*x1
self._e_lat = (dy*x + c - dx*y) \
/ (np.sqrt(dx**2 + dy**2) + 10**(-20))
# Find the velocity error
self._ev = self._waypoints[self._closest_idx, 3] - v
def _feed_forward_longitudinal(self, v):
if v < 0.:
return 0.
else:
return self._ff_params[0] * (1. - np.exp(- self._ff_params[1] * v))
def _feed_forward_lateral(self):
temp = self._l * self._waypoints[self._closest_idx, -1]
if np.abs(temp) > 1.:
temp = np.sign(temp)
return np.fmax(np.fmin(np.arcsin(temp), self._sat_lat_max), self._sat_lat_min) # From -pi/2 to pi/2
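# (Added note) Unlike v1, which uses arctan (defined for any argument), this
# version clamps l*curvature to [-1, 1] and applies arcsin, matching the
# geometric steering angle for an arc of the given curvature.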
def calculate_control_signal(self, dt, x, y, v, yaw):
# Waypoints (n, 5) -> x, y, yaw, v, curvature
self._update_error(x, y, v, yaw)
# Longitudinal control
ff_long = self._feed_forward_longitudinal(self._waypoints[self._closest_idx, 3])
ev_dot = (self._ev - self._ev_last) / dt
temp = self._ev * dt
if temp >= 0:
if self._sat_state != 1:
self._ev_sum = self._ev_sum + temp
else:
if self._sat_state != -1:
self._ev_sum = self._ev_sum + temp
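# (Added note) Anti-windup by conditional integration: when the previous
# output was saturated high (_sat_state == 1), positive error is not
# accumulated, and symmetrically when saturated low -- an alternative to
# the clamping scheme used in Controller_v1.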
cs_long = ff_long +\
self._kp * self._ev +\
self._ki * self._ev_sum +\
self._kd * ev_dot
if cs_long > self._sat_long_max:
cs_long = self._sat_long_max
self._sat_state = 1
elif cs_long < self._sat_long_min:
cs_long = self._sat_long_min
self._sat_state = -1
else:
self._sat_state = 0
self._ev_last = self._ev
# Lateral control
temp = 0.0
if np.abs(self._e_lat) > self._dead_band:
temp = self._e_lat
cs_lat = self._feed_forward_lateral() +\
self._e_yaw +\
np.arctan(self._ks * temp / (self._kv + v))
cs_lat = (cs_lat + np.pi) % (2 * np.pi) - np.pi
cs_lat = np.fmax(np.fmin(cs_lat, self._sat_lat_max), self._sat_lat_min)
return cs_long, cs_lat
print("Compilling the Controller_v2 ...")
controller = Controller_v2(0.5, 0.1, 0.1, np.array([1., 2.]),
np.array([-1., 1.]), 2.0, 0.1, 2.5,
0.01, np.array([-np.pi/3., np.pi/3.]),
np.random.randn(100, 5))
controller.update_waypoints(np.random.randn(100, 5))
controller.reset_integral_derivative()
_ = controller.get_error()
_ = controller.get_instantaneous_setpoint()
controller._update_error(0., 0., 1.0, 0.)
_ = controller._feed_forward_longitudinal(2.5)
_ = controller._feed_forward_lateral()
_ = controller.calculate_control_signal(0.01, 0., 0., 1.0, 0.)
print("The Controller_v2 class has been compiled !")
--- dataset record: opsgenie_swagger/api/escalation_api.py · Logicworks/opsgenie-python-sdk · Python · Apache-2.0 · 1 star ---
# coding: utf-8
"""
OpsGenie REST API
OpsGenie OpenAPI Specification # noqa: E501
OpenAPI spec version: 2.0.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from opsgenie_swagger.api_client import ApiClient
class EscalationApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def create_escalation(self, body, **kwargs): # noqa: E501
"""Create Escalation # noqa: E501
Creates a new escalation # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_escalation(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param CreateEscalationPayload body: Request payload of created escalation (required)
:return: SuccessResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.create_escalation_with_http_info(body, **kwargs) # noqa: E501
else:
(data) = self.create_escalation_with_http_info(body, **kwargs) # noqa: E501
return data
def create_escalation_with_http_info(self, body, **kwargs): # noqa: E501
"""Create Escalation # noqa: E501
Creates a new escalation # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.create_escalation_with_http_info(body, async_req=True)
>>> result = thread.get()
:param async_req bool
:param CreateEscalationPayload body: Request payload of created escalation (required)
:return: SuccessResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method create_escalation" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'body' is set
if ('body' not in params or
params['body'] is None):
raise ValueError("Missing the required parameter `body` when calling `create_escalation`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['GenieKey'] # noqa: E501
return self.api_client.call_api(
'/v2/escalations', 'POST',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SuccessResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
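# Usage sketch (non-generated; the Configuration fields and the
# CreateEscalationPayload constructor arguments are assumptions based on
# swagger-codegen conventions, not taken from this file):
#
#     from opsgenie_swagger import ApiClient, Configuration
#     config = Configuration()
#     config.api_key['Authorization'] = 'GenieKey <api-key>'  # 'GenieKey' auth
#     api = EscalationApi(ApiClient(config))
#     payload = CreateEscalationPayload(...)  # see opsgenie_swagger.models
#     resp = api.create_escalation(payload)   # SuccessResponse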
def delete_escalation(self, identifier, **kwargs): # noqa: E501
"""Delete Escalation # noqa: E501
Deletes an escalation using escalation 'id' or 'name' # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_escalation(identifier, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str identifier: Identifier of escalation which could be escalation 'id' or 'name' (required)
:param str identifier_type: Type of the identifier that is provided as an in-line parameter. Possible values are 'id' or 'name'
:return: SuccessResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.delete_escalation_with_http_info(identifier, **kwargs) # noqa: E501
else:
(data) = self.delete_escalation_with_http_info(identifier, **kwargs) # noqa: E501
return data
def delete_escalation_with_http_info(self, identifier, **kwargs): # noqa: E501
"""Delete Escalation # noqa: E501
Deletes an escalation using escalation 'id' or 'name' # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.delete_escalation_with_http_info(identifier, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str identifier: Identifier of escalation which could be escalation 'id' or 'name' (required)
:param str identifier_type: Type of the identifier that is provided as an in-line parameter. Possible values are 'id' or 'name'
:return: SuccessResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['identifier', 'identifier_type'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_escalation" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'identifier' is set
if ('identifier' not in params or
params['identifier'] is None):
raise ValueError("Missing the required parameter `identifier` when calling `delete_escalation`") # noqa: E501
collection_formats = {}
path_params = {}
if 'identifier' in params:
path_params['identifier'] = params['identifier'] # noqa: E501
query_params = []
if 'identifier_type' in params:
query_params.append(('identifierType', params['identifier_type'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['GenieKey'] # noqa: E501
return self.api_client.call_api(
'/v2/escalations/{identifier}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SuccessResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
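# (Added note) The optional `identifier_type` kwarg is sent as the
# `identifierType` query parameter, e.g. (illustrative values):
#     api.delete_escalation('ops-escalation', identifier_type='name')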
def get_escalation(self, identifier, **kwargs): # noqa: E501
"""Get Escalation # noqa: E501
Returns escalation with given 'id' or 'name' # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_escalation(identifier, async_req=True)
>>> result = thread.get()
:param bool async_req: execute the request asynchronously
:param str identifier: Identifier of the escalation, either its 'id' or 'name' (required)
:param str identifier_type: Type of the given identifier; possible values are 'id' and 'name'
:return: GetEscalationResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.get_escalation_with_http_info(identifier, **kwargs) # noqa: E501
else:
data = self.get_escalation_with_http_info(identifier, **kwargs) # noqa: E501
return data
def get_escalation_with_http_info(self, identifier, **kwargs): # noqa: E501
"""Get Escalation # noqa: E501
Returns the escalation with the given 'id' or 'name' # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.get_escalation_with_http_info(identifier, async_req=True)
>>> result = thread.get()
:param bool async_req: execute the request asynchronously
:param str identifier: Identifier of the escalation, either its 'id' or 'name' (required)
:param str identifier_type: Type of the given identifier; possible values are 'id' and 'name'
:return: GetEscalationResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['identifier', 'identifier_type'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_escalation" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'identifier' is set
if ('identifier' not in params or
params['identifier'] is None):
raise ValueError("Missing the required parameter `identifier` when calling `get_escalation`") # noqa: E501
collection_formats = {}
path_params = {}
if 'identifier' in params:
path_params['identifier'] = params['identifier'] # noqa: E501
query_params = []
if 'identifier_type' in params:
query_params.append(('identifierType', params['identifier_type'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['GenieKey'] # noqa: E501
return self.api_client.call_api(
'/v2/escalations/{identifier}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='GetEscalationResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def list_escalations(self, **kwargs): # noqa: E501
"""List Escalations # noqa: E501
Returns the list of escalations # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_escalations(async_req=True)
>>> result = thread.get()
:param bool async_req: execute the request asynchronously
:return: ListEscalationsResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.list_escalations_with_http_info(**kwargs) # noqa: E501
else:
data = self.list_escalations_with_http_info(**kwargs) # noqa: E501
return data
def list_escalations_with_http_info(self, **kwargs): # noqa: E501
"""List Escalations # noqa: E501
Returns the list of escalations # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.list_escalations_with_http_info(async_req=True)
>>> result = thread.get()
:param bool async_req: execute the request asynchronously
:return: ListEscalationsResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = [] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method list_escalations" % key
)
params[key] = val
del params['kwargs']
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['GenieKey'] # noqa: E501
return self.api_client.call_api(
'/v2/escalations', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ListEscalationsResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def update_escalation(self, identifier, **kwargs): # noqa: E501
"""Update Escalation (Partial) # noqa: E501
Updates an escalation identified by its 'id' or 'name' # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_escalation(identifier, async_req=True)
>>> result = thread.get()
:param bool async_req: execute the request asynchronously
:param str identifier: Identifier of the escalation, either its 'id' or 'name' (required)
:param str identifier_type: Type of the given identifier; possible values are 'id' and 'name'
:param UpdateEscalationPayload body: Request payload for updating the escalation
:return: SuccessResponse
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.update_escalation_with_http_info(identifier, **kwargs) # noqa: E501
else:
data = self.update_escalation_with_http_info(identifier, **kwargs) # noqa: E501
return data
def update_escalation_with_http_info(self, identifier, **kwargs): # noqa: E501
"""Update Escalation (Partial) # noqa: E501
Updates an escalation identified by its 'id' or 'name' # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.update_escalation_with_http_info(identifier, async_req=True)
>>> result = thread.get()
:param bool async_req: execute the request asynchronously
:param str identifier: Identifier of the escalation, either its 'id' or 'name' (required)
:param str identifier_type: Type of the given identifier; possible values are 'id' and 'name'
:param UpdateEscalationPayload body: Request payload for updating the escalation
:return: SuccessResponse
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['identifier', 'identifier_type', 'body'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method update_escalation" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'identifier' is set
if ('identifier' not in params or
params['identifier'] is None):
raise ValueError("Missing the required parameter `identifier` when calling `update_escalation`") # noqa: E501
collection_formats = {}
path_params = {}
if 'identifier' in params:
path_params['identifier'] = params['identifier'] # noqa: E501
query_params = []
if 'identifier_type' in params:
query_params.append(('identifierType', params['identifier_type'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
if 'body' in params:
body_params = params['body']
# HTTP header `Accept`
header_params['Accept'] = self.api_client.select_header_accept(
['application/json']) # noqa: E501
# Authentication setting
auth_settings = ['GenieKey'] # noqa: E501
return self.api_client.call_api(
'/v2/escalations/{identifier}', 'PATCH',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='SuccessResponse', # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
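A minimal usage sketch for the generated escalation client above. The package name opsgenie_swagger, the Configuration class, and the exact API-key wiring are assumptions based on typical swagger-codegen output, not something this file defines; adjust them to the real package.

import opsgenie_swagger  # hypothetical package name for this generated client

# 'GenieKey' below mirrors the auth_settings used by the methods above.
configuration = opsgenie_swagger.Configuration()
configuration.api_key['Authorization'] = '<genie-api-key>'
configuration.api_key_prefix['Authorization'] = 'GenieKey'

api = opsgenie_swagger.EscalationApi(opsgenie_swagger.ApiClient(configuration))

# Synchronous call: the deserialized response object is returned directly.
escalations = api.list_escalations()

# Asynchronous call: async_req=True returns a handle whose .get() blocks.
thread = api.get_escalation('ops_escalation', identifier_type='name', async_req=True)
escalation = thread.get()

Note that the public wrappers force _return_http_data_only=True, so only the deserialized body comes back; calling the *_with_http_info variants directly without that flag typically also yields the HTTP status code and headers.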
| 39.698842
| 135
| 0.618022
| 2,293
| 20,564
| 5.325338
| 0.077628
| 0.046515
| 0.02293
| 0.029482
| 0.94898
| 0.940709
| 0.93383
| 0.923675
| 0.919744
| 0.907297
| 0
| 0.015644
| 0.294398
| 20,564
| 517
| 136
| 39.775629
| 0.825913
| 0.361992
| 0
| 0.797794
| 1
| 0
| 0.185857
| 0.038103
| 0
| 0
| 0
| 0
| 0
| 1
| 0.040441
| false
| 0
| 0.014706
| 0
| 0.113971
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
f403c6d751751a01b1249b753d884db8077771b3
| 257
|
py
|
Python
|
timetable/tests/__init__.py
|
kyaranusa/School-Management-Systems
|
d6cd71037fe46c08feff32f42af61f56eb25a7c7
|
[
"MIT"
] | null | null | null |
timetable/tests/__init__.py
|
kyaranusa/School-Management-Systems
|
d6cd71037fe46c08feff32f42af61f56eb25a7c7
|
[
"MIT"
] | null | null | null |
timetable/tests/__init__.py
|
kyaranusa/School-Management-Systems
|
d6cd71037fe46c08feff32f42af61f56eb25a7c7
|
[
"MIT"
] | 1
|
2020-11-17T03:25:10.000Z
|
2020-11-17T03:25:10.000Z
|
# See LICENSE file for full copyright and licensing details.
# ----------------------------------------------------------
# A Module for the Assignment Management System
# ----------------------------------------------------------
from . import test_timetable
| 32.125
| 60
| 0.420233
| 19
| 257
| 5.631579
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.105058
| 257
| 7
| 61
| 36.714286
| 0.465217
| 0.844358
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 7
|
f4278768f99be9886f7517be89f129dfde2aece9
| 49,284
|
py
|
Python
|
test/units/modules/network/nuage/test_nuage_vspk.py
|
Container-Projects/ansible-provider-docs
|
100b695b0b0c4d8d08af362069557ffc735d0d7e
|
[
"PSF-2.0",
"BSD-2-Clause",
"MIT"
] | 37
|
2017-08-15T15:02:43.000Z
|
2021-07-23T03:44:31.000Z
|
test/units/modules/network/nuage/test_nuage_vspk.py
|
Container-Projects/ansible-provider-docs
|
100b695b0b0c4d8d08af362069557ffc735d0d7e
|
[
"PSF-2.0",
"BSD-2-Clause",
"MIT"
] | 12
|
2018-01-10T05:25:25.000Z
|
2021-11-28T06:55:48.000Z
|
test/units/modules/network/nuage/test_nuage_vspk.py
|
Container-Projects/ansible-provider-docs
|
100b695b0b0c4d8d08af362069557ffc735d0d7e
|
[
"PSF-2.0",
"BSD-2-Clause",
"MIT"
] | 49
|
2017-08-15T09:52:13.000Z
|
2022-03-21T17:11:54.000Z
|
# -*- coding: utf-8 -*-
# (c) 2017, Nokia
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import sys
from nose.plugins.skip import SkipTest
if not (sys.version_info[0] == 2 and sys.version_info[1] >= 7):
raise SkipTest('Nuage Ansible modules require Python 2.7')
try:
from vspk import v5_0 as vsdk
from bambou.exceptions import BambouHTTPError
from ansible.modules.network.nuage import nuage_vspk
except ImportError:
raise SkipTest('Nuage Ansible modules require the vspk and bambou Python libraries')
from ansible.compat.tests.mock import patch
from units.modules.utils import set_module_args, AnsibleExitJson, AnsibleFailJson
from .nuage_module import MockNuageConnection, TestNuageModule
# Module-level counter shared by the wait_for_job tests; job_fetch uses it to
# simulate a job that finishes only after it has been polled a few times.
_LOOP_COUNTER = 0
class TestNuageVSPKModule(TestNuageModule):
def setUp(self):
super(TestNuageVSPKModule, self).setUp()
self.patches = []
def enterprises_get(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
if 'unknown' in filter:
return []
result = [vsdk.NUEnterprise(id='enterprise-id', name='test-enterprise')]
if filter == '' or filter == 'name == "test%"':
result.append(vsdk.NUEnterprise(id='enterprise-id-2', name='test-enterprise-2'))
return result
self.enterprises_get_mock = patch('vspk.v5_0.fetchers.NUEnterprisesFetcher.get', new=enterprises_get)
self.enterprises_get_mock.start()
self.patches.append(self.enterprises_get_mock)
def enterprises_get_first(self, filter=None, order_by=None, group_by=None, query_parameters=None, commit=False, callback=None, **kwargs):
group_by = [] if group_by is None else group_by
if filter == 'name == "test-enterprise-create"' or 'unknown' in filter:
return None
return vsdk.NUEnterprise(id='enterprise-id', name='test-enterprise')
self.enterprises_get_first_mock = patch('vspk.v5_0.fetchers.NUEnterprisesFetcher.get_first', new=enterprises_get_first)
self.enterprises_get_first_mock.start()
self.patches.append(self.enterprises_get_first_mock)
def enterprise_delete(self, response_choice=1, callback=None, **kwargs):
pass
self.enterprise_delete_mock = patch('vspk.v5_0.NUEnterprise.delete', new=enterprise_delete)
self.enterprise_delete_mock.start()
self.patches.append(self.enterprise_delete_mock)
def enterprise_fetch(self, callback=None, **kwargs):
self.id = 'enterprise-id'
self.name = 'test-enterprise'
self.enterprise_fetch_mock = patch('vspk.v5_0.NUEnterprise.fetch', new=enterprise_fetch)
self.enterprise_fetch_mock.start()
self.patches.append(self.enterprise_fetch_mock)
def enterprise_save(self, response_choice=None, callback=None, **kwargs):
self.id = 'enterprise-id'
self.name = 'test-enterprise-update'
self.enterprise_save_mock = patch('vspk.v5_0.NUEnterprise.save', new=enterprise_save)
self.enterprise_save_mock.start()
self.patches.append(self.enterprise_save_mock)
def enterprise_create_child(self, nurest_object, response_choice=None, callback=None, commit=True, **kwargs):
nurest_object.id = 'user-id-create'
return nurest_object
self.enterprise_create_child_mock = patch('vspk.v5_0.NUEnterprise.create_child', new=enterprise_create_child)
self.enterprise_create_child_mock.start()
self.patches.append(self.enterprise_create_child_mock)
def me_create_child(self, nurest_object, response_choice=None, callback=None, commit=True, **kwargs):
nurest_object.id = 'enterprise-id-create'
return nurest_object
self.me_create_child_mock = patch('vspk.v5_0.NUMe.create_child', new=me_create_child)
self.me_create_child_mock.start()
self.patches.append(self.me_create_child_mock)
def user_fetch(self, callback=None, **kwargs):
self.id = 'user-id'
self.first_name = 'John'
self.last_name = 'Doe'
self.email = 'john.doe@localhost'
self.user_name = 'johndoe'
self.password = ''
self.user_fetch_mock = patch('vspk.v5_0.NUUser.fetch', new=user_fetch)
self.user_fetch_mock.start()
self.patches.append(self.user_fetch_mock)
def user_save(self, response_choice=None, callback=None, **kwargs):
self.id = 'user-id'
self.first_name = 'John'
self.last_name = 'Doe'
self.email = 'john.doe@localhost'
self.user_name = 'johndoe'
self.password = ''
self.user_save_mock = patch('vspk.v5_0.NUUser.save', new=user_save)
self.user_save_mock.start()
self.patches.append(self.user_save_mock)
def groups_get(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
return []
self.groups_get_mock = patch('vspk.v5_0.fetchers.NUGroupsFetcher.get', new=groups_get)
self.groups_get_mock.start()
self.patches.append(self.groups_get_mock)
def group_fetch(self, callback=None, **kwargs):
self.id = 'group-id'
self.name = 'group'
self.group_fetch_mock = patch('vspk.v5_0.NUGroup.fetch', new=group_fetch)
self.group_fetch_mock.start()
self.patches.append(self.group_fetch_mock)
def group_assign(self, objects, nurest_object_type, callback=None, commit=True, **kwargs):
self.id = 'group-id'
self.name = 'group'
self.group_assign_mock = patch('vspk.v5_0.NUGroup.assign', new=group_assign)
self.group_assign_mock.start()
self.patches.append(self.group_assign_mock)
def job_fetch(self, callback=None, **kwargs):
global _LOOP_COUNTER
self.id = 'job-id'
self.command = 'EXPORT'
self.status = 'RUNNING'
if _LOOP_COUNTER > 1:
self.status = 'SUCCESS'
_LOOP_COUNTER += 1
self.job_fetch_mock = patch('vspk.v5_0.NUJob.fetch', new=job_fetch)
self.job_fetch_mock.start()
self.patches.append(self.job_fetch_mock)
def tearDown(self):
super(TestNuageVSPKModule, self).tearDown()
for mock in self.patches:
mock.stop()
def test_certificate_auth(self):
set_module_args(
args={
'type': 'Enterprise',
'state': 'present',
'properties': {
'name': 'test-enterprise'
},
'auth': {
'api_username': 'csproot',
'api_certificate': '/dummy/location/certificate.pem',
'api_key': '/dummy/location/key.pem',
'api_enterprise': 'csp',
'api_url': 'https://localhost:8443',
'api_version': 'v5_0'
}
}
)
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertFalse(result['changed'])
self.assertEqual(len(result['entities']), 1)
self.assertEqual(result['id'], 'enterprise-id')
self.assertEqual(result['entities'][0]['name'], 'test-enterprise')
def test_command_find_by_property(self):
set_module_args(args={
'type': 'Enterprise',
'command': 'find',
'properties': {
'name': 'test-enterprise'
}
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertFalse(result['changed'])
self.assertEqual(len(result['entities']), 1)
self.assertEqual(result['id'], 'enterprise-id')
self.assertEqual(result['entities'][0]['name'], 'test-enterprise')
def test_command_find_by_filter(self):
set_module_args(args={
'type': 'Enterprise',
'command': 'find',
'match_filter': 'name == "test%"'
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertFalse(result['changed'])
self.assertEqual(len(result['entities']), 2)
self.assertEqual(result['entities'][0]['name'], 'test-enterprise')
self.assertEqual(result['entities'][1]['name'], 'test-enterprise-2')
def test_command_find_by_id(self):
set_module_args(args={
'id': 'enterprise-id',
'type': 'Enterprise',
'command': 'find'
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertFalse(result['changed'])
self.assertEqual(len(result['entities']), 1)
self.assertEqual(result['id'], 'enterprise-id')
self.assertEqual(result['entities'][0]['name'], 'test-enterprise')
def test_command_find_all(self):
set_module_args(args={
'type': 'Enterprise',
'command': 'find'
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertFalse(result['changed'])
self.assertEqual(len(result['entities']), 2)
self.assertEqual(result['entities'][0]['name'], 'test-enterprise')
self.assertEqual(result['entities'][1]['name'], 'test-enterprise-2')
def test_command_change_password(self):
set_module_args(args={
'id': 'user-id',
'type': 'User',
'parent_id': 'enterprise-id',
'parent_type': 'Enterprise',
'command': 'change_password',
'properties': {
'password': 'test'
}
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertEqual(result['changed'], True)
self.assertEqual(result['id'], 'user-id')
self.assertEqual(result['entities'][0]['firstName'], 'John')
self.assertEqual(result['entities'][0]['lastName'], 'Doe')
self.assertEqual(result['entities'][0]['email'], 'john.doe@localhost')
self.assertEqual(result['entities'][0]['userName'], 'johndoe')
self.assertEqual(result['entities'][0]['password'], '')
def test_command_wait_for_job(self):
set_module_args(args={
'id': 'job-id',
'type': 'Job',
'command': 'wait_for_job',
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertEqual(result['changed'], True)
self.assertEqual(result['id'], 'job-id')
self.assertEqual(result['entities'][0]['command'], 'EXPORT')
self.assertEqual(result['entities'][0]['status'], 'SUCCESS')
def test_command_get_csp_enterprise(self):
set_module_args(args={
'type': 'Enterprise',
'command': 'get_csp_enterprise'
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertFalse(result['changed'])
self.assertEqual(len(result['entities']), 1)
self.assertEqual(result['id'], 'enterprise-id')
self.assertEqual(result['entities'][0]['name'], 'test-enterprise')
def test_state_present_existing(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'present',
'properties': {
'id': 'enterprise-id',
'name': 'test-enterprise'
}
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertFalse(result['changed'])
self.assertEqual(len(result['entities']), 1)
self.assertEqual(result['id'], 'enterprise-id')
self.assertEqual(result['entities'][0]['name'], 'test-enterprise')
def test_state_present_existing_filter(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'present',
'match_filter': 'name == "test-enterprise"'
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertFalse(result['changed'])
self.assertEqual(len(result['entities']), 1)
self.assertEqual(result['id'], 'enterprise-id')
self.assertEqual(result['entities'][0]['name'], 'test-enterprise')
def test_state_present_create(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'present',
'properties': {
'name': 'test-enterprise-create'
}
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertEqual(result['changed'], True)
self.assertEqual(len(result['entities']), 1)
self.assertEqual(result['id'], 'enterprise-id-create')
self.assertEqual(result['entities'][0]['name'], 'test-enterprise-create')
def test_state_present_update(self):
set_module_args(args={
'id': 'enterprise-id',
'type': 'Enterprise',
'state': 'present',
'properties': {
'name': 'test-enterprise-update'
}
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertEqual(result['changed'], True)
self.assertEqual(len(result['entities']), 1)
self.assertEqual(result['id'], 'enterprise-id')
self.assertEqual(result['entities'][0]['name'], 'test-enterprise-update')
def test_state_present_member_existing(self):
set_module_args(args={
'id': 'user-id',
'type': 'User',
'parent_id': 'group-id',
'parent_type': 'Group',
'state': 'present'
})
def users_get(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
return [vsdk.NUUser(id='user-id'), vsdk.NUUser(id='user-id-2')]
with self.assertRaises(AnsibleExitJson) as exc:
with patch('vspk.v5_0.fetchers.NUUsersFetcher.get', users_get):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertFalse(result['changed'])
def test_state_present_member_missing(self):
set_module_args(args={
'id': 'user-id',
'type': 'User',
'parent_id': 'group-id',
'parent_type': 'Group',
'state': 'present'
})
def users_get(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
return []
with self.assertRaises(AnsibleExitJson) as exc:
with patch('vspk.v5_0.fetchers.NUUsersFetcher.get', users_get):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertEqual(result['changed'], True)
self.assertEqual(len(result['entities']), 1)
self.assertEqual(result['id'], 'user-id')
def test_state_present_children_update(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'present',
'properties': {
'name': 'test-enterprise'
},
'children': [
{
'id': 'user-id',
'type': 'User',
'match_filter': 'userName == "johndoe"',
'properties': {
'user_name': 'johndoe-changed'
}
}
]
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertEqual(result['changed'], True)
self.assertEqual(len(result['entities']), 2)
def test_state_present_children_create(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'present',
'properties': {
'name': 'test-enterprise-create'
},
'children': [
{
'type': 'User',
'properties': {
'user_name': 'johndoe-new'
}
}
]
})
def users_get(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
return []
with self.assertRaises(AnsibleExitJson) as exc:
with patch('vspk.v5_0.fetchers.NUUsersFetcher.get', users_get):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['changed'])
self.assertEqual(len(result['entities']), 2)
def test_state_present_children_member_missing(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'present',
'properties': {
'name': 'unknown-test-enterprise'
},
'children': [
{
'type': 'Group',
'properties': {
'name': 'unknown-group'
},
'children': [
{
'id': 'user-id',
'type': 'User'
}
]
}
]
})
def users_get(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
return []
with self.assertRaises(AnsibleExitJson) as exc:
with patch('vspk.v5_0.fetchers.NUUsersFetcher.get', users_get):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['changed'])
self.assertEqual(len(result['entities']), 3)
def test_state_absent(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'absent',
'properties': {
'name': 'test-enterprise'
}
})
with self.assertRaises(AnsibleExitJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['changed'])
def test_state_absent_member(self):
set_module_args(args={
'id': 'user-id',
'type': 'User',
'parent_id': 'group-id',
'parent_type': 'Group',
'state': 'absent'
})
def users_get(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
return [vsdk.NUUser(id='user-id')]
with self.assertRaises(AnsibleExitJson) as exc:
with patch('vspk.v5_0.fetchers.NUUsersFetcher.get', users_get):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['changed'])
def test_exception_session(self):
set_module_args(args={
'id': 'enterprise-id',
'type': 'Enterprise',
'command': 'find'
})
def failed_session_start(self):
raise BambouHTTPError(MockNuageConnection(status_code='401', reason='Unauthorized', errors={}))
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.NUVSDSession.start', new=failed_session_start):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Unable to connect to the API URL with given username, password and enterprise: [HTTP 401(Unauthorized)] {}')
def test_exception_find_parent(self):
set_module_args(args={
'type': 'User',
'parent_id': 'group-id',
'parent_type': 'Group',
'command': 'find'
})
def group_failed_fetch(self, callback=None, **kwargs):
raise BambouHTTPError(MockNuageConnection(status_code='404', reason='Not Found', errors={'description': 'Entity not found'}))
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.NUGroup.fetch', group_failed_fetch):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "Failed to fetch the specified parent: [HTTP 404(Not Found)] {'description': 'Entity not found'}")
def test_exception_find_entities_id(self):
set_module_args(args={
'id': 'enterprise-id',
'type': 'Enterprise',
'command': 'find'
})
def enterprise_failed_fetch(self, callback=None, **kwargs):
raise BambouHTTPError(MockNuageConnection(status_code='404', reason='Not Found', errors={'description': 'Entity not found'}))
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.NUEnterprise.fetch', enterprise_failed_fetch):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "Failed to fetch the specified entity by ID: [HTTP 404(Not Found)] {'description': 'Entity not found'}")
def test_exception_find_entities_property(self):
set_module_args(args={
'type': 'Enterprise',
'match_filter': 'name == "enterprise-id"',
'command': 'find'
})
def enterprises_failed_get(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
raise BambouHTTPError(MockNuageConnection(status_code='404', reason='Not Found', errors={'description': 'Entity not found'}))
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.fetchers.NUEnterprisesFetcher.get', enterprises_failed_get):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Unable to find matching entries')
def test_exception_find_entity_id(self):
set_module_args(args={
'id': 'enterprise-id',
'type': 'Enterprise',
'state': 'present'
})
def enterprise_failed_fetch(self, callback=None, **kwargs):
raise BambouHTTPError(MockNuageConnection(status_code='404', reason='Not Found', errors={'description': 'Entity not found'}))
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.NUEnterprise.fetch', enterprise_failed_fetch):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "Failed to fetch the specified entity by ID: [HTTP 404(Not Found)] {'description': 'Entity not found'}")
def test_exception_find_entity_property(self):
set_module_args(args={
'type': 'Enterprise',
'match_filter': 'name == "enterprise-id"',
'state': 'absent'
})
def enterprises_failed_get_first(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
raise BambouHTTPError(MockNuageConnection(status_code='404', reason='Not Found', errors={'description': 'Entity not found'}))
with self.assertRaises(AnsibleExitJson) as exc:
with patch('vspk.v5_0.fetchers.NUEnterprisesFetcher.get_first', enterprises_failed_get_first):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertFalse(result['changed'])
def test_exception_get_csp_enterprise(self):
set_module_args(args={
'type': 'Enterprise',
'command': 'get_csp_enterprise'
})
def enterprise_failed_fetch(self, callback=None, **kwargs):
raise BambouHTTPError(MockNuageConnection(status_code='404', reason='Not Found', errors={'description': 'Entity not found'}))
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.NUEnterprise.fetch', enterprise_failed_fetch):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "Unable to fetch CSP enterprise: [HTTP 404(Not Found)] {'description': 'Entity not found'}")
def test_exception_assign_member(self):
set_module_args(args={
'id': 'user-id',
'type': 'User',
'parent_id': 'group-id',
'parent_type': 'Group',
'state': 'present'
})
def users_get(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
return []
def group_assign(self, objects, nurest_object_type, callback=None, commit=True, **kwargs):
raise BambouHTTPError(MockNuageConnection(status_code='500', reason='Server exception', errors={'description': 'Unable to assign member'}))
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.fetchers.NUUsersFetcher.get', users_get):
with patch('vspk.v5_0.NUGroup.assign', new=group_assign):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "Unable to assign entity as a member: [HTTP 500(Server exception)] {'description': 'Unable to assign member'}")
def test_exception_unassign_member(self):
set_module_args(args={
'id': 'user-id',
'type': 'User',
'parent_id': 'group-id',
'parent_type': 'Group',
'state': 'absent'
})
def users_get(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
return [vsdk.NUUser(id='user-id'), vsdk.NUUser(id='user-id-2')]
def group_assign(self, objects, nurest_object_type, callback=None, commit=True, **kwargs):
raise BambouHTTPError(MockNuageConnection(status_code='500', reason='Server exception', errors={'description': 'Unable to remove member'}))
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.fetchers.NUUsersFetcher.get', users_get):
with patch('vspk.v5_0.NUGroup.assign', new=group_assign):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "Unable to remove entity as a member: [HTTP 500(Server exception)] {'description': 'Unable to remove member'}")
def test_exception_create_entity(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'present',
'properties': {
'name': 'test-enterprise-create'
}
})
def me_create_child(self, nurest_object, response_choice=None, callback=None, commit=True, **kwargs):
raise BambouHTTPError(MockNuageConnection(status_code='500', reason='Server exception', errors={'description': 'Unable to create entity'}))
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.NUMe.create_child', me_create_child):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "Unable to create entity: [HTTP 500(Server exception)] {'description': 'Unable to create entity'}")
def test_exception_save_entity(self):
set_module_args(args={
'id': 'enterprise-id',
'type': 'Enterprise',
'state': 'present',
'properties': {
'name': 'new-enterprise-name'
}
})
def enterprise_save(self, response_choice=None, callback=None, **kwargs):
raise BambouHTTPError(MockNuageConnection(status_code='500', reason='Server exception', errors={'description': 'Unable to save entity'}))
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.NUEnterprise.save', enterprise_save):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "Unable to update entity: [HTTP 500(Server exception)] {'description': 'Unable to save entity'}")
def test_exception_delete_entity(self):
set_module_args(args={
'id': 'enterprise-id',
'type': 'Enterprise',
'state': 'absent'
})
def enterprise_delete(self, response_choice=1, callback=None, **kwargs):
raise BambouHTTPError(MockNuageConnection(status_code='500', reason='Server exception', errors={'description': 'Unable to delete entity'}))
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.NUEnterprise.delete', enterprise_delete):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "Unable to delete entity: [HTTP 500(Server exception)] {'description': 'Unable to delete entity'}")
def test_exception_wait_for_job(self):
set_module_args(args={
'id': 'job-id',
'type': 'Job',
'command': 'wait_for_job'
})
def job_fetch(self, callback=None, **kwargs):
global _LOOP_COUNTER
self.id = 'job-id'
self.command = 'EXPORT'
self.status = 'RUNNING'
if _LOOP_COUNTER > 1:
self.status = 'ERROR'
_LOOP_COUNTER += 1
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.NUJob.fetch', new=job_fetch):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "Job ended in an error")
def test_fail_auth(self):
set_module_args(
args={
'type': 'Enterprise',
'command': 'find',
'auth': {
'api_username': 'csproot',
'api_enterprise': 'csp',
'api_url': 'https://localhost:8443',
'api_version': 'v5_0'
}
}
)
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Missing api_password or api_certificate and api_key parameter in auth')
def test_fail_version(self):
set_module_args(
args={
'type': 'Enterprise',
'command': 'find',
'auth': {
'api_username': 'csproot',
'api_password': 'csproot',
'api_enterprise': 'csp',
'api_url': 'https://localhost:8443',
'api_version': 'v1_0'
}
}
)
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'vspk is required for this module, or the API version specified does not exist.')
def test_fail_type(self):
set_module_args(args={
'type': 'Unknown',
'command': 'find'
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Unrecognised type specified')
def test_fail_parent_type(self):
set_module_args(args={
'type': 'User',
'parent_id': 'unknown-id',
'parent_type': 'Unknown',
'command': 'find'
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Unrecognised parent type specified')
def test_fail_parent_child(self):
set_module_args(args={
'type': 'Enterprise',
'parent_id': 'user-id',
'parent_type': 'User',
'command': 'find'
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Specified parent is not a valid parent for the specified type')
def test_fail_no_parent(self):
set_module_args(args={
'type': 'Group',
'command': 'find'
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'No parent specified and root object is not a parent for the type')
def test_fail_present_member(self):
set_module_args(args={
'type': 'User',
'match_filter': 'name == "test-user"',
'parent_id': 'group-id',
'parent_type': 'Group',
'state': 'present'
})
def users_get_first(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
return None
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.fetchers.NUUsersFetcher.get_first', users_get_first):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Trying to assign an entity that does not exist', result)
def test_fail_change_password(self):
set_module_args(args={
'id': 'user-id',
'type': 'User',
'command': 'change_password',
'properties': {}
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'command is change_password but the following are missing: password property')
def test_fail_change_password_non_user(self):
set_module_args(args={
'id': 'group-id',
'type': 'Group',
'command': 'change_password',
'properties': {
'password': 'new-password'
}
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Entity does not have a password property')
def test_fail_command_find(self):
set_module_args(args={
'type': 'Enterprise',
'command': 'find',
'properties': {
'id': 'unknown-enterprise-id',
'name': 'unknown-enterprise'
}
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Unable to find matching entries')
def test_fail_children_type(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'present',
'properties': {
'name': 'test-enterprise-create'
},
'children': [
{
'properties': {
'user_name': 'johndoe-new'
}
}
]
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Child type unspecified')
def test_fail_children_mandatory(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'present',
'properties': {
'name': 'test-enterprise-create'
},
'children': [
{
'type': 'User'
}
]
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Child ID or properties unspecified')
def test_fail_children_unknown(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'present',
'properties': {
'name': 'test-enterprise-create'
},
'children': [
{
'id': 'unknown-id',
'type': 'Unknown'
}
]
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Unrecognised child type specified')
def test_fail_children_parent(self):
set_module_args(args={
'id': 'group-id',
'type': 'Group',
'state': 'present',
'children': [
{
'type': 'User',
'properties': {
'name': 'test-user'
}
}
]
})
def users_get_first(self, filter=None, order_by=None, group_by=None, page=None, page_size=None, query_parameters=None, commit=True,
callback=None, **kwargs):
group_by = [] if group_by is None else group_by
return None
with self.assertRaises(AnsibleFailJson) as exc:
with patch('vspk.v5_0.fetchers.NUUsersFetcher.get_first', users_get_first):
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Trying to assign a child that does not exist')
def test_fail_children_fetcher(self):
set_module_args(args={
'id': 'group-id',
'type': 'Group',
'state': 'present',
'children': [
{
'type': 'Enterprise',
'properties': {
'name': 'test-enterprise'
}
}
]
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Unable to find a fetcher for child, and no ID specified.')
def test_fail_has_changed(self):
set_module_args(args={
'id': 'user-id',
'type': 'User',
'state': 'present',
'properties': {
'user_name': 'changed-user',
'fake': 'invalid-property',
'password': 'hidden-property'
}
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'Property fake is not valid for this type of entity')
def test_input_auth_username(self):
set_module_args(
args={
'type': 'Enterprise',
'command': 'find',
'auth': {
'api_password': 'csproot',
'api_enterprise': 'csp',
'api_url': 'https://localhost:8443',
'api_version': 'v5_0'
}
}
)
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'missing required arguments: api_username')
def test_input_auth_enterprise(self):
set_module_args(
args={
'type': 'Enterprise',
'command': 'find',
'auth': {
'api_username': 'csproot',
'api_password': 'csproot',
'api_url': 'https://localhost:8443',
'api_version': 'v5_0'
}
}
)
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'missing required arguments: api_enterprise')
def test_input_auth_url(self):
set_module_args(
args={
'type': 'Enterprise',
'command': 'find',
'auth': {
'api_username': 'csproot',
'api_password': 'csproot',
'api_enterprise': 'csp',
'api_version': 'v5_0'
}
}
)
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'missing required arguments: api_url')
def test_input_auth_version(self):
set_module_args(
args={
'type': 'Enterprise',
'command': 'find',
'auth': {
'api_username': 'csproot',
'api_password': 'csproot',
'api_enterprise': 'csp',
'api_url': 'https://localhost:8443',
}
}
)
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], 'missing required arguments: api_version')
def test_input_exclusive(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'present',
'command': 'find'
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "parameters are mutually exclusive: ['command', 'state']")
def test_input_require_both_parent_id(self):
set_module_args(args={
'type': 'User',
'command': 'find',
'parent_type': 'Enterprise'
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "parameters are required together: ['parent_id', 'parent_type']")
def test_input_require_both_parent_type(self):
set_module_args(args={
'type': 'User',
'command': 'find',
'parent_id': 'enterprise-id'
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "parameters are required together: ['parent_id', 'parent_type']")
def test_input_require_on_off(self):
set_module_args(args={
'type': 'Enterprise'
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "one of the following is required: command,state")
def test_input_require_if_present(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'present',
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "state is present but the following are missing: id,properties,match_filter")
def test_input_require_if_absent(self):
set_module_args(args={
'type': 'Enterprise',
'state': 'absent',
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "state is absent but the following are missing: id,properties,match_filter")
def test_input_require_if_change_password_id(self):
set_module_args(args={
'type': 'User',
'command': 'change_password',
'properties': {
'password': 'dummy-password'
}
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "command is change_password but the following are missing: id")
def test_input_require_if_change_password_properties(self):
set_module_args(args={
'type': 'User',
'command': 'change_password',
'id': 'user-id'
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "command is change_password but the following are missing: properties")
def test_input_require_if_wait_for_job_id(self):
set_module_args(args={
'type': 'Job',
'command': 'wait_for_job'
})
with self.assertRaises(AnsibleFailJson) as exc:
nuage_vspk.main()
result = exc.exception.args[0]
self.assertTrue(result['failed'])
self.assertEqual(result['msg'], "command is wait_for_job but the following are missing: id")
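The tests above import set_module_args, AnsibleExitJson and AnsibleFailJson from units.modules.utils, whose body is not shown here. For orientation, a sketch of the conventional Ansible unit-test implementations of those helpers (the actual file may differ):

import json
from ansible.module_utils import basic
from ansible.module_utils._text import to_bytes

def set_module_args(args):
    """Feed arguments to a module as if they came from Ansible itself."""
    serialized = json.dumps({'ANSIBLE_MODULE_ARGS': args})
    basic._ANSIBLE_ARGS = to_bytes(serialized)

class AnsibleExitJson(Exception):
    """Raised instead of sys.exit() when a patched module calls exit_json()."""

class AnsibleFailJson(Exception):
    """Raised instead of sys.exit() when a patched module calls fail_json()."""

exit_json and fail_json are monkeypatched (presumably in TestNuageModule.setUp, also not shown) to raise these exceptions instead of exiting the process, which is why every test above drives nuage_vspk.main() inside assertRaises.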
| 34.731501
| 152
| 0.571707
| 5,215
| 49,284
| 5.233749
| 0.056184
| 0.050011
| 0.059244
| 0.037994
| 0.858394
| 0.822269
| 0.79065
| 0.761999
| 0.739979
| 0.723492
| 0
| 0.008518
| 0.302066
| 49,284
| 1,418
| 153
| 34.755994
| 0.784981
| 0.013412
| 0
| 0.70572
| 0
| 0.009225
| 0.206271
| 0.030943
| 0
| 0
| 0
| 0
| 0.190959
| 1
| 0.090406
| false
| 0.026753
| 0.008303
| 0
| 0.114391
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be2f8ec6c41d630744c2c8951a1d449607ca6152
| 37
|
py
|
Python
|
matriz.py
|
noudas/treinopython1
|
88379a2f1a564de518e33ea29df14f7ce65583d8
|
[
"0BSD"
] | null | null | null |
matriz.py
|
noudas/treinopython1
|
88379a2f1a564de518e33ea29df14f7ce65583d8
|
[
"0BSD"
] | null | null | null |
matriz.py
|
noudas/treinopython1
|
88379a2f1a564de518e33ea29df14f7ce65583d8
|
[
"0BSD"
] | null | null | null |
# A 2x2 matrix stored as a list of rows
x = [[1, 2], [3, 4]]
print(x[1][0])  # row index 1, column index 0 -> prints 3
| 9.25
| 18
| 0.351351
| 9
| 37
| 1.444444
| 0.777778
| 0.307692
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.206897
| 0.216216
| 37
| 3
| 19
| 12.333333
| 0.241379
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.5
| 1
| 1
| 1
| null | 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 8
|
be68be0c02989998325ad25b9d183874eb510909
| 527
|
py
|
Python
|
aiopoke/__main__.py
|
beastmatser/aiopokeapi
|
6ffe10bf8db0b6349cabf5b5b01b738214f805d0
|
[
"MIT"
] | 3
|
2021-10-03T13:49:47.000Z
|
2022-03-31T03:12:30.000Z
|
aiopoke/__main__.py
|
beastmatser/aiopokeapi
|
6ffe10bf8db0b6349cabf5b5b01b738214f805d0
|
[
"MIT"
] | 3
|
2022-01-18T07:31:08.000Z
|
2022-01-18T07:32:09.000Z
|
aiopoke/__main__.py
|
beastmatser/aiopokeapi
|
6ffe10bf8db0b6349cabf5b5b01b738214f805d0
|
[
"MIT"
] | 1
|
2022-01-19T12:35:09.000Z
|
2022-01-19T12:35:09.000Z
|
from aiopoke import version_info
def main():
print(
f"""
\033[1;32m
.d8b. d888888b .d88b. d8888b. .d88b. db dD d88888b
d8' `8b `88' .8P Y8. 88 `8D .8P Y8. 88 ,8P' 88'
88ooo88 88 88 88 88oodD' 88 88 88,8P 88ooooo
88~~~88 88 88 88 88~~~ 88 88 88`8b 88~~~~~
88 88 .88. `8b d8' 88 `8b d8' 88 `88. 88.
YP YP Y888888P `Y88P' 88 `Y88P' YP YD Y88888P\033[0m\t\033[1;34mVersion: {version_info}\033[0m
"""
)
if __name__ == "__main__":
main()
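The banner above is colored with raw ANSI SGR escape sequences embedded in the f-string: \033[1;32m selects bold green, \033[1;34m bold blue, and \033[0m resets the terminal attributes. A tiny self-contained illustration of the same codes (the printed text here is just placeholder content):

BOLD_GREEN = "\033[1;32m"  # SGR parameters: 1 = bold, 32 = green foreground
BOLD_BLUE = "\033[1;34m"   # SGR parameters: 1 = bold, 34 = blue foreground
RESET = "\033[0m"          # SGR parameter: 0 = reset all attributes

print(f"{BOLD_GREEN}aiopoke{RESET} version: {BOLD_BLUE}x.y.z{RESET}")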
| 26.35
| 107
| 0.529412
| 87
| 527
| 3.091954
| 0.436782
| 0.252788
| 0.267658
| 0.208178
| 0.111524
| 0.066915
| 0.066915
| 0.066915
| 0
| 0
| 0
| 0.372549
| 0.322581
| 527
| 19
| 108
| 27.736842
| 0.380952
| 0
| 0
| 0
| 0
| 0.133333
| 0.787476
| 0.104364
| 0
| 0
| 0
| 0
| 0
| 1
| 0.066667
| true
| 0
| 0.066667
| 0
| 0.133333
| 0.066667
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
be75af31c82f7837c16663da5e8394ca545a1392
| 15,413
|
py
|
Python
|
ade25/widgets/widgets/summary/listing.py
|
ade25/ade25.widgets
|
272cf1c74a3b97f4e25161c50f178ebe3c1a70d1
|
[
"MIT"
] | null | null | null |
ade25/widgets/widgets/summary/listing.py
|
ade25/ade25.widgets
|
272cf1c74a3b97f4e25161c50f178ebe3c1a70d1
|
[
"MIT"
] | null | null | null |
ade25/widgets/widgets/summary/listing.py
|
ade25/ade25.widgets
|
272cf1c74a3b97f4e25161c50f178ebe3c1a70d1
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""Module providing content listing widgets"""
import uuid as uuid_tool
from Acquisition import aq_inner, aq_parent
from Products.CMFCore.interfaces import ISiteRoot
from Products.Five import BrowserView
from ade25.panelpage.page import IPage
from plone import api
from plone.app.vocabularies.catalog import KeywordsVocabulary
from plone.i18n.normalizer import IIDNormalizer
from zope.component import queryUtility
from ade25.widgets.interfaces import IContentWidgets
from ade25.widgets import MessageFactory as _
class WidgetContentListing(BrowserView):
""" Basic context content listing """
def __call__(
self,
widget_name="listing",
widget_type="listing",
widget_data=None,
widget_mode="view",
**kw
):
self.params = {
"widget_name": widget_name,
"widget_type": widget_type,
"widget_mode": widget_mode,
"widget_data": widget_data,
}
self.has_content = len(self.contained_content_items()) > 0
return self.render()
def render(self):
return self.index()
@staticmethod
def normalizer():
return queryUtility(IIDNormalizer)
@property
def edit_mode(self):
if self.params["widget_mode"] == "edit":
return True
return False
@property
def record(self):
return self.params["widget_data"]
def custom_styles(self):
if self.record and "styles" in self.record:
return self.record["styles"]
else:
return None
def card_list_class(self):
context = aq_inner(self.context)
css_class = "c-list c-list--default c-list--{}".format(context.UID())
custom_styles = self.custom_styles()
if custom_styles:
class_container = custom_styles["class_container"]
for class_name in class_container.split(" "):
css_class = "{0} c-list--{1}".format(css_class, class_name)
if "custom" in custom_styles:
css_class = "{0} {1}".format(css_class, custom_styles["custom"])
return css_class
def widget_uid(self):
try:
widget_id = self.record["id"]
except (KeyError, TypeError):
widget_id = str(uuid_tool.uuid4())
return widget_id
def card_subject_classes(self, item):
subjects = item.Subject
class_list = [self.filter_value(keyword) for keyword in subjects]
return class_list
def card_css_classes(self, item):
class_list = self.card_subject_classes(item)
if class_list:
return " ".join(class_list)
else:
return "app-tag--all"
def available_keywords(self):
context = aq_inner(self.context)
keyword_vocabulary = KeywordsVocabulary()
vocabulary = keyword_vocabulary(context)
return vocabulary
def normalized_token(self, term):
return self.normalizer().normalize(term, locale="de")
def normalized_keywords(self):
vocabulary = self.available_keywords()
taxonomy = dict()
for index, term in enumerate(vocabulary):
element_value = term.value
taxonomy[index] = element_value
return taxonomy
def filter_value(self, term):
vocabulary = self.normalized_keywords()
filter_value = "o-tag--undefined"
for item_index, item_term in vocabulary.items():
if item_term == term:
filter_value = "o-tag--{0}".format(str(item_index))
return filter_value
def content_items(self):
results = []
display_limit = self.widget_stored_data().get("display_limit", "24")
brains = self.contained_content_items(limit=display_limit)
layout_reverse = self.widget_stored_data().get("display_reverse", False)
for brain in brains:
results.append(
{
"title": brain.Title,
"description": brain.Description,
"url": brain.getURL(),
"timestamp": brain.Date,
"uuid": brain.UID,
"css_classes": "o-list__item--{0} {1} {2}".format(
brain.UID,
"o-list__item--{0}".format(
layout_reverse and "reverse" or "default"
),
self.card_css_classes(brain),
),
}
)
return results
@staticmethod
def _get_acquisition_chain(context_object):
"""
@return: List of objects from context, its parents to the portal root
Example::
chain = getAcquisitionChain(self.context)
print "I will look up objects:" + str(list(chain))
@param object: Any content object
@return: Iterable of all parents from the direct parent to the site root
"""
# It is important to use inner to bootstrap the traverse,
# or otherwise we might get surprising parents
# E.g. the context of the view has the view as the parent
# unless inner is used
inner = context_object.aq_inner
content_node = inner
while content_node is not None:
yield content_node
if ISiteRoot.providedBy(content_node):
break
if not hasattr(content_node, "aq_parent"):
raise RuntimeError(
"Parent traversing interrupted by object: {}".format(
str(content_node)
)
)
content_node = content_node.aq_parent
def _base_query(self):
context = aq_inner(self.context)
return dict(
portal_type=[
"ade25.sitecontent.contentpage",
"ade25.sitecontent.sectionfolder",
],
path=dict(query="/".join(context.getPhysicalPath()), depth=1),
review_state="published",
sort_on="getObjPositionInParent",
)
def contained_content_items(self, limit=20):
context = aq_inner(self.context)
container = context
catalog = api.portal.get_tool(name="portal_catalog")
query = self._base_query()
query["sort_limit"] = limit
if IPage.providedBy(container):
container = aq_parent(container)
query["path"] = dict(query="/".join(container.getPhysicalPath()), depth=1)
items = catalog.searchResults(query)[:limit]
return items
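    # Path note on the query above (illustrative): when the widget sits on a
    # page, the listing hops to the page's parent so that siblings are
    # listed, e.g. a widget on /site/section/page queries /site/section at
    # depth 1.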
def widget_stored_data(self):
context = aq_inner(self.context)
try:
storage = IContentWidgets(context)
content = storage.read_widget(self.widget_uid())
except TypeError:
content = dict()
return content
def get_widget_content(self, entry, fallback):
widget_content = self.widget_stored_data()
try:
            entry_value = self.record[entry]
        except (KeyError, TypeError):
entry_value = widget_content.get(entry, fallback)
return entry_value
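    # Resolution order implemented above, shown with a hypothetical entry:
    # the inline widget record wins, then the persisted IContentWidgets
    # storage, then the caller-supplied fallback.
    #
    #     self.get_widget_content("display_limit", "24")
    #     # -> record["display_limit"] if set, else stored value, else "24"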
def widget_content(self):
data = {
"title": self.get_widget_content("title", None),
"batch": self.get_widget_content("display_batch", False),
"images": self.get_widget_content("display_images", True),
"abstract": self.get_widget_content("display_abstract", True),
"limit": self.get_widget_content("display_limit", None),
"read_more": self.get_widget_content("display_read_more", True),
"read_more_value": self.get_widget_content(
"read_more_text", _(u"Read more")
),
"read_more_layout": self.get_widget_content(
"read_more_layout", "width-33"
),
"layout": self.get_widget_content("display_reverse", False),
"items": self.content_items(),
}
return data
class WidgetContentListingCards(BrowserView):
""" Basic context content listing displying a card grid"""
def __call__(self, widget_data=None, widget_mode="view", **kw):
self.params = {"widget_mode": widget_mode, "widget_data": widget_data}
self.has_content = len(self.contained_content_items()) > 0
return self.render()
def render(self):
return self.index()
@staticmethod
def normalizer():
return queryUtility(IIDNormalizer)
@property
def edit_mode(self):
if self.params["widget_mode"] == "edit":
return True
return False
@property
def record(self):
return self.params["widget_data"]
def custom_styles(self):
if self.record and "styles" in self.record:
return self.record["styles"]
else:
return None
def card_list_class(self):
context = aq_inner(self.context)
css_class = "c-list c-list--gutter c-list--grid c-list--{}".format(
context.UID()
)
custom_styles = self.custom_styles()
if custom_styles:
            class_container = custom_styles.get("class_container", "")
            for class_name in class_container.split():
                css_class = "{0} c-list--{1}".format(css_class, class_name)
if "custom" in custom_styles:
css_class = "{0} {1}".format(css_class, custom_styles["custom"])
return css_class
def widget_uid(self):
try:
widget_id = self.record["id"]
except (KeyError, TypeError):
widget_id = str(uuid_tool.uuid4())
return widget_id
def card_subject_classes(self, item):
subjects = item.Subject
class_list = [self.filter_value(keyword) for keyword in subjects]
return class_list
def card_css_classes(self, item):
class_list = self.card_subject_classes(item)
if class_list:
return " ".join(class_list)
else:
return "o-tag--all"
def available_keywords(self):
context = aq_inner(self.context)
keyword_vocabulary = KeywordsVocabulary()
vocabulary = keyword_vocabulary(context)
return vocabulary
def normalized_token(self, term):
return self.normalizer().normalize(term, locale="de")
def normalized_keywords(self):
vocabulary = self.available_keywords()
taxonomy = dict()
for index, term in enumerate(vocabulary):
element_value = term.value
taxonomy[index] = element_value
return taxonomy
def filter_value(self, term):
vocabulary = self.normalized_keywords()
filter_value = "o-tag--undefined"
for item_index, item_term in vocabulary.items():
if item_term == term:
filter_value = "o-tag--{0}".format(str(item_index))
return filter_value
def content_items(self):
results = []
display_limit = int(self.widget_stored_data().get("display_limit", "24"))
brains = self.contained_content_items(limit=display_limit)
for brain in brains:
results.append(
{
"title": brain.Title,
"description": brain.Description,
"url": brain.getURL(),
"timestamp": brain.Date,
"uuid": brain.UID,
"css_classes": "c-card-list__item--{0} {1} {2}".format(
brain.UID,
"c-card-list__item--{0}".format(
self.widget_stored_data().get(
"display_columns", "width-100"
)
),
self.card_css_classes(brain),
),
}
)
return results
def _base_query(self):
context = aq_inner(self.context)
return dict(
portal_type=[
"ade25.sitecontent.contentpage",
"ade25.sitecontent.sectionfolder",
],
path=dict(query="/".join(context.getPhysicalPath()), depth=1),
review_state="published",
sort_on="getObjPositionInParent",
)
def contained_content_items(self, limit=20):
context = aq_inner(self.context)
container = context
catalog = api.portal.get_tool(name="portal_catalog")
query = self._base_query()
query["sort_limit"] = limit
if IPage.providedBy(container):
container = aq_parent(container)
query["path"] = dict(query="/".join(container.getPhysicalPath()), depth=1)
items = catalog.searchResults(query)[:limit]
return items
def widget_stored_data(self):
context = aq_inner(self.context)
try:
storage = IContentWidgets(context)
content = storage.read_widget(self.widget_uid())
except TypeError:
content = dict()
return content
def get_widget_content(self, entry, fallback):
widget_content = self.widget_stored_data()
try:
            entry_value = self.record[entry]
        except (KeyError, TypeError):
entry_value = widget_content.get(entry, fallback)
return entry_value
def widget_content(self):
translation_service = api.portal.get_tool(name="translation_service")
data = {
"title": self.get_widget_content("title", None),
"batch": self.get_widget_content("display_batch", False),
"images": self.get_widget_content("display_images", True),
"abstract": self.get_widget_content("display_abstract", True),
"limit": self.get_widget_content("display_limit", None),
"read_more": self.get_widget_content("display_read_more", True),
"read_more_value": self.get_widget_content(
"read_more_text",
translation_service.translate(
"Read more",
"ade25.widgets",
target_language=api.portal.get_default_language(),
)
),
"read_more_layout": self.get_widget_content("read_more_layout", "link"),
"image_scale": self.get_widget_content("image_scale", "ratio-4:3"),
"display_columns": self.get_widget_content("display_columns", "width-33"),
"items": self.content_items(),
}
return data
class FilterableCardListingWidget(BrowserView):
""" Basic context content listing """
def __call__(self, widget_data=None, widget_mode="view", **kw):
self.params = {"widget_mode": widget_mode, "widget_data": widget_data}
self.has_content = len(self.contained_content_items()) > 0
return self.render()
def render(self):
return self.index()
def contained_content_items(self):
context = aq_inner(self.context)
items = api.content.find(
context=context,
depth=1,
portal_type=[
"ade25.sitecontent.contentpage",
"ade25.sitecontent.sectionfolder",
],
review_state="published",
)
return items
| 35.189498
| 86
| 0.586583
| 1,640
| 15,413
| 5.284146
| 0.143902
| 0.043503
| 0.038772
| 0.04385
| 0.805331
| 0.786868
| 0.776598
| 0.759982
| 0.746596
| 0.736095
| 0
| 0.006408
| 0.31149
| 15,413
| 437
| 87
| 35.270023
| 0.810215
| 0.041718
| 0
| 0.753425
| 0
| 0
| 0.108931
| 0.018257
| 0
| 0
| 0
| 0
| 0
| 1
| 0.120548
| false
| 0
| 0.030137
| 0.024658
| 0.293151
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
beba10723eb905f5b55864f0d9bf87ca4c316d70
| 62,200
|
py
|
Python
|
tests/test_config.py
|
MarkMoTrin/model_analyzer
|
5d5c49f409fc7cc14c21529e8424729b0cc57627
|
[
"Apache-2.0"
] | null | null | null |
tests/test_config.py
|
MarkMoTrin/model_analyzer
|
5d5c49f409fc7cc14c21529e8424729b0cc57627
|
[
"Apache-2.0"
] | null | null | null |
tests/test_config.py
|
MarkMoTrin/model_analyzer
|
5d5c49f409fc7cc14c21529e8424729b0cc57627
|
[
"Apache-2.0"
] | null | null | null |
# Copyright (c) 2021 NVIDIA CORPORATION & AFFILIATES. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import re
from .mocks.mock_config import MockConfig
from .mocks.mock_numba import MockNumba
from .common import test_result_collector as trc
from model_analyzer.model_analyzer_exceptions \
import TritonModelAnalyzerException
from model_analyzer.cli.cli import CLI
from model_analyzer.config.input.objects.config_plot import ConfigPlot
from model_analyzer.config.input.config_list_string import ConfigListString
from model_analyzer.config.input.config_list_generic import ConfigListGeneric
from model_analyzer.config.input.config_primitive import ConfigPrimitive
from model_analyzer.config.input.config_union import ConfigUnion
from model_analyzer.config.input.config_object import ConfigObject
from model_analyzer.config.input.config_enum import ConfigEnum
from model_analyzer.config.input.config_sweep import ConfigSweep
from model_analyzer.config.input.config_list_numeric import \
ConfigListNumeric
from model_analyzer.config.input.config_command_profile \
import ConfigCommandProfile
from model_analyzer.config.input.config_command_analyze \
import ConfigCommandAnalyze
from model_analyzer.config.input.config_command_report \
import ConfigCommandReport
from model_analyzer.config.input.objects.config_model_profile_spec \
import ConfigModelProfileSpec
from model_analyzer.constants import \
CONFIG_PARSER_FAILURE
class TestConfig(trc.TestResultCollector):
def _evaluate_config(self, args, yaml_content, subcommand='profile'):
mock_numba = MockNumba(
mock_paths=['model_analyzer.config.input.config_command_profile'])
mock_config = MockConfig(args, yaml_content)
mock_config.start()
mock_numba.start()
if subcommand == 'report':
config = ConfigCommandReport()
elif subcommand == 'analyze':
config = ConfigCommandAnalyze()
else:
config = ConfigCommandProfile()
cli = CLI()
cli.add_subcommand(cmd=subcommand,
config=config,
help="Test subcommand help")
cli.parse()
mock_config.stop()
mock_numba.stop()
return config
def _assert_error_on_evaluate_config(self,
args,
yaml_content,
subcommand='profile'):
mock_config = MockConfig(args, yaml_content)
mock_config.start()
if subcommand == 'report':
config = ConfigCommandReport()
elif subcommand == 'analyze':
config = ConfigCommandAnalyze()
else:
config = ConfigCommandProfile()
cli = CLI()
cli.add_subcommand(cmd=subcommand,
config=config,
help="Test subcommand help")
        # When a required field is not specified, parsing will raise an
        # exception.
with self.assertRaises(TritonModelAnalyzerException):
cli.parse()
mock_config.stop()
def _assert_equality_of_model_configs(self, model_configs,
expected_model_configs):
self.assertEqual(len(model_configs), len(expected_model_configs))
for model_config, expected_model_config \
in zip(model_configs, expected_model_configs):
self.assertEqual(expected_model_config.model_name(),
model_config.model_name())
self.assertEqual(expected_model_config.parameters(),
model_config.parameters())
self.assertEqual(expected_model_config.constraints(),
model_config.constraints())
self.assertEqual(expected_model_config.objectives(),
model_config.objectives())
self.assertEqual(expected_model_config.model_config_parameters(),
model_config.model_config_parameters())
def _assert_equality_of_plot_configs(self, plot_configs,
expected_plot_configs):
self.assertEqual(len(plot_configs), len(expected_plot_configs))
for plot_config, expected_plot_config \
in zip(plot_configs, expected_plot_configs):
self.assertEqual(expected_plot_config.name(), plot_config.name())
self.assertEqual(expected_plot_config.title(), plot_config.title())
self.assertEqual(expected_plot_config.x_axis(),
plot_config.x_axis())
self.assertEqual(expected_plot_config.y_axis(),
plot_config.y_axis())
self.assertEqual(expected_plot_config.monotonic(),
plot_config.monotonic())
def _assert_model_config_types(self, model_config):
self.assertIsInstance(model_config.field_type(), ConfigUnion)
if isinstance(model_config.field_type().raw_value(), ConfigListGeneric):
self.assertIsInstance(
model_config.field_type().raw_value().container_type(),
ConfigUnion)
else:
self.assertIsInstance(model_config.field_type().raw_value(),
ConfigObject)
def _assert_model_object_types(self,
model_config,
model_name,
check_parameters=False,
check_concurrency=False,
check_batch_size=False):
if isinstance(model_config, ConfigUnion):
self.assertIsInstance(model_config.raw_value(), ConfigObject)
self.assertIsInstance(
model_config.raw_value().raw_value()[model_name], ConfigObject)
model_config = model_config.raw_value().raw_value()[model_name]
else:
self.assertIsInstance(model_config, ConfigObject)
if check_parameters:
parameters_config = model_config.raw_value()['parameters']
self.assertIsInstance(parameters_config, ConfigObject)
if check_concurrency:
self.assertIsInstance(
parameters_config.raw_value()['concurrency'],
ConfigListNumeric)
def _assert_model_config_params(self, model_config_parameters):
self.assertIsInstance(model_config_parameters, ConfigObject)
input_param = model_config_parameters.raw_value()['input']
self.assertIsInstance(input_param, ConfigSweep)
# Is list of params
self.assertIsInstance(input_param.raw_value(), ConfigListGeneric)
# Each subitem is also a list
self.assertIsInstance(input_param.raw_value().container_type(),
ConfigUnion)
single_sweep_param = input_param.raw_value().raw_value()[0].raw_value()
self.assertIsInstance(single_sweep_param.raw_value()[0], ConfigObject)
# Check types for 'name'
name_param = single_sweep_param.raw_value()[0].raw_value()['name']
self.assertIsInstance(name_param, ConfigSweep)
self.assertIsInstance(name_param.raw_value(), ConfigListGeneric)
self.assertIsInstance(name_param.raw_value().container_type(),
ConfigUnion)
# Check types for 'data_type'
data_type_param = single_sweep_param.raw_value()[0].raw_value(
)['data_type']
self.assertIsInstance(data_type_param, ConfigSweep)
self.assertIsInstance(data_type_param.raw_value(), ConfigListGeneric)
self.assertIsInstance(data_type_param.raw_value().container_type(),
ConfigUnion)
self.assertIsInstance(data_type_param.raw_value().raw_value()[0],
ConfigUnion)
self.assertIsInstance(
data_type_param.raw_value().raw_value()[0].raw_value(), ConfigEnum)
# Check types for 'dims'
dims_param = single_sweep_param.raw_value()[0].raw_value()['dims']
self.assertIsInstance(dims_param, ConfigSweep)
self.assertIsInstance(dims_param.raw_value(), ConfigListGeneric)
self.assertIsInstance(dims_param.raw_value().container_type(),
ConfigUnion)
self.assertIsInstance(dims_param.raw_value().raw_value()[0],
ConfigUnion)
self.assertIsInstance(dims_param.raw_value().raw_value()[0].raw_value(),
ConfigListGeneric)
self.assertIsInstance(
dims_param.raw_value().raw_value()[0].raw_value().raw_value()[0],
ConfigPrimitive)
# Check types for 'format'
format_param = single_sweep_param.raw_value()[0].raw_value()['format']
self.assertIsInstance(format_param, ConfigSweep)
self.assertIsInstance(format_param.raw_value(), ConfigListGeneric)
self.assertIsInstance(format_param.raw_value().container_type(),
ConfigUnion)
def _assert_model_str_type(self, model_config):
self.assertIsInstance(model_config, ConfigUnion)
self.assertIsInstance(model_config.raw_value(), ConfigPrimitive)
def test_config(self):
args = [
'model-analyzer', 'profile', '--model-repository', 'cli_repository',
'-f', 'path-to-config-file', '--profile-models', 'vgg11'
]
yaml_content = 'model_repository: yaml_repository'
config = self._evaluate_config(args, yaml_content)
# CLI flag has the highest priority
self.assertTrue(
config.get_all_config()['model_repository'] == 'cli_repository')
args = [
'model-analyzer', 'profile', '-f', 'path-to-config-file',
'--profile-models', 'vgg11'
]
yaml_content = 'model_repository: yaml_repository'
config = self._evaluate_config(args, yaml_content)
# If CLI flag doesn't exist, YAML config has the highest priority
self.assertTrue(
config.get_all_config()['model_repository'] == 'yaml_repository')
args = ['model-analyzer', 'profile', '-f', 'path-to-config-file']
yaml_content = 'model_repository: yaml_repository'
self._assert_error_on_evaluate_config(args, yaml_content)
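    # The assertions above pin down the precedence order: CLI flag beats the
    # YAML entry, which beats the built-in default. A minimal sketch of that
    # rule (hypothetical helper, not part of model-analyzer):
    #
    #     def resolve(cli_value, yaml_value, default):
    #         if cli_value is not None:
    #             return cli_value
    #         return yaml_value if yaml_value is not None else default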
def test_range_and_list_values(self):
args = [
'model-analyzer', 'profile', '--model-repository', 'cli_repository',
'-f', 'path-to-config-file'
]
yaml_content = 'profile_models: model_1,model_2'
config = self._evaluate_config(args, yaml_content)
expected_model_configs = [
ConfigModelProfileSpec('model_1',
parameters={
'batch_sizes': [1],
'concurrency': []
},
objectives={'perf_throughput': 10}),
ConfigModelProfileSpec('model_2',
parameters={
'batch_sizes': [1],
'concurrency': []
},
objectives={'perf_throughput': 10})
]
self._assert_equality_of_model_configs(
config.get_all_config()['profile_models'], expected_model_configs)
self.assertIsInstance(
config.get_config()['profile_models'].field_type().raw_value(),
ConfigObject)
yaml_content = """
profile_models:
- model_1
- model_2
"""
config = self._evaluate_config(args, yaml_content)
self._assert_equality_of_model_configs(
config.get_all_config()['profile_models'], expected_model_configs)
model_config = config.get_config()['profile_models']
self._assert_model_config_types(model_config)
self.assertIsInstance(
model_config.field_type().raw_value().raw_value()
['model_1'].raw_value()['objectives'], ConfigUnion)
self.assertIsInstance(
model_config.field_type().raw_value().raw_value()
['model_1'].raw_value()['parameters'], ConfigObject)
args = [
'model-analyzer', 'profile', '--model-repository', 'cli_repository',
'-f', 'path-to-config-file', '--profile-models', 'model_1,model_2'
]
yaml_content = """
batch_sizes:
- 2
- 3
"""
config = self._evaluate_config(args, yaml_content)
self.assertTrue(config.get_all_config()['batch_sizes'] == [2, 3])
self.assertIsInstance(config.get_config()['batch_sizes'].field_type(),
ConfigListNumeric)
yaml_content = """
batch_sizes: 2
"""
config = self._evaluate_config(args, yaml_content)
self.assertTrue(config.get_all_config()['batch_sizes'] == [2])
self.assertIsInstance(config.get_config()['batch_sizes'].field_type(),
ConfigListNumeric)
yaml_content = """
concurrency: 2
"""
config = self._evaluate_config(args, yaml_content)
self.assertTrue(config.get_all_config()['concurrency'] == [2])
self.assertIsInstance(config.get_config()['concurrency'].field_type(),
ConfigListNumeric)
self.assertTrue(config.get_all_config()['batch_sizes'] == [1])
self.assertIsInstance(config.get_config()['batch_sizes'].field_type(),
ConfigListNumeric)
yaml_content = """
batch_sizes:
start: 2
stop: 6
"""
config = self._evaluate_config(args, yaml_content)
self.assertTrue(
config.get_all_config()['batch_sizes'] == [2, 3, 4, 5, 6])
self.assertIsInstance(config.get_config()['batch_sizes'].field_type(),
ConfigListNumeric)
yaml_content = """
batch_sizes:
start: 2
stop: 6
step: 2
"""
config = self._evaluate_config(args, yaml_content)
self.assertTrue(config.get_all_config()['batch_sizes'] == [2, 4, 6])
self.assertIsInstance(config.get_config()['batch_sizes'].field_type(),
ConfigListNumeric)
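    # The range dictionaries above expand to inclusive sweeps, e.g.
    # {start: 2, stop: 6} -> [2, 3, 4, 5, 6] and
    # {start: 2, stop: 6, step: 2} -> [2, 4, 6]. A rough equivalent of the
    # expansion (inferred from the expected values, not the parser
    # internals):
    #
    #     def expand(start, stop, step=1):
    #         return list(range(start, stop + 1, step))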
def test_object(self):
args = [
'model-analyzer', 'profile', '--model-repository', 'cli_repository',
'-f', 'path-to-config-file'
]
yaml_content = """
profile_models:
-
vgg_16_graphdef:
parameters:
concurrency:
- 1
- 2
- 3
- 4
- vgg_19_graphdef
"""
config = self._evaluate_config(args, yaml_content)
model_config = config.get_config()['profile_models']
self._assert_model_config_types(model_config)
expected_model_objects = [
ConfigModelProfileSpec('vgg_16_graphdef',
parameters={
'batch_sizes': [1],
'concurrency': [1, 2, 3, 4]
},
objectives={'perf_throughput': 10}),
ConfigModelProfileSpec('vgg_19_graphdef',
parameters={
'batch_sizes': [1],
'concurrency': []
},
objectives={'perf_throughput': 10})
]
# Check the types for the first value
first_model = model_config.field_type().raw_value().raw_value(
)['vgg_16_graphdef']
self._assert_model_object_types(first_model,
'vgg_16_graphdef',
check_parameters=True,
check_concurrency=True)
# Check the types for the second value
second_model = model_config.field_type().raw_value().raw_value(
)['vgg_19_graphdef']
self._assert_model_object_types(second_model, 'vgg_19_graphdef')
self._assert_equality_of_model_configs(
config.get_all_config()['profile_models'], expected_model_objects)
yaml_content = """
profile_models:
vgg_16_graphdef:
parameters:
concurrency:
- 1
- 2
- 3
- 4
vgg_19_graphdef:
parameters:
concurrency:
- 1
- 2
- 3
- 4
batch_sizes:
start: 2
stop: 6
step: 2
"""
expected_model_objects = [
ConfigModelProfileSpec('vgg_16_graphdef',
parameters={
'batch_sizes': [1],
'concurrency': [1, 2, 3, 4]
},
objectives={'perf_throughput': 10}),
ConfigModelProfileSpec('vgg_19_graphdef',
parameters={
'concurrency': [1, 2, 3, 4],
'batch_sizes': [2, 4, 6]
},
objectives={'perf_throughput': 10})
]
config = self._evaluate_config(args, yaml_content)
self._assert_equality_of_model_configs(
config.get_all_config()['profile_models'], expected_model_objects)
model_config = config.get_config()['profile_models']
self._assert_model_config_types(model_config)
# first model
first_model = model_config.field_type().raw_value().raw_value(
)['vgg_16_graphdef']
self._assert_model_object_types(first_model,
'vgg_16_graphdef',
check_parameters=True,
check_concurrency=True,
check_batch_size=True)
# second model
second_model = model_config.field_type().raw_value().raw_value(
)['vgg_19_graphdef']
self._assert_model_object_types(second_model,
'vgg_19_graphdef',
check_parameters=True,
check_concurrency=True,
check_batch_size=True)
def test_constraints(self):
args = [
'model-analyzer', 'profile', '--model-repository', 'cli_repository',
'-f', 'path-to-config-file'
]
yaml_content = """
profile_models:
-
vgg_16_graphdef:
parameters:
concurrency:
- 1
- 2
- 3
- 4
objectives:
perf_throughput: 10
gpu_used_memory: 5
constraints:
gpu_used_memory:
max: 80
- vgg_19_graphdef
"""
config = self._evaluate_config(args, yaml_content)
expected_model_objects = [
ConfigModelProfileSpec(
'vgg_16_graphdef',
parameters={
'batch_sizes': [1],
'concurrency': [1, 2, 3, 4]
},
objectives={
'perf_throughput': 10,
'gpu_used_memory': 5
},
constraints={'gpu_used_memory': {
'max': 80,
}}),
ConfigModelProfileSpec('vgg_19_graphdef',
parameters={
'batch_sizes': [1],
'concurrency': []
},
objectives={'perf_throughput': 10})
]
self._assert_equality_of_model_configs(
config.get_all_config()['profile_models'], expected_model_objects)
# GPU Memory shouldn't have min
yaml_content = """
profile_models:
-
vgg_16_graphdef:
parameters:
concurrency:
- 1
- 2
- 3
- 4
objectives:
- perf_throughput
- gpu_used_memory
constraints:
gpu_memory:
max: 80
min: 45
- vgg_19_graphdef
"""
self._assert_error_on_evaluate_config(args, yaml_content)
# Test objective key that is not one of the supported metrics
yaml_content = """
profile_models:
-
vgg_16_graphdef:
parameters:
concurrency:
- 1
- 2
- 3
- 4
objectives:
- throughput
constraints:
gpu_used_memory:
max: 80
- vgg_19_graphdef
"""
self._assert_error_on_evaluate_config(args, yaml_content)
def test_validation(self):
args = [
'model-analyzer', 'profile', '--model-repository', 'cli_repository',
'-f', 'path-to-config-file'
]
# end key should not be included in concurrency
yaml_content = """
profile_models:
-
vgg_16_graphdef:
parameters:
concurrency:
start: 4
stop: 12
end: 2
"""
self._assert_error_on_evaluate_config(args, yaml_content)
args = [
'model-analyzer', 'profile', '--model-repository', 'cli_repository',
'-f', 'path-to-config-file'
]
yaml_content = """
profile_models:
-
vgg_16_graphdef:
parameters:
concurrency:
start: 13
stop: 12
"""
self._assert_error_on_evaluate_config(args, yaml_content)
def test_config_model(self):
args = [
'model-analyzer', 'profile', '--model-repository', 'cli_repository',
'-f', 'path-to-config-file'
]
yaml_content = """
profile_models:
-
vgg_16_graphdef:
model_config_parameters:
instance_group:
-
kind: KIND_GPU
count: 1
"""
config = self._evaluate_config(args, yaml_content)
model_configs = config.get_all_config()['profile_models']
expected_model_configs = [
ConfigModelProfileSpec('vgg_16_graphdef',
parameters={
'batch_sizes': [1],
'concurrency': []
},
objectives={'perf_throughput': 10},
model_config_parameters={
'instance_group': [[{
'kind': ['KIND_GPU'],
'count': [1]
}]]
})
]
self._assert_equality_of_model_configs(model_configs,
expected_model_configs)
yaml_content = """
profile_models:
-
vgg_16_graphdef:
model_config_parameters:
instance_group:
-
kind: KIND_GPU
count: 1
"""
config = self._evaluate_config(args, yaml_content)
model_configs = config.get_all_config()['profile_models']
expected_model_configs = [
ConfigModelProfileSpec('vgg_16_graphdef',
parameters={
'batch_sizes': [1],
'concurrency': []
},
objectives={'perf_throughput': 10},
model_config_parameters={
'instance_group': [[{
'kind': ['KIND_GPU'],
'count': [1]
}]]
})
]
self._assert_equality_of_model_configs(model_configs,
expected_model_configs)
args = [
'model-analyzer', 'profile', '--model-repository', 'cli_repository',
'-f', 'path-to-config-file'
]
yaml_content = """
profile_models:
-
vgg_16_graphdef:
model_config_parameters:
instance_group:
-
kind: KIND_GPU
count: 1
-
kind: KIND_CPU
count: 1
"""
config = self._evaluate_config(args, yaml_content)
model_configs = config.get_all_config()['profile_models']
expected_model_configs = [
ConfigModelProfileSpec('vgg_16_graphdef',
parameters={
'batch_sizes': [1],
'concurrency': []
},
objectives={'perf_throughput': 10},
model_config_parameters={
'instance_group': [[{
'kind': ['KIND_GPU'],
'count': [1]
}, {
'kind': ['KIND_CPU'],
'count': [1]
}]]
})
]
self._assert_equality_of_model_configs(model_configs,
expected_model_configs)
args = [
'model-analyzer', 'profile', '--model-repository', 'cli_repository',
'-f', 'path-to-config-file'
]
yaml_content = """
profile_models:
-
vgg_16_graphdef:
model_config_parameters:
instance_group:
-
-
kind: KIND_GPU
count: 1
-
-
kind: KIND_CPU
count: 1
"""
config = self._evaluate_config(args, yaml_content)
model_configs = config.get_all_config()['profile_models']
expected_model_configs = [
ConfigModelProfileSpec('vgg_16_graphdef',
parameters={
'batch_sizes': [1],
'concurrency': []
},
objectives={'perf_throughput': 10},
model_config_parameters={
'instance_group': [[{
'kind': ['KIND_GPU'],
'count': [1]
}], [{
'kind': ['KIND_CPU'],
'count': [1]
}]]
})
]
self._assert_equality_of_model_configs(model_configs,
expected_model_configs)
yaml_content = """
profile_models:
-
vgg_16_graphdef:
model_config_parameters:
input:
-
name: NV_MODEL_INPUT
data_type: TYPE_FP32
format: FORMAT_NHWC
dims: [256, 256, 3]
"""
config = self._evaluate_config(args, yaml_content)
model_configs = config.get_all_config()['profile_models']
expected_model_configs = [
ConfigModelProfileSpec('vgg_16_graphdef',
parameters={
'batch_sizes': [1],
'concurrency': []
},
objectives={'perf_throughput': 10},
model_config_parameters={
'input': [[{
'name': ['NV_MODEL_INPUT'],
'data_type': ['TYPE_FP32'],
'format': ['FORMAT_NHWC'],
'dims': [[256, 256, 3]]
}]]
})
]
model_config = config.get_config()['profile_models']
self._assert_model_config_types(model_config)
model = model_config.field_type().raw_value().raw_value(
)['vgg_16_graphdef']
self._assert_model_object_types(model, 'vgg_16_graphdef')
model_config_parameters = model.raw_value()['model_config_parameters']
self._assert_model_config_params(model_config_parameters)
self._assert_equality_of_model_configs(model_configs,
expected_model_configs)
yaml_content = """
profile_models:
-
vgg_16_graphdef:
perf_analyzer_flags:
measurement-interval: 10000
model-version: 2
streaming: "header:value"
"""
config = self._evaluate_config(args, yaml_content)
model_configs = config.get_all_config()['profile_models']
expected_model_configs = [
ConfigModelProfileSpec('vgg_16_graphdef',
parameters={
'batch_sizes': [1],
'concurrency': []
},
objectives={'perf_throughput': 10},
perf_analyzer_flags={
'measurement-interval': 10000,
'model-version': 2,
'streaming': 'header:value'
})
]
self._assert_equality_of_model_configs(model_configs,
expected_model_configs)
yaml_content = """
profile_models:
-
vgg_16_graphdef:
perf_analyzer_flags:
measurement-interval: 10000
model-version: 2
shape: ["name1:1,2,3", "name2:4,5,6"]
"""
config = self._evaluate_config(args, yaml_content)
model_configs = config.get_all_config()['profile_models']
expected_model_configs = [
ConfigModelProfileSpec('vgg_16_graphdef',
parameters={
'batch_sizes': [1],
'concurrency': []
},
objectives={'perf_throughput': 10},
perf_analyzer_flags={
'measurement-interval': 10000,
'model-version': 2,
'shape': ["name1:1,2,3", "name2:4,5,6"]
})
]
self._assert_equality_of_model_configs(model_configs,
expected_model_configs)
yaml_content = """
profile_models:
-
vgg_16_graphdef:
perf_analyzer_flags:
latency_report_file: ["file1", "file2"]
"""
with self.assertRaises(TritonModelAnalyzerException):
# latency_report_file is not additive
config = self._evaluate_config(args, yaml_content)
yaml_content = """
profile_models:
-
vgg_16_graphdef:
perf_analyzer_flags:
disallowed-perf-flag: some_value
"""
with self.assertRaises(TritonModelAnalyzerException):
config = self._evaluate_config(args, yaml_content)
def test_config_sweep(self):
config_sweep = ConfigSweep(ConfigPrimitive(int))
config_sweep.set_value(2)
def test_config_plot(self):
args = ['model-analyzer', 'report', '-f', 'path-to-config-file']
yaml_content = """
report_model_configs: vgg_16_graphdef
plots:
test_plot:
title: Throughput vs. Latency
x_axis: perf_throughput
y_axis: perf_latency_p99
"""
config = self._evaluate_config(args, yaml_content, subcommand='report')
plot_configs = config.get_all_config()['plots']
expected_plot_configs = [
ConfigPlot('test_plot',
title='Throughput vs. Latency',
x_axis='perf_throughput',
y_axis='perf_latency_p99')
]
self._assert_equality_of_plot_configs(plot_configs,
expected_plot_configs)
yaml_content = """
report_model_configs: vgg_16_graphdef
plots:
- test_plot1:
title: Throughput vs. Latency
x_axis: perf_throughput
y_axis: perf_latency_p99
- test_plot2:
title: GPU Memory vs. Latency
x_axis: gpu_used_memory
y_axis: perf_latency_p99
"""
config = self._evaluate_config(args, yaml_content, subcommand='report')
plot_configs = config.get_all_config()['plots']
expected_plot_configs = [
ConfigPlot('test_plot1',
title='Throughput vs. Latency',
x_axis='perf_throughput',
y_axis='perf_latency_p99'),
ConfigPlot('test_plot2',
title='GPU Memory vs. Latency',
x_axis='gpu_used_memory',
y_axis='perf_latency_p99')
]
self._assert_equality_of_plot_configs(plot_configs,
expected_plot_configs)
yaml_content = """
report_model_configs: vgg_16_graphdef
plots:
test_plot1:
title: Throughput vs. Latency
x_axis: perf_throughput
y_axis: perf_latency_p99
test_plot2:
title: GPU Memory vs. Latency
x_axis: gpu_used_memory
y_axis: perf_latency_p99
"""
config = self._evaluate_config(args, yaml_content, subcommand='report')
plot_configs = config.get_all_config()['plots']
expected_plot_configs = [
ConfigPlot('test_plot1',
title='Throughput vs. Latency',
x_axis='perf_throughput',
y_axis='perf_latency_p99'),
ConfigPlot('test_plot2',
title='GPU Memory vs. Latency',
x_axis='gpu_used_memory',
y_axis='perf_latency_p99')
]
self._assert_equality_of_plot_configs(plot_configs,
expected_plot_configs)
yaml_content = """
report_model_configs:
vgg_16_graphdef:
plots:
test_plot1:
title: Throughput vs. Latency
x_axis: perf_throughput
y_axis: perf_latency_p99
monotonic: True
test_plot2:
title: GPU Memory vs. Latency
x_axis: gpu_used_memory
y_axis: perf_latency_p99
monotonic: False
"""
config = self._evaluate_config(args, yaml_content, subcommand='report')
plot_configs = config.get_all_config()['report_model_configs'][0].plots(
)
expected_plot_configs = [
ConfigPlot('test_plot1',
title='Throughput vs. Latency',
x_axis='perf_throughput',
y_axis='perf_latency_p99',
monotonic=True),
ConfigPlot('test_plot2',
title='GPU Memory vs. Latency',
x_axis='gpu_used_memory',
y_axis='perf_latency_p99',
monotonic=False)
]
self._assert_equality_of_plot_configs(plot_configs,
expected_plot_configs)
def test_error_messages(self):
# ConfigListNumeric
config_numeric = ConfigListNumeric(float)
config_numeric.set_name('key')
config_status = config_numeric.set_value({
'start': 12,
'stop': 15,
'undefined_key': 8
})
config_message = config_status.message()
result = re.search('.*\'start\'.*\'stop\'.*\'undefined_key\'.*',
config_message)
self.assertTrue(config_status.status() == CONFIG_PARSER_FAILURE)
self.assertIsNotNone(config_message)
self.assertIsNotNone(result)
config_numeric = ConfigListNumeric(float)
config_numeric.set_name('key')
config_status = config_numeric.set_value({'start': 12, 'stop': 'two'})
        config_message = config_status.message()
        print(config_message)
self.assertTrue(config_status.status() == CONFIG_PARSER_FAILURE)
config_numeric = ConfigListNumeric(float)
config_numeric.set_name('key')
config_status = config_numeric.set_value({'start': 'five', 'stop': 2})
config_message = config_status.message()
print(config_message)
self.assertTrue(config_status.status() == CONFIG_PARSER_FAILURE)
config_numeric = ConfigListNumeric(float)
config_numeric.set_name('key')
config_status = config_numeric.set_value({'start': 10, 'stop': 2})
print(config_status.message())
self.assertTrue(config_status.status() == CONFIG_PARSER_FAILURE)
# ConfigUnion error message
config_union = ConfigUnion(
[ConfigListNumeric(float),
ConfigPrimitive(str)])
config_union.set_name('key')
# Dictionaries are not accepted.
config_status = config_union.set_value({'a': 'b'})
print(config_status.message())
self.assertTrue(config_status.status() == CONFIG_PARSER_FAILURE)
# ConfigEnum
config_enum = ConfigEnum(['a', 'b'])
config_enum.set_name('key')
config_status = config_enum.set_value('c')
print(config_status.message())
self.assertTrue(config_status.status() == CONFIG_PARSER_FAILURE)
# ConfigListGeneric
config_list_generic = ConfigListGeneric(ConfigPrimitive(float))
config_list_generic.set_name('key')
config_status = config_list_generic.set_value({'a': 'b'})
print(config_status.message())
self.assertTrue(config_status.status() == CONFIG_PARSER_FAILURE)
# ConfigListString
config_list_string = ConfigListString()
config_list_string.set_name('key')
config_status = config_list_string.set_value({'a': 'b'})
print(config_status.message())
self.assertTrue(config_status.status() == CONFIG_PARSER_FAILURE)
config_status = config_list_string.set_value([{'a': 'b'}])
print(config_status.message())
self.assertTrue(config_status.status() == CONFIG_PARSER_FAILURE)
# ConfigObject
config_object = ConfigObject(schema={'key': ConfigPrimitive(float)})
config_object.set_name('key')
config_status = config_object.set_value({'undefiend_key': 2.0})
print(config_status.message())
self.assertTrue(config_status.status() == CONFIG_PARSER_FAILURE)
config_status = config_object.set_value({'key': [1, 2, 3]})
print(config_status.message())
self.assertTrue(config_status.status() == CONFIG_PARSER_FAILURE)
config_status = config_object.set_value([1, 2, 3])
print(config_status.message())
self.assertTrue(config_status.status() == CONFIG_PARSER_FAILURE)
# ConfigPrimitive
config_primitive = ConfigPrimitive(float)
config_primitive.set_name('key')
config_status = config_primitive.set_value('a')
print(config_status.message())
self.assertTrue(config_status.status() == CONFIG_PARSER_FAILURE)
def test_autofill(self):
args = [
'model-analyzer', 'profile', '--model-repository', 'cli_repository',
'-f', 'path-to-config-file'
]
yaml_content = """
profile_models:
-
vgg_16_graphdef:
model_config_parameters:
instance_group:
-
kind: KIND_GPU
count: 1
"""
# Test defaults
config = self._evaluate_config(args, yaml_content)
model_configs = config.get_all_config()['profile_models']
expected_model_configs = [
ConfigModelProfileSpec('vgg_16_graphdef',
parameters={
'batch_sizes': [1],
'concurrency': []
},
objectives={'perf_throughput': 10},
model_config_parameters={
'instance_group': [[{
'kind': ['KIND_GPU'],
'count': [1]
}]]
})
]
self._assert_equality_of_model_configs(model_configs,
expected_model_configs)
yaml_content = """
objectives:
perf_throughput: 10
gpu_used_memory: 5
constraints:
gpu_used_memory:
max: 80
profile_models:
-
vgg_16_graphdef:
parameters:
batch_sizes:
- 16
- 32
concurrency:
start: 2
stop: 4
step: 2
model_config_parameters:
instance_group:
-
kind: KIND_GPU
count: 1
"""
# Test autofill objectives and constraints
config = self._evaluate_config(args, yaml_content)
model_configs = config.get_all_config()['profile_models']
expected_model_configs = [
ConfigModelProfileSpec(
'vgg_16_graphdef',
parameters={
'batch_sizes': [16, 32],
'concurrency': [2, 4]
},
objectives={
'perf_throughput': 10,
'gpu_used_memory': 5
},
constraints={'gpu_used_memory': {
'max': 80,
}},
model_config_parameters={
'instance_group': [[{
'kind': ['KIND_GPU'],
'count': [1]
}]]
})
]
self._assert_equality_of_model_configs(model_configs,
expected_model_configs)
yaml_content = """
concurrency:
start: 2
stop : 4
step: 2
profile_models:
-
vgg_16_graphdef:
parameters:
batch_sizes:
- 16
- 32
objectives:
gpu_used_memory: 10
constraints:
perf_latency_p99:
max: 8000
model_config_parameters:
instance_group:
-
kind: KIND_GPU
count: 1
"""
# Test autofill concurrency
config = self._evaluate_config(args, yaml_content)
model_configs = config.get_all_config()['profile_models']
expected_model_configs = [
ConfigModelProfileSpec(
'vgg_16_graphdef',
parameters={
'batch_sizes': [16, 32],
'concurrency': [2, 4]
},
objectives={'gpu_used_memory': 10},
constraints={'perf_latency_p99': {
'max': 8000
}},
model_config_parameters={
'instance_group': [[{
'kind': ['KIND_GPU'],
'count': [1]
}]]
})
]
self._assert_equality_of_model_configs(model_configs,
expected_model_configs)
yaml_content = """
batch_sizes:
- 16
- 32
profile_models:
-
vgg_16_graphdef:
parameters:
concurrency:
start: 2
stop : 4
step: 2
objectives:
- gpu_used_memory
constraints:
perf_latency_p99:
max: 8000
model_config_parameters:
instance_group:
-
kind: KIND_GPU
count: 1
"""
# Test autofill batch sizes
config = self._evaluate_config(args, yaml_content)
model_configs = config.get_all_config()['profile_models']
expected_model_configs = [
ConfigModelProfileSpec(
'vgg_16_graphdef',
parameters={
'batch_sizes': [16, 32],
'concurrency': [2, 4]
},
objectives={'gpu_used_memory': 10},
constraints={'perf_latency_p99': {
'max': 8000
}},
model_config_parameters={
'instance_group': [[{
'kind': ['KIND_GPU'],
'count': [1]
}]]
})
]
self._assert_equality_of_model_configs(model_configs,
expected_model_configs)
yaml_content = """
objectives:
perf_throughput: 10
perf_latency_p99: 5
constraints:
perf_latency_p99:
max: 8000
gpu_used_memory:
max: 10000
batch_sizes:
- 16
- 32
concurrency:
start: 2
stop: 4
step: 2
profile_models:
-
vgg_16_graphdef:
parameters:
concurrency:
start: 5
stop : 7
objectives:
- gpu_used_memory
-
vgg_19_graphdef:
parameters:
batch_sizes:
- 1
- 2
constraints:
perf_latency_p99:
max: 8000
"""
        # Test autofill of global parameters, objectives, and constraints
        # across multiple models
config = self._evaluate_config(args, yaml_content)
model_configs = config.get_all_config()['profile_models']
expected_model_configs = [
ConfigModelProfileSpec('vgg_16_graphdef',
parameters={
'batch_sizes': [16, 32],
'concurrency': [5, 6, 7]
},
objectives={'gpu_used_memory': 10},
constraints={
'perf_latency_p99': {
'max': 8000
},
'gpu_used_memory': {
'max': 10000
}
}),
ConfigModelProfileSpec(
'vgg_19_graphdef',
parameters={
'batch_sizes': [1, 2],
'concurrency': [2, 4]
},
objectives={
'perf_throughput': 10,
'perf_latency_p99': 5
},
constraints={'perf_latency_p99': {
'max': 8000
}})
]
self._assert_equality_of_model_configs(model_configs,
expected_model_configs)
def test_config_shorthands(self):
"""
test flags like --latency-budget
"""
for constraint_shorthand in [
('--latency-budget', 'max', 'perf_latency_p99'),
('--min-throughput', 'min', 'perf_throughput')
]:
args = [
'model-analyzer', 'analyze', '--analysis-models', 'test_model',
constraint_shorthand[0], '40'
]
# check that global and model specific constraints are filled
yaml_content = ""
config = self._evaluate_config(args,
yaml_content,
subcommand='analyze')
self.assertDictEqual(
config.get_all_config()['constraints'],
{constraint_shorthand[2]: {
constraint_shorthand[1]: 40
}})
self.assertDictEqual(
config.get_all_config()['analysis_models'][0].constraints(),
{constraint_shorthand[2]: {
constraint_shorthand[1]: 40
}})
# check that model specific constraints are appended to
args = [
'model-analyzer', 'analyze', constraint_shorthand[0], '40',
'-f', 'path-to-config-file'
]
yaml_content = """
analysis_models:
test_model:
constraints:
gpu_used_memory:
max : 100
"""
config = self._evaluate_config(args,
yaml_content,
subcommand='analyze')
self.assertDictEqual(
config.get_all_config()['constraints'],
{constraint_shorthand[2]: {
constraint_shorthand[1]: 40
}})
self.assertDictEqual(
config.get_all_config()['analysis_models'][0].constraints(), {
constraint_shorthand[2]: {
constraint_shorthand[1]: 40
},
'gpu_used_memory': {
'max': 100
}
})
# check that model specific constraints are replaced
yaml_content = f"""
analysis_models:
test_model:
constraints:
{constraint_shorthand[2]}:
{constraint_shorthand[1]} : 100
"""
config = self._evaluate_config(args,
yaml_content,
subcommand='analyze')
self.assertDictEqual(
config.get_all_config()['analysis_models'][0].constraints(),
{constraint_shorthand[2]: {
constraint_shorthand[1]: 40
}})
# check that global constraints are appended to
yaml_content = """
analysis_models: test_model
constraints:
gpu_used_memory:
max : 100
"""
config = self._evaluate_config(args,
yaml_content,
subcommand='analyze')
self.assertDictEqual(
config.get_all_config()['constraints'], {
constraint_shorthand[2]: {
constraint_shorthand[1]: 40
},
'gpu_used_memory': {
'max': 100
}
})
# check that global constraints are replaced
yaml_content = f"""
analysis_models: test_model
constraints:
{constraint_shorthand[2]}:
{constraint_shorthand[1]} : 100
"""
config = self._evaluate_config(args,
yaml_content,
subcommand='analyze')
self.assertDictEqual(
config.get_all_config()['constraints'],
{constraint_shorthand[2]: {
constraint_shorthand[1]: 40
}})
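    # Merge semantics exercised above: a shorthand flag is appended alongside
    # constraints on other metrics but replaces a constraint on the same
    # metric. A compact sketch of that rule (hypothetical helper):
    #
    #     def apply_shorthand(constraints, metric, bound, value):
    #         merged = dict(constraints)
    #         merged[metric] = {bound: value}  # same-metric entry is replaced
    #         return merged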
def test_triton_server_flags(self):
args = [
'model-analyzer', 'profile', '--model-repository', 'cli_repository',
'-f', 'path-to-config-file'
]
yaml_content = """
profile_models: model1, model2
triton_server_flags:
strict-model-config: false
backend-config: test_backend_config
"""
config = self._evaluate_config(args, yaml_content)
self.assertDictEqual(config.get_all_config()['triton_server_flags'], {
'strict-model-config': 'False',
'backend-config': 'test_backend_config'
})
yaml_content = """
profile_models: model1, model2
triton_server_flags:
disallowed-config-option: some_value
backend-config: test_backend_config
"""
with self.assertRaises(TritonModelAnalyzerException):
config = self._evaluate_config(args, yaml_content)
yaml_content = """
profile_models:
model1:
triton_server_flags:
strict_model_config: false
backend_config: test_backend_config
"""
config = self._evaluate_config(args, yaml_content)
self.assertDictEqual(
config.get_all_config()['profile_models'][0].triton_server_flags(),
{
'strict_model_config': 'False',
'backend_config': 'test_backend_config'
})
def test_triton_server_environment(self):
args = [
'model-analyzer', 'profile', '--model-repository', 'cli_repository',
'-f', 'path-to-config-file'
]
yaml_content = """
profile_models: model1, model2
triton_server_environment:
LD_PRELOAD: libtest.so
LD_LIBRARY_PATH: /path/to/test/lib
"""
config = self._evaluate_config(args, yaml_content)
self.assertDictEqual(
config.get_all_config()['triton_server_environment'], {
'LD_PRELOAD': 'libtest.so',
'LD_LIBRARY_PATH': '/path/to/test/lib'
})
yaml_content = """
profile_models:
model1:
triton_server_environment:
LD_PRELOAD: libtest.so
LD_LIBRARY_PATH: /path/to/test/lib
"""
config = self._evaluate_config(args, yaml_content)
self.assertDictEqual(
config.get_all_config()['profile_models']
[0].triton_server_environment(), {
'LD_PRELOAD': 'libtest.so',
'LD_LIBRARY_PATH': '/path/to/test/lib'
})
def test_report_configs(self):
args = [
'model-analyzer', 'report', '--report-model-configs', 'test-model'
]
yaml_content = ""
config = self._evaluate_config(args, yaml_content, subcommand='report')
self.assertEqual(
config.get_all_config()['report_model_configs']
[0].model_config_name(), 'test-model')
# check output format
args = [
'model-analyzer', 'report', '--report-model-configs', 'test-model',
'--output-formats', 'pdf'
]
config = self._evaluate_config(args, yaml_content, subcommand='report')
self.assertEqual(config.get_all_config()['output_formats'], ['pdf'])
args = [
'model-analyzer', 'report', '--report-model-configs', 'test-model',
'--output-formats', 'pdf,csv,svg'
]
config = self._evaluate_config(args, yaml_content, subcommand='report')
self.assertEqual(config.get_all_config()['output_formats'],
['pdf', 'csv', 'svg'])
# Check yaml report model config
args = ['model-analyzer', 'report', '-f', 'path-to-config-file']
yaml_content = """
report_model_configs:
- test_model_i0
output_formats:
- pdf
- csv
- png
"""
config = self._evaluate_config(args, yaml_content, subcommand='report')
self.assertEqual(
config.get_all_config()['report_model_configs']
[0].model_config_name(), 'test_model_i0')
self.assertEqual(config.get_all_config()['output_formats'],
['pdf', 'csv', 'png'])
# Check plots
args = ['model-analyzer', 'report', '-f', 'path-to-config-file']
yaml_content = """
report_model_configs:
- test_model_i0
- test_model_i1
plots:
throughput_v_latency:
title: Throughput vs. Latency
x_axis: perf_latency_p99
y_axis: perf_throughput
monotonic: True
"""
config = self._evaluate_config(args, yaml_content, subcommand='report')
self.assertEqual(
config.get_all_config()['report_model_configs']
[0].model_config_name(), 'test_model_i0')
self.assertEqual(
config.get_all_config()['report_model_configs']
[1].model_config_name(), 'test_model_i1')
expected_config_plot = {
'throughput_v_latency': {
'title': 'Throughput vs. Latency',
'x_axis': 'perf_latency_p99',
'y_axis': 'perf_throughput',
'monotonic': True
}
}
config_plot = config.get_all_config()['plots'][0]
config_plot_dict = {
config_plot.name(): {
'title': config_plot.title(),
'x_axis': config_plot.x_axis(),
'y_axis': config_plot.y_axis(),
'monotonic': config_plot.monotonic()
}
}
self.assertDictEqual(config_plot_dict, expected_config_plot)
for report_model_config in config.report_model_configs:
config_plot = report_model_config.plots()[0]
config_plot_dict = {
config_plot.name(): {
'title': config_plot.title(),
'x_axis': config_plot.x_axis(),
'y_axis': config_plot.y_axis(),
'monotonic': config_plot.monotonic()
}
}
self.assertDictEqual(config_plot_dict, expected_config_plot)
# Check individual plots
yaml_content = """
report_model_configs:
test_model_i0:
plots:
model_specific_throughput_v_latency:
title: model specific title
x_axis: perf_latency_p99
y_axis: perf_throughput
monotonic: True
plots:
throughput_v_latency:
title: Throughput vs. Latency
x_axis: perf_latency_p99
y_axis: perf_throughput
monotonic: True
"""
config = self._evaluate_config(args, yaml_content, subcommand='report')
global_config_plot = config.get_all_config()['plots'][0]
global_config_plot_dict = {
            global_config_plot.name(): {
'title': global_config_plot.title(),
'x_axis': global_config_plot.x_axis(),
'y_axis': global_config_plot.y_axis(),
'monotonic': global_config_plot.monotonic()
}
}
self.assertDictEqual(expected_config_plot, global_config_plot_dict)
model_specific_plot = config.get_all_config(
)['report_model_configs'][0].plots()[0]
self.assertEqual(model_specific_plot.name(),
'model_specific_throughput_v_latency')
self.assertEqual(model_specific_plot.title(), 'model specific title')
if __name__ == '__main__':
unittest.main()
| 38.089406
| 80
| 0.503424
| 5,299
| 62,200
| 5.552368
| 0.063219
| 0.038882
| 0.02804
| 0.038135
| 0.829515
| 0.789545
| 0.753484
| 0.725681
| 0.704337
| 0.677894
| 0
| 0.018241
| 0.405965
| 62,200
| 1,632
| 81
| 38.112745
| 0.778045
| 0.029502
| 0
| 0.706289
| 0
| 0
| 0.276153
| 0.01463
| 0
| 0
| 0
| 0
| 0.100898
| 1
| 0.015204
| false
| 0
| 0.014513
| 0
| 0.031099
| 0.008293
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
bebd653acd5201203de8228314fd990ec4dc5e60
| 14,456
|
py
|
Python
|
tests/metrics/test_axiomatic_metrics.py
|
sebastian-lapuschkin/Quantus
|
c3b8a9fb2018f34bd89ba38efa2b2b8c38128b3f
|
[
"MIT"
] | null | null | null |
tests/metrics/test_axiomatic_metrics.py
|
sebastian-lapuschkin/Quantus
|
c3b8a9fb2018f34bd89ba38efa2b2b8c38128b3f
|
[
"MIT"
] | null | null | null |
tests/metrics/test_axiomatic_metrics.py
|
sebastian-lapuschkin/Quantus
|
c3b8a9fb2018f34bd89ba38efa2b2b8c38128b3f
|
[
"MIT"
] | null | null | null |
from typing import Union
import numpy as np
import pytest
from pytest_lazyfixture import lazy_fixture
from ..fixtures import *
from ...quantus.metrics import *
from ...quantus.helpers import *
from ...quantus.helpers.explanation_func import explain
from ...quantus.helpers.pytorch_model import PyTorchModel
from ...quantus.helpers.tf_model import TensorFlowModel
@pytest.mark.axiomatic
@pytest.mark.parametrize(
"model,data,params,expected",
[
(
lazy_fixture("load_1d_3ch_conv_model"),
lazy_fixture("almost_uniform_1d"),
{
"normalise": True,
"disable_warnings": False,
"display_progressbar": False,
"a_batch_generate": False,
},
1.0,
),
(
lazy_fixture("load_mnist_model"),
lazy_fixture("load_mnist_images"),
{
"normalise": True,
"explain_func": explain,
"method": "Saliency",
"disable_warnings": False,
"display_progressbar": False,
},
1.0,
),
(
lazy_fixture("load_1d_3ch_conv_model"),
lazy_fixture("almost_uniform_1d"),
{
"abs": True,
"explain_func": explain,
"method": "Saliency",
"disable_warnings": True,
"display_progressbar": False,
"a_batch_generate": False,
},
1.0,
),
(
lazy_fixture("load_mnist_model"),
lazy_fixture("load_mnist_images"),
{
"abs": True,
"explain_func": explain,
"method": "Saliency",
"disable_warnings": True,
"display_progressbar": False,
},
1.0,
),
(
lazy_fixture("load_1d_3ch_conv_model"),
lazy_fixture("almost_uniform_1d"),
{
"abs": False,
"explain_func": explain,
"method": "Saliency",
"disable_warnings": True,
"display_progressbar": False,
"a_batch_generate": False,
},
1.0,
),
(
lazy_fixture("load_mnist_model"),
lazy_fixture("load_mnist_images"),
{
"abs": False,
"explain_func": explain,
"method": "Saliency",
"disable_warnings": True,
"display_progressbar": False,
"a_batch_generate": False,
},
1.0,
),
(
lazy_fixture("load_1d_3ch_conv_model"),
lazy_fixture("almost_uniform_1d"),
{
"normalise": False,
"explain_func": explain,
"method": "Saliency",
"disable_warnings": True,
"display_progressbar": True,
"a_batch_generate": False,
},
1.0,
),
(
lazy_fixture("load_mnist_model"),
lazy_fixture("load_mnist_images"),
{
"normalise": False,
"explain_func": explain,
"method": "Saliency",
"disable_warnings": True,
"display_progressbar": True,
},
1.0,
),
],
)
def test_completeness(
model,
data: np.ndarray,
params: dict,
expected: Union[float, dict, bool],
):
x_batch, y_batch = (
data["x_batch"],
data["y_batch"],
)
if params.get("a_batch_generate", True):
explain = params["explain_func"]
a_batch = explain(
model=model,
inputs=x_batch,
targets=y_batch,
**params,
)
elif "a_batch" in data:
a_batch = data["a_batch"]
else:
a_batch = None
scores = Completeness(**params)(
model=model, x_batch=x_batch, y_batch=y_batch, a_batch=a_batch, **params
)
assert scores is not None, "Test failed."
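# The metric tests in this module share one resolution step: obtain the
# attribution batch before scoring. The helper below is an illustrative,
# standalone sketch of that branching; the name and signature are
# hypothetical and the tests do not call it.
def _resolve_a_batch_sketch(params, data, model, x_batch, y_batch):
    """Return attributions following the resolution order used in the tests."""
    if params.get("a_batch_generate", True):
        # Generate attributions on the fly with the configured explain_func.
        return params["explain_func"](
            model=model, inputs=x_batch, targets=y_batch, **params)
    # Fall back to a pre-computed batch; None lets the metric explain internally.
    return data.get("a_batch")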
@pytest.mark.axiomatic
@pytest.mark.parametrize(
"model,data,params,expected",
[
(
lazy_fixture("load_1d_3ch_conv_model"),
lazy_fixture("almost_uniform_1d"),
{
"n_samples": 1,
"normalise": True,
"explain_func": explain,
"method": "Saliency",
"disable_warnings": False,
"display_progressbar": False,
"a_batch_generate": False,
},
1.0,
),
(
lazy_fixture("load_mnist_model"),
lazy_fixture("load_mnist_images"),
{
"n_samples": 1,
"normalise": True,
"explain_func": explain,
"method": "Saliency",
"disable_warnings": False,
"display_progressbar": False,
"a_batch_generate": False,
},
1.0,
),
(
lazy_fixture("load_1d_3ch_conv_model"),
lazy_fixture("almost_uniform_1d"),
{
"n_samples": 1,
"eps": 1e-2,
"normalise": True,
"explain_func": explain,
"method": "Saliency",
"disable_warnings": True,
"display_progressbar": False,
"a_batch_generate": False,
},
1.0,
),
(
lazy_fixture("load_mnist_model"),
lazy_fixture("load_mnist_images"),
{
"n_samples": 1,
"eps": 1e-2,
"normalise": True,
"explain_func": explain,
"method": "Saliency",
"disable_warnings": True,
"display_progressbar": False,
"a_batch_generate": True,
},
1.0,
),
(
lazy_fixture("load_1d_3ch_conv_model"),
lazy_fixture("almost_uniform_1d"),
{
"n_samples": 2,
"normalise": False,
"explain_func": explain,
"method": "Saliency",
"disable_warnings": True,
"display_progressbar": False,
"a_batch_generate": False,
},
1.0,
),
(
lazy_fixture("load_mnist_model"),
lazy_fixture("load_mnist_images"),
{
"n_samples": 2,
"normalise": False,
"explain_func": explain,
"method": "Saliency",
"disable_warnings": True,
"display_progressbar": False,
"a_batch_generate": True,
},
1.0,
),
(
lazy_fixture("load_1d_3ch_conv_model"),
lazy_fixture("almost_uniform_1d"),
{
"n_samples": 1,
"eps": 1e-10,
"normalise": True,
"explain_func": explain,
"method": "Saliency",
"disable_warnings": True,
"display_progressbar": True,
"a_batch_generate": False,
},
1.0,
),
(
lazy_fixture("load_mnist_model"),
lazy_fixture("load_mnist_images"),
{
"n_samples": 1,
"eps": 1e-10,
"normalise": True,
"explain_func": explain,
"method": "Saliency",
"disable_warnings": True,
"display_progressbar": True,
"a_batch_generate": True,
},
1.0,
),
],
)
def test_non_sensitivity(
model,
data: np.ndarray,
params: dict,
expected: Union[float, dict, bool],
):
x_batch, y_batch = (
data["x_batch"],
data["y_batch"],
)
if params.get("a_batch_generate", True):
explain = params["explain_func"]
a_batch = explain(
model=model,
inputs=x_batch,
targets=y_batch,
**params,
)
elif "a_batch" in data:
a_batch = data["a_batch"]
else:
a_batch = None
scores = NonSensitivity(**params)(
model=model, x_batch=x_batch, y_batch=y_batch, a_batch=a_batch, **params
)
assert scores is not None, "Test failed."
@pytest.mark.axiomatic
@pytest.mark.parametrize(
"model,data,params,expected",
[
(
lazy_fixture("load_1d_3ch_conv_model"),
lazy_fixture("almost_uniform_1d"),
{
"abs": False,
"normalise": False,
"explain_func": explain,
"method": "Gradient",
"input_shift": -1,
"disable_warnings": False,
"display_progressbar": False,
"a_batch_generate": False,
},
{"dtypes": [True, False]},
),
(
lazy_fixture("load_mnist_model"),
lazy_fixture("load_mnist_images"),
{
"abs": False,
"normalise": False,
"explain_func": explain,
"method": "Gradient",
"input_shift": -1,
"disable_warnings": False,
"display_progressbar": False,
"features_in_step": 112,
},
{"dtypes": [True, False]},
),
(
lazy_fixture("load_1d_3ch_conv_model"),
lazy_fixture("almost_uniform_1d"),
{
"abs": False,
"normalise": False,
"explain_func": explain,
"method": "InputxGradient",
"input_shift": -1,
"disable_warnings": True,
"display_progressbar": False,
"a_batch_generate": False,
},
{"dtypes": [True, False]},
),
(
lazy_fixture("load_mnist_model"),
lazy_fixture("load_mnist_images"),
{
"abs": False,
"normalise": False,
"explain_func": explain,
"method": "InputxGradient",
"input_shift": -1,
"disable_warnings": True,
"display_progressbar": False,
"features_in_step": 112,
},
{"dtypes": [True, False]},
),
(
lazy_fixture("load_1d_3ch_conv_model"),
lazy_fixture("almost_uniform_1d"),
{
"abs": False,
"normalise": False,
"explain_func": explain,
"method": "Saliency",
"input_shift": -1,
"disable_warnings": True,
"display_progressbar": False,
"a_batch_generate": False,
},
{"dtypes": [True, False]},
),
(
lazy_fixture("load_mnist_model"),
lazy_fixture("load_mnist_images"),
{
"abs": False,
"normalise": False,
"explain_func": explain,
"method": "Saliency",
"input_shift": -1,
"disable_warnings": True,
"display_progressbar": False,
"features_in_step": 112,
},
{"dtypes": [True, False]},
),
(
lazy_fixture("load_1d_3ch_conv_model"),
lazy_fixture("almost_uniform_1d"),
{
"abs": True,
"normalise": True,
"explain_func": explain,
"method": "Saliency",
"input_shift": -1,
"disable_warnings": True,
"display_progressbar": False,
"a_batch_generate": False,
},
{"dtypes": [True, False]},
),
(
lazy_fixture("load_mnist_model"),
lazy_fixture("load_mnist_images"),
{
"abs": True,
"normalise": True,
"explain_func": explain,
"method": "Saliency",
"input_shift": -1,
"disable_warnings": True,
"display_progressbar": False,
"features_in_step": 112,
},
{"dtypes": [True, False]},
),
(
lazy_fixture("load_1d_3ch_conv_model"),
lazy_fixture("almost_uniform_1d"),
{
"abs": False,
"normalise": False,
"explain_func": explain,
"method": "InputXGradient",
"input_shift": -1,
"disable_warnings": True,
"display_progressbar": True,
"a_batch_generate": False,
},
{"dtypes": [True, False]},
),
(
lazy_fixture("load_mnist_model"),
lazy_fixture("load_mnist_images"),
{
"abs": False,
"normalise": False,
"explain_func": explain,
"method": "InputXGradient",
"input_shift": -1,
"disable_warnings": True,
"display_progressbar": True,
"features_in_step": 112,
},
{"dtypes": [True, False]},
),
],
)
def test_input_invariance(
model,
data: dict,
params: dict,
expected: Union[float, dict, bool],
):
x_batch, y_batch = (
data["x_batch"],
data["y_batch"],
)
if params.get("a_batch_generate", True):
explain = params["explain_func"]
a_batch = explain(
model=model,
inputs=x_batch,
targets=y_batch,
**params,
)
elif "a_batch" in data:
a_batch = data["a_batch"]
else:
a_batch = None
scores = InputInvariance(**params)(
model=model,
x_batch=x_batch,
y_batch=y_batch,
a_batch=a_batch,
**params,
)
assert np.all([s in expected["dtypes"] for s in scores]), "Test failed."
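# Illustrative, hedged sketch of the assertion above: InputInvariance is
# expected to yield one boolean per sample, so the check reduces to set
# membership against expected["dtypes"]. Helper name is an assumption.
def _scores_all_boolean(scores):
    return np.all([s in (True, False) for s in scores])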
| 29.744856
| 80
| 0.45047
| 1,203
| 14,456
| 5.086451
| 0.079801
| 0.095277
| 0.095604
| 0.084981
| 0.929891
| 0.927276
| 0.926295
| 0.921066
| 0.921066
| 0.921066
| 0
| 0.013925
| 0.433661
| 14,456
| 485
| 81
| 29.806186
| 0.73348
| 0
| 0
| 0.762105
| 0
| 0
| 0.253874
| 0.02518
| 0
| 0
| 0
| 0
| 0.006316
| 1
| 0.006316
| false
| 0
| 0.021053
| 0
| 0.027368
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 7 |
22925c5768aab8517dd909dfc3b61799a6ab3354 | 12,923 | py | Python | ttax/ops_test.py | sumiya11/ttax | d0e073222ac08838a022bef9744efe97405556f9 | ["MIT"] | null | null | null |
ttax/ops_test.py | sumiya11/ttax | d0e073222ac08838a022bef9744efe97405556f9 | ["MIT"] | null | null | null |
ttax/ops_test.py | sumiya11/ttax | d0e073222ac08838a022bef9744efe97405556f9 | ["MIT"] | null | null | null |
from absl.testing import absltest
from absl.testing import parameterized
import numpy as np
import jax
import jax.numpy as jnp
import jax.test_util as jtu
from jax.config import config
from ttax.base_class import TT
from ttax.base_class import TTMatrix
from ttax import random_
from ttax import ops
config.parse_flags_with_absl()
class TTTensorTest(jtu.JaxTestCase):
def testFullTensor2d(self):
np.random.seed(1)
for rank in [1, 2]:
a = np.random.rand(10, rank)
b = np.random.rand(rank, 9)
tt_cores = (a.reshape(1, 10, rank), b.reshape(rank, 9, 1))
desired = np.dot(a, b)
tt_tens = TT(tt_cores)
actual = ops.full(tt_tens)
self.assertAllClose(desired, actual)
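  def _full_2core_by_hand(self, a, b):
    # Illustrative helper, not a test: for a 2-core TT tensor the dense form
    # is T[i, j] = sum_r a[i, r] * b[r, j], i.e. exactly the np.dot above.
    return np.einsum('ir,rj->ij', a, b)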
def testFullTensor2dBatch(self):
np.random.seed(1)
for rank in [1, 2]:
a = np.random.rand(3, 10, rank)
b = np.random.rand(3, rank, 9)
tt_cores = (a.reshape(3, 1, 10, rank), b.reshape(3, rank, 9, 1))
desired = np.einsum('bij,bjk->bik', a, b)
tt_tens = TT(tt_cores)
actual = ops.full(tt_tens)
self.assertAllClose(desired, actual)
def testMultiply(self):
# Multiply two TT-tensors.
rng1, rng2 = jax.random.split(jax.random.PRNGKey(0))
dtype = jnp.float32
tt_a = random_.tensor(rng1, (1, 2, 3, 4), tt_rank=2, dtype=dtype)
tt_b = random_.tensor(rng2, (1, 2, 3, 4), tt_rank=[1, 1, 4, 3, 1],
dtype=dtype)
res_actual1 = ops.full(ops.multiply(tt_a, tt_b))
res_actual2 = ops.full(tt_a * tt_b)
res_desired = ops.full(tt_a) * ops.full(tt_b)
self.assertAllClose(res_actual1, res_desired)
self.assertAllClose(res_actual2, res_desired)
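  def _elementwise_property_holds(self, tt_a, tt_b, atol=1e-5):
    # Illustrative helper, not a test: the invariant checked above is that
    # densification commutes with element-wise multiplication,
    # full(multiply(A, B)) == full(A) * full(B).
    lhs = ops.full(ops.multiply(tt_a, tt_b))
    rhs = ops.full(tt_a) * ops.full(tt_b)
    return jnp.allclose(lhs, rhs, atol=atol)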
def testMultiplyBatch(self):
# Multiply two batches of TT-tensors.
rng1, rng2 = jax.random.split(jax.random.PRNGKey(0))
dtype = jnp.float32
tt_a = random_.tensor(rng1, (1, 2, 3, 4), tt_rank=2, batch_shape=(3,),
dtype=dtype)
tt_b = random_.tensor(rng2, (1, 2, 3, 4), tt_rank=[1, 1, 4, 3, 1],
batch_shape=(3,), dtype=dtype)
res_actual1 = ops.full(ops.multiply(tt_a, tt_b))
res_actual2 = ops.full(tt_a * tt_b)
res_desired = ops.full(tt_a) * ops.full(tt_b)
self.assertAllClose(res_actual1, res_desired)
self.assertAllClose(res_actual2, res_desired)
def testFlatInner(self):
# Multiply two TT-tensors.
rng1, rng2 = jax.random.split(jax.random.PRNGKey(0))
dtype = jnp.float32
tt_a = random_.tensor(rng1, (1, 2, 3, 4), tt_rank=2, dtype=dtype)
tt_b = random_.tensor(rng2, (1, 2, 3, 4), tt_rank=[1, 1, 4, 3, 1], dtype=dtype)
res_actual = ops.flat_inner(tt_a, tt_b)
res_desired = jnp.sum(ops.full(tt_a) * ops.full(tt_b))
self.assertAllClose(res_actual, res_desired)
def testAdd(self):
# Add two TT-tensors.
rng1, rng2 = jax.random.split(jax.random.PRNGKey(0))
dtype = jnp.float32
tt_a = random_.tensor(rng1, (2, 1, 3, 4), tt_rank=2, dtype=dtype)
tt_b = random_.tensor(rng2, (2, 1, 3, 4), tt_rank=[1, 2, 4, 3, 1],
dtype=dtype)
res_actual1 = ops.full(ops.add(tt_a, tt_b))
res_actual2 = ops.full(tt_a + tt_b)
res_desired = ops.full(tt_a) + ops.full(tt_b)
self.assertAllClose(res_actual1, res_desired)
self.assertAllClose(res_actual2, res_desired)
def testAddSameBatchSize(self):
# Add two batches of TT-tensors.
rng1, rng2 = jax.random.split(jax.random.PRNGKey(0))
dtype = jnp.float32
tt_a = random_.tensor(rng1, (2, 1, 3, 4), tt_rank=2, batch_shape=(3,),
dtype=dtype)
tt_b = random_.tensor(rng2, (2, 1, 3, 4), tt_rank=[1, 2, 4, 3, 1],
batch_shape=(3,), dtype=dtype)
res_actual1 = ops.full(ops.add(tt_a, tt_b))
res_actual2 = ops.full(tt_a + tt_b)
res_desired = ops.full(tt_a) + ops.full(tt_b)
self.assertAllClose(res_actual1, res_desired)
self.assertAllClose(res_actual2, res_desired)
def testAddBroadcasting(self):
# Sum two TT-tensors with broadcasting.
rng1, rng2 = jax.random.split(jax.random.PRNGKey(0))
dtype = jnp.float32
tt_a = random_.tensor(rng1, (2, 1, 4), tt_rank=2,
batch_shape=(1,), dtype=dtype)
tt_b = random_.tensor(rng2, (2, 1, 4), tt_rank=[1, 2, 4, 1],
batch_shape=(3,), dtype=dtype)
res_actual1 = ops.full(ops.add(tt_a, tt_b))
res_actual2 = ops.full(tt_b + tt_a)
res_desired = ops.full(tt_a) + ops.full(tt_b)
self.assertAllClose(res_actual1, res_desired)
self.assertAllClose(res_actual2, res_desired)
def testMultiplyByScalar(self):
# Multiply batch of TT-tensor by scalar.
c = 4.5
rng = jax.random.PRNGKey(0)
dtype = jnp.float32
tt = random_.tensor(rng, (2, 1, 3, 4), tt_rank=[1, 2, 4, 3, 1],
dtype=dtype)
res_actual1 = ops.full(ops.multiply(tt, c))
res_actual2 = ops.full(tt * c)
res_actual3 = ops.full(c * tt)
res_desired = c * ops.full(tt)
self.assertAllClose(res_actual1, res_desired, rtol=1e-4)
self.assertAllClose(res_actual2, res_desired, rtol=1e-4)
self.assertAllClose(res_actual3, res_desired, rtol=1e-4)
def testMultiplyBatchByScalar(self):
# Multiply batch of TT-tensor by scalar.
c = 4.5
rng = jax.random.PRNGKey(0)
dtype = jnp.float32
tt = random_.tensor(rng, (2, 1, 3, 4), tt_rank=[1, 2, 4, 3, 1],
batch_shape=(3,), dtype=dtype)
res_actual1 = ops.full(ops.multiply(tt, c))
res_actual2 = ops.full(tt * c)
res_actual3 = ops.full(c * tt)
res_desired = c * ops.full(tt)
self.assertAllClose(res_actual1, res_desired, rtol=1e-4)
self.assertAllClose(res_actual2, res_desired, rtol=1e-4)
self.assertAllClose(res_actual3, res_desired, rtol=1e-4)
class TTMatrixTest(jtu.JaxTestCase):
def testFull2d(self):
np.random.seed(1)
for rank in [1, 2]:
a = np.random.rand(9, rank)
b = np.random.rand(rank, 10)
tt_cores = (a.reshape(1, 3, 3, rank), b.reshape(rank, 2, 5, 1))
desired = np.einsum('aijb,bpqc->ipjq', *tt_cores)
desired = desired.reshape(6, 15)
tt_tens = TTMatrix(tt_cores)
actual = ops.full(tt_tens)
self.assertAllClose(desired, actual)
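  def _full_2core_matrix_by_hand(self, core1, core2):
    # Illustrative helper, not a test: a TT-matrix core is laid out as
    # (rank_left, row_mode, col_mode, rank_right); densifying two cores
    # contracts the shared rank, interleaves row/column modes, then reshapes
    # to (prod of row modes, prod of col modes) - the einsum used above.
    dense = np.einsum('aijb,bpqc->ipjq', core1, core2)
    rows = core1.shape[1] * core2.shape[1]
    cols = core1.shape[2] * core2.shape[2]
    return dense.reshape(rows, cols)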
def testFull2dBatch(self):
np.random.seed(1)
for rank in [1, 2]:
a = np.random.rand(7, 9, rank)
b = np.random.rand(7, rank, 10)
tt_cores = (a.reshape(7, 1, 3, 3, rank), b.reshape(7, rank, 2, 5, 1))
desired = np.einsum('taijb,tbpqc->tipjq', *tt_cores)
desired = desired.reshape(7, 6, 15)
tt_tens = TTMatrix(tt_cores)
actual = ops.full(tt_tens)
self.assertAllClose(desired, actual)
def testMatmul(self):
# Multiply two TT-matrices.
rng1, rng2 = jax.random.split(jax.random.PRNGKey(0))
dtype = jnp.float32
left_shape = (2, 3, 4)
sum_shape = (4, 3, 5)
right_shape = (4, 4, 4)
tt_a = random_.matrix(rng1, (left_shape, sum_shape), tt_rank=3, dtype=dtype)
tt_b = random_.matrix(rng2, (sum_shape, right_shape), tt_rank=[1, 4, 3, 1],
dtype=dtype)
res_actual = ops.full(ops.matmul(tt_a, tt_b))
res_desired = ops.full(tt_a) @ ops.full(tt_b)
# TODO: why such low precision?
self.assertAllClose(res_actual, res_desired, rtol=1e-3)
def testMultiply(self):
# Elementwise multiply two TT-matrices.
rng1, rng2 = jax.random.split(jax.random.PRNGKey(0))
dtype = jnp.float32
left_shape = (2, 3, 4)
right_shape = (4, 4, 4)
tt_a = random_.matrix(rng1, (left_shape, right_shape), tt_rank=3,
dtype=dtype)
tt_b = random_.matrix(rng2, (left_shape, right_shape), tt_rank=[1, 4, 3, 1],
dtype=dtype)
res_actual1 = ops.full(ops.multiply(tt_a, tt_b))
res_actual2 = ops.full(tt_a * tt_b)
res_desired = ops.full(tt_a) * ops.full(tt_b)
self.assertAllClose(res_actual1, res_desired, rtol=1e-4)
self.assertAllClose(res_actual2, res_desired, rtol=1e-4)
def testMultiplyBatch(self):
# Elementwise multiply two batches of TT-matrices.
rng1, rng2 = jax.random.split(jax.random.PRNGKey(0))
dtype = jnp.float32
left_shape = (2, 3, 4)
right_shape = (4, 4, 4)
tt_a = random_.matrix(rng1, (left_shape, right_shape), tt_rank=3,
batch_shape=(3,), dtype=dtype)
tt_b = random_.matrix(rng2, (left_shape, right_shape), tt_rank=[1, 4, 3, 1],
batch_shape=(3,), dtype=dtype)
res_actual1 = ops.full(ops.multiply(tt_a, tt_b))
res_actual2 = ops.full(tt_a * tt_b)
res_desired = ops.full(tt_a) * ops.full(tt_b)
# TODO: why such low precision?
self.assertAllClose(res_actual1, res_desired, rtol=1e-3)
self.assertAllClose(res_actual2, res_desired, rtol=1e-3)
def testFlatInner(self):
# Multiply two TT-matrices.
rng1, rng2 = jax.random.split(jax.random.PRNGKey(0))
dtype = jnp.float32
left_shape = (2, 3, 4)
right_shape = (4, 4, 4)
tt_a = random_.matrix(rng1, (left_shape, right_shape), tt_rank=3,
dtype=dtype)
tt_b = random_.matrix(rng2, (left_shape, right_shape), tt_rank=[1, 4, 3, 1],
dtype=dtype)
res_actual = ops.flat_inner(tt_a, tt_b)
res_desired = jnp.sum(ops.full(tt_a) * ops.full(tt_b))
self.assertAllClose(res_actual, res_desired)
def testAdd(self):
# Add two TT-matrices.
rng1, rng2 = jax.random.split(jax.random.PRNGKey(0))
dtype = jnp.float32
left_shape = (2, 3, 4)
right_shape = (4, 4, 4)
tt_a = random_.matrix(rng1, (left_shape, right_shape), tt_rank=3,
dtype=dtype)
tt_b = random_.matrix(rng2, (left_shape, right_shape), tt_rank=[1, 4, 3, 1],
dtype=dtype)
res_actual1 = ops.full(ops.add(tt_a, tt_b))
res_actual2 = ops.full(tt_a + tt_b)
res_desired = ops.full(tt_a) + ops.full(tt_b)
self.assertAllClose(res_actual1, res_desired, rtol=1e-5)
self.assertAllClose(res_actual2, res_desired, rtol=1e-5)
def testAddSameBatchSize(self):
# Add two batches of TT-matrices.
rng1, rng2 = jax.random.split(jax.random.PRNGKey(0))
dtype = jnp.float32
left_shape = (2, 3, 4)
right_shape = (4, 4, 4)
tt_a = random_.matrix(rng1, (left_shape, right_shape), tt_rank=3,
batch_shape=(3,), dtype=dtype)
tt_b = random_.matrix(rng2, (left_shape, right_shape), tt_rank=[1, 4, 3, 1],
batch_shape=(3,), dtype=dtype)
res_actual1 = ops.full(ops.add(tt_a, tt_b))
res_actual2 = ops.full(tt_a + tt_b)
res_desired = ops.full(tt_a) + ops.full(tt_b)
self.assertAllClose(res_actual1, res_desired, rtol=1e-3)
self.assertAllClose(res_actual2, res_desired, rtol=1e-3)
def testAddBroadcasting(self):
# Sum two TT-Matrices with broadcasting.
rng1, rng2 = jax.random.split(jax.random.PRNGKey(0))
dtype = jnp.float32
left_shape = (2, 3, 4)
right_shape = (4, 4, 4)
tt_a = random_.matrix(rng1, (left_shape, right_shape), tt_rank=3,
batch_shape=(3, 1, 3,), dtype=dtype)
tt_b = random_.matrix(rng2, (left_shape, right_shape), tt_rank=[1, 4, 3, 1],
batch_shape=(3, 3, 3), dtype=dtype)
res_actual1 = ops.full(ops.add(tt_a, tt_b))
res_actual2 = ops.full(tt_b + tt_a)
res_desired = ops.full(tt_a) + ops.full(tt_b)
self.assertAllClose(res_actual1, res_desired, rtol=1e-4)
self.assertAllClose(res_actual2, res_desired, rtol=1e-4)
def testMultiplyByScalar(self):
# Multiply TT-matrix by scalar.
c = 4.5
rng = jax.random.PRNGKey(0)
dtype = jnp.float32
left_shape = (2, 3, 4)
right_shape = (4, 4, 4)
tt = random_.matrix(rng, (left_shape, right_shape), tt_rank=3,
dtype=dtype)
res_actual1 = ops.full(ops.multiply(tt, c))
res_actual2 = ops.full(tt * c)
res_actual3 = ops.full(c * tt)
res_desired = c * ops.full(tt)
self.assertAllClose(res_actual1, res_desired, rtol=1e-4)
self.assertAllClose(res_actual2, res_desired, rtol=1e-4)
self.assertAllClose(res_actual3, res_desired, rtol=1e-4)
def testMultiplyBatchByScalar(self):
# Multiply batch of TT-matrix by scalar.
c = 4.5
rng = jax.random.PRNGKey(0)
dtype = jnp.float32
left_shape = (2, 3, 4)
right_shape = (4, 4, 4)
tt = random_.matrix(rng, (left_shape, right_shape), tt_rank=3,
batch_shape=(3, 1, 3,), dtype=dtype)
res_actual1 = ops.full(ops.multiply(tt, c))
res_actual2 = ops.full(tt * c)
res_actual3 = ops.full(c * tt)
res_desired = c * ops.full(tt)
self.assertAllClose(res_actual1, res_desired, rtol=1e-4)
self.assertAllClose(res_actual2, res_desired, rtol=1e-4)
self.assertAllClose(res_actual3, res_desired, rtol=1e-4)
if __name__ == '__main__':
absltest.main(testLoader=jtu.JaxTestLoader())
| 38.576119
| 83
| 0.632438
| 2,012
| 12,923
| 3.87326
| 0.06163
| 0.060182
| 0.055434
| 0.047222
| 0.915694
| 0.892596
| 0.850507
| 0.844861
| 0.8319
| 0.827922
| 0
| 0.050136
| 0.229823
| 12,923
| 334
| 84
| 38.691617
| 0.732844
| 0.047357
| 0
| 0.79927
| 0
| 0
| 0.004312
| 0
| 0
| 0
| 0
| 0.002994
| 0.142336
| 1
| 0.076642
| false
| 0
| 0.040146
| 0
| 0.124088
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 7 |
22f76371c7ff176fd357f2c08b221d81d4892ae4 | 17,563 | py | Python | ironic/tests/unit/drivers/modules/ucs/test_power.py | armohamm/ironic | 21093ca886ed736a7a25bf5e71e05d41e132fd2f | ["Apache-2.0"] | 2 | 2019-06-17T21:37:53.000Z | 2020-07-11T03:58:39.000Z |
ironic/tests/unit/drivers/modules/ucs/test_power.py | armohamm/ironic | 21093ca886ed736a7a25bf5e71e05d41e132fd2f | ["Apache-2.0"] | 5 | 2019-08-14T06:46:03.000Z | 2021-12-13T20:01:25.000Z |
ironic/tests/unit/drivers/modules/ucs/test_power.py | armohamm/ironic | 21093ca886ed736a7a25bf5e71e05d41e132fd2f | ["Apache-2.0"] | 6 | 2019-06-13T12:49:33.000Z | 2021-04-17T16:33:19.000Z |
# Copyright 2015, Cisco Systems.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test class for UcsPower module."""
import mock
from oslo_config import cfg
from oslo_utils import importutils
from ironic.common import exception
from ironic.common import states
from ironic.conductor import task_manager
from ironic.drivers.modules import fake
from ironic.drivers.modules.ucs import helper as ucs_helper
from ironic.drivers.modules.ucs import power as ucs_power
from ironic.tests.unit.drivers.modules.ucs import test_helper
ucs_error = importutils.try_import('UcsSdk.utils.exception')
CONF = cfg.CONF
class UcsPowerTestCase(test_helper.BaseUcsTest):
def setUp(self):
super(UcsPowerTestCase, self).setUp()
CONF.set_override('max_retry', 2, 'cisco_ucs')
CONF.set_override('action_interval', 0, 'cisco_ucs')
self.interface = ucs_power.Power()
def test_get_properties(self):
expected = ucs_helper.COMMON_PROPERTIES
expected.update(ucs_helper.COMMON_PROPERTIES)
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
# Remove properties from boot and deploy interfaces
task.driver.boot = fake.FakeBoot()
task.driver.deploy = fake.FakeDeploy()
self.assertEqual(expected, task.driver.get_properties())
@mock.patch.object(ucs_helper, 'parse_driver_info',
spec_set=True, autospec=True)
def test_validate(self, mock_parse_driver_info):
mock_parse_driver_info.return_value = {}
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.interface.validate(task)
mock_parse_driver_info.assert_called_once_with(task.node)
@mock.patch.object(ucs_helper, 'parse_driver_info',
spec_set=True, autospec=True)
def test_validate_fail(self, mock_parse_driver_info):
side_effect = exception.InvalidParameterValue('Invalid Input')
mock_parse_driver_info.side_effect = side_effect
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertRaises(exception.InvalidParameterValue,
self.interface.validate,
task)
mock_parse_driver_info.assert_called_once_with(task.node)
@mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
spec_set=True, autospec=True)
@mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
spec_set=True, autospec=True)
def test_get_power_state_up(self, mock_power_helper, mock_helper):
mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
mock_power = mock_power_helper.return_value
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
mock_power.get_power_state.return_value = 'up'
self.assertEqual(states.POWER_ON,
self.interface.get_power_state(task))
mock_power.get_power_state.assert_called_once_with()
mock_power.get_power_state.reset_mock()
@mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
spec_set=True, autospec=True)
@mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
spec_set=True, autospec=True)
def test_get_power_state_down(self, mock_power_helper, mock_helper):
mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
mock_power = mock_power_helper.return_value
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
mock_power.get_power_state.return_value = 'down'
self.assertEqual(states.POWER_OFF,
self.interface.get_power_state(task))
mock_power.get_power_state.assert_called_once_with()
mock_power.get_power_state.reset_mock()
@mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
spec_set=True, autospec=True)
@mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
spec_set=True, autospec=True)
def test_get_power_state_error(self, mock_power_helper, mock_helper):
mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
mock_power = mock_power_helper.return_value
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
mock_power.get_power_state.return_value = states.ERROR
self.assertEqual(states.ERROR,
self.interface.get_power_state(task))
mock_power.get_power_state.assert_called_once_with()
@mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
spec_set=True, autospec=True)
@mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
spec_set=True, autospec=True)
def test_get_power_state_fail(self,
mock_ucs_power,
mock_helper):
mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
power = mock_ucs_power.return_value
power.get_power_state.side_effect = (
ucs_error.UcsOperationError(operation='getting power state',
error='failed'))
with task_manager.acquire(self.context, self.node.uuid,
shared=True) as task:
self.assertRaises(exception.UcsOperationError,
self.interface.get_power_state,
task)
power.get_power_state.assert_called_with()
@mock.patch.object(ucs_power.LOG, 'warning')
@mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
spec_set=True, autospec=True)
@mock.patch('ironic.drivers.modules.ucs.power._wait_for_state_change',
spec_set=True, autospec=True)
@mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
spec_set=True, autospec=True)
def test_set_power_state(self, mock_power_helper, mock__wait, mock_helper,
mock_log):
target_state = states.POWER_ON
mock_power = mock_power_helper.return_value
mock_power.get_power_state.side_effect = ['down', 'up']
mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
mock__wait.return_value = target_state
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
self.assertIsNone(self.interface.set_power_state(task,
target_state))
mock_power.set_power_state.assert_called_once_with('up')
mock_power.get_power_state.assert_called_once_with()
mock__wait.assert_called_once_with(target_state, mock_power)
self.assertFalse(mock_log.called)
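    def _scripted_power_mock(self, power_states):
        # Illustrative helper (an assumption, not used by the suite): the
        # tests above script the patched UcsPower instance by assigning a
        # list to get_power_state.side_effect, so each call returns the next
        # state ('down' then 'up' models a machine coming online).
        mock_power = mock.Mock()
        mock_power.get_power_state.side_effect = list(power_states)
        return mock_power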
@mock.patch.object(ucs_power.LOG, 'warning')
@mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
spec_set=True, autospec=True)
@mock.patch('ironic.drivers.modules.ucs.power._wait_for_state_change',
spec_set=True, autospec=True)
@mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
spec_set=True, autospec=True)
def test_set_power_state_timeout(self, mock_power_helper, mock__wait,
mock_helper, mock_log):
target_state = states.POWER_ON
mock_power = mock_power_helper.return_value
mock_power.get_power_state.side_effect = ['down', 'up']
mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
mock__wait.return_value = target_state
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
self.assertIsNone(self.interface.set_power_state(task,
target_state,
timeout=23))
mock_power.set_power_state.assert_called_once_with('up')
mock_power.get_power_state.assert_called_once_with()
mock__wait.assert_called_once_with(target_state, mock_power)
self.assertTrue(mock_log.called)
@mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
spec_set=True, autospec=True)
@mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
spec_set=True, autospec=True)
def test_set_power_state_fail(self, mock_power_helper, mock_helper):
mock_power = mock_power_helper.return_value
mock_power.set_power_state.side_effect = (
ucs_error.UcsOperationError(operation='setting power state',
error='failed'))
mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
self.assertRaises(exception.UcsOperationError,
self.interface.set_power_state,
task, states.POWER_OFF)
mock_power.set_power_state.assert_called_once_with('down')
@mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
spec_set=True, autospec=True)
def test_set_power_state_invalid_state(self, mock_helper):
mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
self.assertRaises(exception.InvalidParameterValue,
self.interface.set_power_state,
task, states.ERROR)
@mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
spec_set=True, autospec=True)
@mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
spec_set=True, autospec=True)
def test__wait_for_state_change_already_target_state(
self,
mock_ucs_power,
mock_helper):
mock_power = mock_ucs_power.return_value
target_state = states.POWER_ON
mock_power.get_power_state.return_value = 'up'
mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
self.assertEqual(states.POWER_ON,
ucs_power._wait_for_state_change(
target_state, mock_power))
mock_power.get_power_state.assert_called_with()
@mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
spec_set=True, autospec=True)
@mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
spec_set=True, autospec=True)
def test__wait_for_state_change_exceed_iterations(
self,
mock_power_helper,
mock_helper):
mock_power = mock_power_helper.return_value
target_state = states.POWER_ON
mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
mock_power.get_power_state.side_effect = (
['down', 'down', 'down', 'down'])
self.assertEqual(states.ERROR,
ucs_power._wait_for_state_change(
target_state, mock_power)
)
mock_power.get_power_state.assert_called_with()
self.assertEqual(4, mock_power.get_power_state.call_count)
@mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
spec_set=True, autospec=True)
@mock.patch('ironic.drivers.modules.ucs.power._wait_for_state_change',
spec_set=True, autospec=True)
@mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
spec_set=True, autospec=True)
def test_set_and_wait_for_state_change_fail(
self,
mock_power_helper,
mock__wait,
mock_helper):
target_state = states.POWER_ON
mock_power = mock_power_helper.return_value
mock_power.get_power_state.return_value = 'down'
mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
mock__wait.return_value = states.POWER_OFF
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
self.assertRaises(exception.PowerStateFailure,
self.interface.set_power_state,
task,
target_state)
mock_power.set_power_state.assert_called_once_with('up')
mock_power.get_power_state.assert_called_once_with()
mock__wait.assert_called_once_with(target_state, mock_power)
@mock.patch.object(ucs_power.LOG, 'warning')
@mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
spec_set=True, autospec=True)
@mock.patch('ironic.drivers.modules.ucs.power._wait_for_state_change',
spec_set=True, autospec=True)
@mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
spec_set=True, autospec=True)
def test_reboot(self, mock_power_helper, mock__wait, mock_helper,
mock_log):
mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
mock_power = mock_power_helper.return_value
mock__wait.return_value = states.POWER_ON
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
self.assertIsNone(self.interface.reboot(task))
mock_power.reboot.assert_called_once_with()
self.assertFalse(mock_log.called)
@mock.patch.object(ucs_power.LOG, 'warning')
@mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
spec_set=True, autospec=True)
@mock.patch('ironic.drivers.modules.ucs.power._wait_for_state_change',
spec_set=True, autospec=True)
@mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
spec_set=True, autospec=True)
def test_reboot_timeout(self, mock_power_helper, mock__wait, mock_helper,
mock_log):
mock_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
mock_power = mock_power_helper.return_value
mock__wait.return_value = states.POWER_ON
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
self.assertIsNone(self.interface.reboot(task, timeout=88))
mock_power.reboot.assert_called_once_with()
self.assertTrue(mock_log.called)
@mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
spec_set=True, autospec=True)
@mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
spec_set=True, autospec=True)
def test_reboot_fail(self, mock_power_helper,
mock_ucs_helper):
mock_ucs_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
mock_power = mock_power_helper.return_value
mock_power.reboot.side_effect = (
ucs_error.UcsOperationError(operation='rebooting', error='failed'))
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
self.assertRaises(exception.UcsOperationError,
self.interface.reboot,
task
)
mock_power.reboot.assert_called_once_with()
@mock.patch('ironic.drivers.modules.ucs.helper.ucs_helper',
spec_set=True, autospec=True)
@mock.patch('ironic.drivers.modules.ucs.power._wait_for_state_change',
spec_set=True, autospec=True)
@mock.patch('ironic.drivers.modules.ucs.power.ucs_power.UcsPower',
spec_set=True, autospec=True)
def test_reboot__wait_state_change_fail(self, mock_power_helper,
mock__wait,
mock_ucs_helper):
mock_ucs_helper.generate_ucsm_handle.return_value = (True, mock.Mock())
mock_power = mock_power_helper.return_value
mock__wait.return_value = states.ERROR
with task_manager.acquire(self.context, self.node.uuid,
shared=False) as task:
self.assertRaises(exception.PowerStateFailure,
self.interface.reboot,
task)
mock_power.reboot.assert_called_once_with()
| 50.613833
| 79
| 0.638672
| 2,096
| 17,563
| 5.038645
| 0.084924
| 0.061358
| 0.071963
| 0.080579
| 0.848026
| 0.837421
| 0.815548
| 0.802576
| 0.767162
| 0.75864
| 0
| 0.001174
| 0.272277
| 17,563
| 346
| 80
| 50.760116
| 0.825131
| 0.037864
| 0
| 0.739274
| 0
| 0
| 0.115982
| 0.102239
| 0
| 0
| 0
| 0
| 0.145215
| 1
| 0.062706
| false
| 0
| 0.036304
| 0
| 0.10231
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0 | 7 |
fe009856e46d7c24c6f990b77750d96564081aa2 | 79,277 | py | Python | bsimp.py | britishcoffee/BSImp | ee5137fe6fa989dd18b7ed185a3a6f232107200e | ["MIT"] | null | null | null |
bsimp.py | britishcoffee/BSImp | ee5137fe6fa989dd18b7ed185a3a6f232107200e | ["MIT"] | null | null | null |
bsimp.py | britishcoffee/BSImp | ee5137fe6fa989dd18b7ed185a3a6f232107200e | ["MIT"] | null | null | null |
# Dec 21: modified to optionally output the original (pre-imputation) counts
##
#---------------------------------------------------------------------
# SERVER only: input all files (.bam and .fa), output MeH matrix in .csv
# Oct 19, 2021 ML, after imputation test
# github
#---------------------------------------------------------------------
import random
import math
import pysam
import csv
import sys
import os  # needed for os.path.splitext in CGgenome_scr/CHHgenome_scr below
import pandas as pd
import numpy as np
import datetime
import time as t
from collections import Counter, defaultdict, OrderedDict
#---------------------------------------
# Functions definition
#---------------------------------------
def open_log(fname):
open_log.logfile = open(fname, 'w', 1)
def logm(message):
log_message = "[%s] %s\n" % (datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'), message)
print(log_message),
open_log.logfile.write(log_message)
def close_log():
open_log.logfile.close()
# Check whether a window has enough reads for complete/impute
def enough_reads(window,w,complete):
temp=np.isnan(window).sum(axis=1)==0
if complete: # For heterogeneity estimation
return temp.sum()>=3
else: # for imputation
tempw1=np.isnan(window).sum(axis=1)==1
#return temp.sum()>=2**(w-2) and tempw1.sum()>0
return temp.sum()>=2 and tempw1.sum()>0
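# Hedged toy example (values assumed) of the two eligibility rules above: a
# window qualifies for heterogeneity estimation with >=3 fully observed reads,
# and qualifies for imputation with >=2 complete reads plus at least one read
# missing exactly one site.
def _enough_reads_demo():
    win = np.array([[1.0, 0.0, 1.0],
                    [1.0, 1.0, 1.0],
                    [0.0, 0.0, np.nan]])
    # -> (False, True): only 2 complete reads, but one read misses one site
    return enough_reads(win, 3, complete=True), enough_reads(win, 3, complete=False)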
def impute(window,w):
full_ind=np.where(np.isnan(window).sum(axis=1)==0)[0]
part_ind=np.where(np.isnan(window).sum(axis=1)==1)[0]
for i in range(len(part_ind)):
sam = []
# which column is nan
pos=np.where(np.isnan(window[part_ind[i],:]))[0]
if np.unique(window[np.where(np.invert(np.isnan(window[:,pos])))[0],pos]).shape[0]==1:
window[part_ind[i],pos]=window[np.where(np.invert(np.isnan(window[:,pos])))[0],pos][0]
else:
#print("win_part i pos =",window[part_ind[i],pos])
for j in range(len(full_ind)):
if (window[part_ind[i],:]==window[full_ind[j],:]).sum()==w-1:
sam.append(j)
if len(sam)>0:
s1=random.sample(sam, 1)
s=window[full_ind[s1],pos]
else:
s=random.sample(window[np.where(np.invert(np.isnan(window[:,pos])))[0],pos].tolist(), k=1)[0]
window[part_ind[i],pos]=np.float64(s)
#print("win_part i =",window[part_ind[i],pos])
#print("s = ",np.float64(s))
return window
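# Hedged toy example (values assumed) of the imputation above: a read missing
# a single site is completed from the column's unanimous value, from a
# complete read that matches it on the other w-1 sites, or by sampling the
# observed values in that column.
def _impute_demo():
    win = np.array([[1.0, 0.0, 1.0],
                    [1.0, 1.0, 0.0],
                    [1.0, 0.0, np.nan]])
    # row 2 agrees with row 0 on both observed sites, so the missing site
    # inherits row 0's value and the completed row becomes [1., 0., 1.]
    return impute(win, 3)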
def outwindow(pat,patori,pos,chrom,w,M,UM,Mo,UMo,mC=4,strand='f',optional=False):
# get complete reads
tempori=np.isnan(patori).sum(axis=1)==0
patori=patori[np.where(tempori)[0],:]
countori=np.zeros((2**w,1))
temp=np.isnan(pat).sum(axis=1)==0
pat=pat[np.where(temp)[0],:]
count=np.zeros((2**w,1))
# m=np.shape(pat)[0]
pat=np.array(pat)
if w==2:
pat = Counter([str(i[0])+str(i[1]) for i in pat.astype(int).tolist()])
count=np.array([float(pat[i]) for i in ['00','10','01','11']])
if optional:
patori = Counter([str(i[0])+str(i[1]) for i in patori.astype(int).tolist()])
countori=np.array([float(patori[i]) for i in ['00','10','01','11']])
if w==3:
pat = Counter([str(i[0])+str(i[1])+str(i[2]) for i in pat.astype(int).tolist()])
count=np.array([float(pat[i]) for i in ['000','100','010','110','001','101','011','111']])
if optional:
patori = Counter([str(i[0])+str(i[1])+str(i[2]) for i in patori.astype(int).tolist()])
countori=np.array([float(patori[i]) for i in ['000','100','010','110','001','101','011','111']])
if w==4:
pat = Counter([str(i[0])+str(i[1])+str(i[2])+str(i[3]) for i in pat.astype(int).tolist()])
count=np.array([float(pat[i]) for i in ['0000','1000','0100','1100','0010','1010','0110','1110','0001',\
'1001','0101','1101','0011','1011','0111','1111']])
if optional:
patori = Counter([str(i[0])+str(i[1])+str(i[2])+str(i[3]) for i in patori.astype(int).tolist()])
countori=np.array([float(patori[i]) for i in ['0000','1000','0100','1100','0010','1010','0110','1110','0001',\
'1001','0101','1101','0011','1011','0111','1111']])
if w==5:
pat = Counter([str(i[0])+str(i[1])+str(i[2])+str(i[3])+str(i[4]) for i in pat.astype(int).tolist()])
count=np.array([float(pat[i]) for i in ['00000','10000','01000','11000','00100','10100','01100','11100','00010',\
'10010','01010','11010','00110','10110','01110','11110','00001','10001','01001','11001','00101',\
'10101','01101','11101','00011','10011','01011','11011','00111','10111','01111','11111']])
if optional:
patori = Counter([str(i[0])+str(i[1])+str(i[2])+str(i[3])+str(i[4]) for i in patori.astype(int).tolist()])
countori = np.array([float(patori[i]) for i in ['00000','10000','01000','11000','00100','10100','01100','11100','00010',\
'10010','01010','11010','00110','10110','01110','11110','00001','10001','01001','11001','00101',\
'10101','01101','11101','00011','10011','01011','11011','00111','10111','01111','11111']])
if w==6:
pat = Counter([str(i[0])+str(i[1])+str(i[2])+str(i[3])+str(i[4])+str(i[5]) for i in pat.astype(int).tolist()])
count=np.array([float(pat[i]) for i in ['000000','100000','010000','110000','001000','101000','011000','111000','000100',\
'100100','010100','110100','001100','101100','011100','111100','000010','100010','010010','110010','001010',\
'101010','011010','111010','000110', '100110','010110','110110','001110','101110','011110','111110',\
'000001','100001','010001','110001','001001','101001','011001','111001','000101',\
'100101','010101','110101','001101','101101','011101','111101','000011','100011','010011','110011','001011',\
'101011','011011','111011','000111', '100111','010111','110111','001111','101111','011111','111111']])
if optional:
patori = Counter([str(i[0])+str(i[1])+str(i[2])+str(i[3])+str(i[4])+str(i[5]) for i in patori.astype(int).tolist()])
countori = np.array([float(patori[i]) for i in ['000000','100000','010000','110000','001000','101000','011000','111000','000100',\
'100100','010100','110100','001100','101100','011100','111100','000010','100010','010010','110010','001010',\
'101010','011010','111010','000110', '100110','010110','110110','001110','101110','011110','111110',\
'000001','100001','010001','110001','001001','101001','011001','111001','000101',\
'100101','010101','110101','001101','101101','011101','111101','000011','100011','010011','110011','001011',\
'101011','011011','111011','000111', '100111','010111','110111','001111','101111','011111','111111']])
count=count.reshape(2**w)
count=np.concatenate((count[[0]],count))
countori=countori.reshape(2**w)
countori=np.concatenate((countori[[0]],countori))
if w==3 and not optional:
opt=pd.DataFrame({'chrom':chrom,'pos':pos,'p01':count[1],'p02':count[2],'p03':count[3],'p04':count[4],\
'p05':count[5],'p06':count[6],'p07':count[7],'p08':count[8],'M':M,'UM':UM,'strand':strand}, index=[0])
if w==3 and optional:
opt=pd.DataFrame({'chrom':chrom,'pos':pos,'p01':count[1],'p02':count[2],'p03':count[3],'p04':count[4],\
'p05':count[5],'p06':count[6],'p07':count[7],'p08':count[8],'p01o':countori[1],'p02o':countori[2],'p03o':countori[3],'p04o':countori[4],\
'p05o':countori[5],'p06o':countori[6],'p07o':countori[7],'p08o':countori[8],'M':M,'UM':UM,'Mo':Mo,'UMo':UMo,'strand':strand}, index=[0])
if w==4 and not optional:
opt=pd.DataFrame({'chrom':chrom,'pos':pos,'p01':count[1],'p02':count[2],'p03':count[3],'p04':count[4],\
'p05':count[5],'p06':count[6],'p07':count[7],'p08':count[8],'p09':count[9],'p10':count[10],\
'p11':count[11],'p12':count[12],'p13':count[13],'p14':count[14],'p15':count[15],\
'p16':count[16],'M':M,'UM':UM,'strand':strand}, index=[0])
if w==4 and optional:
opt=pd.DataFrame({'chrom':chrom,'pos':pos,'p01':count[1],'p02':count[2],'p03':count[3],'p04':count[4],\
'p05':count[5],'p06':count[6],'p07':count[7],'p08':count[8],'p09':count[9],'p10':count[10],\
'p11':count[11],'p12':count[12],'p13':count[13],'p14':count[14],'p15':count[15],\
'p16':count[16],'p01o':countori[1],'p02o':countori[2],'p03o':countori[3],'p04o':countori[4],\
'p05o':countori[5],'p06o':countori[6],'p07o':countori[7],'p08o':countori[8],'p09o':countori[9],'p10o':countori[10],\
'p11o':countori[11],'p12o':countori[12],'p13o':countori[13],'p14o':countori[14],'p15o':countori[15],\
'p16o':countori[16],'M':M,'UM':UM,'Mo':Mo,'UMo':UMo,'strand':strand}, index=[0])
if w==5 and not optional:
opt=pd.DataFrame({'chrom':chrom,'pos':pos,'p01':count[1],'p02':count[2],'p03':count[3],'p04':count[4],\
'p05':count[5],'p06':count[6],'p07':count[7],'p08':count[8],'p09':count[9],'p10':count[10],\
'p11':count[11],'p12':count[12],'p13':count[13],'p14':count[14],'p15':count[15],\
'p16':count[16],'p17':count[17],'p18':count[18],'p19':count[19],'p20':count[20],\
'p21':count[21],'p22':count[22],'p23':count[23],'p24':count[24],'p25':count[25],\
'p26':count[26],'p27':count[27],'p28':count[28],'p29':count[29],'p30':count[30],\
'p31':count[31],'p32':count[32],'M':M,'UM':UM,'strand':strand}, index=[0])
if w==5 and optional:
opt=pd.DataFrame({'chrom':chrom,'pos':pos,'p01':count[1],'p02':count[2],'p03':count[3],'p04':count[4],\
'p05':count[5],'p06':count[6],'p07':count[7],'p08':count[8],'p09':count[9],'p10':count[10],\
'p11':count[11],'p12':count[12],'p13':count[13],'p14':count[14],'p15':count[15],\
'p16':count[16],'p17':count[17],'p18':count[18],'p19':count[19],'p20':count[20],\
'p21':count[21],'p22':count[22],'p23':count[23],'p24':count[24],'p25':count[25],\
'p26':count[26],'p27':count[27],'p28':count[28],'p29':count[29],'p30':count[30],\
'p31':count[31],'p32':count[32],'p01o':countori[1],'p02o':countori[2],'p03o':countori[3],'p04o':countori[4],\
'p05o':countori[5],'p06o':countori[6],'p07o':countori[7],'p08o':countori[8],'p09o':countori[9],'p10o':countori[10],\
'p11o':countori[11],'p12o':countori[12],'p13o':countori[13],'p14o':countori[14],'p15o':countori[15],\
'p16o':countori[16],'p17o':countori[17],'p18o':countori[18],'p19o':countori[19],'p20o':countori[20],\
'p21o':countori[21],'p22o':countori[22],'p23o':countori[23],'p24o':countori[24],'p25o':countori[25],\
'p26o':countori[26],'p27o':countori[27],'p28o':countori[28],'p29o':countori[29],'p30o':countori[30],\
'p31o':countori[31],'p32o':countori[32],'M':M,'UM':UM,'Mo':Mo,'UMo':UMo,'strand':strand}, index=[0])
if w==6 and not optional:
opt=pd.DataFrame({'chrom':chrom,'pos':pos,'p01':count[1],'p02':count[2],'p03':count[3],'p04':count[4],\
'p05':count[5],'p06':count[6],'p07':count[7],'p08':count[8],'p09':count[9],'p10':count[10],\
'p11':count[11],'p12':count[12],'p13':count[13],'p14':count[14],'p15':count[15],\
'p16':count[16],'p17':count[17],'p18':count[18],'p19':count[19],'p20':count[20],\
'p21':count[21],'p22':count[22],'p23':count[23],'p24':count[24],'p25':count[25],\
'p26':count[26],'p27':count[27],'p28':count[28],'p29':count[29],'p30':count[30],\
'p31':count[31],'p32':count[32],'p33':count[33],'p34':count[34],'p35':count[35],\
'p36':count[36],'p37':count[37],'p38':count[38],'p39':count[39],'p40':count[40],\
'p41':count[41],'p42':count[42],'p43':count[43],'p44':count[44],'p45':count[45],\
'p46':count[46],'p47':count[47],'p48':count[48],'p49':count[49],'p50':count[50],\
'p51':count[51],'p52':count[52],'p53':count[53],'p54':count[54],'p55':count[55],\
'p56':count[56],'p57':count[57],'p58':count[58],'p59':count[59],'p60':count[60],\
'p61':count[61],'p62':count[62],'p63':count[63],'p64':count[64],'M':M,'UM':UM,\
'strand':strand}, index=[0])
if w==6 and optional:
opt=pd.DataFrame({'chrom':chrom,'pos':pos,'p01':count[1],'p02':count[2],'p03':count[3],'p04':count[4],\
'p05':count[5],'p06':count[6],'p07':count[7],'p08':count[8],'p09':count[9],'p10':count[10],\
'p11':count[11],'p12':count[12],'p13':count[13],'p14':count[14],'p15':count[15],\
'p16':count[16],'p17':count[17],'p18':count[18],'p19':count[19],'p20':count[20],\
'p21':count[21],'p22':count[22],'p23':count[23],'p24':count[24],'p25':count[25],\
'p26':count[26],'p27':count[27],'p28':count[28],'p29':count[29],'p30':count[30],\
'p31':count[31],'p32':count[32],'p33':count[33],'p34':count[34],'p35':count[35],\
'p36':count[36],'p37':count[37],'p38':count[38],'p39':count[39],'p40':count[40],\
'p41':count[41],'p42':count[42],'p43':count[43],'p44':count[44],'p45':count[45],\
'p46':count[46],'p47':count[47],'p48':count[48],'p49':count[49],'p50':count[50],\
'p51':count[51],'p52':count[52],'p53':count[53],'p54':count[54],'p55':count[55],\
'p56':count[56],'p57':count[57],'p58':count[58],'p59':count[59],'p60':count[60],\
'p61':count[61],'p62':count[62],'p63':count[63],'p64':count[64],'p01o':countori[1],'p02o':countori[2],\
'p03o':countori[3],'p04o':countori[4],\
'p05o':countori[5],'p06o':countori[6],'p07o':countori[7],'p08o':countori[8],'p09o':countori[9],'p10o':countori[10],\
'p11o':countori[11],'p12o':countori[12],'p13o':countori[13],'p14o':countori[14],'p15o':countori[15],\
'p16o':countori[16],'p17o':countori[17],'p18o':countori[18],'p19o':countori[19],'p20o':countori[20],\
'p21o':countori[21],'p22o':countori[22],'p23o':countori[23],'p24o':countori[24],'p25o':countori[25],\
'p26o':countori[26],'p27o':countori[27],'p28o':countori[28],'p29o':countori[29],'p30o':countori[30],\
'p31o':countori[31],'p32o':countori[32],'p33o':countori[33],'p34o':countori[34],\
'p35o':countori[35],'p36o':countori[36],'p37o':countori[37],'p38o':countori[38],'p39o':countori[39],'p40o':countori[40],\
'p41o':countori[41],'p42o':countori[42],'p43o':countori[43],'p44o':countori[44],'p45o':countori[45],\
'p46o':countori[46],'p47o':countori[47],'p48o':countori[48],'p49o':countori[49],'p50o':countori[50],\
'p51o':countori[51],'p52o':countori[52],'p53o':countori[53],'p54o':countori[54],'p55o':countori[55],\
'p56o':countori[56],'p57o':countori[57],'p58o':countori[58],'p59o':countori[59],'p60o':countori[60],\
'p61o':countori[61],'p62o':countori[62],'p63o':countori[63],'p64o':countori[64],'M':M,'UM':UM,'Mo':Mo,'UMo':UMo,\
'strand':strand}, index=[0])
return opt
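# Hedged restatement (helper assumed, not used elsewhere) of the counting step
# inside outwindow for w=2: complete reads are encoded as bit-strings and
# tallied in the fixed order ['00','10','01','11'], which is what lines the
# resulting vector up with the p01..p04 output columns.
def _pattern_counts_w2(pat):
    keys = [str(int(r[0])) + str(int(r[1])) for r in pat]
    tally = Counter(keys)
    return np.array([float(tally[k]) for k in ['00', '10', '01', '11']])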
def CGgenome_scr(bamfile,chrom,w,fa,mC=4,silence=False,optional=False,folder='MeHdata'):
filename, file_extension = os.path.splitext(bamfile)
coverage = cov_context = 0
# load bamfile
samfile = pysam.AlignmentFile("%s/%s.bam" % (folder,filename), "rb")
# load reference genome
fastafile = pysam.FastaFile('%s/%s.fa' % (folder,fa))
# initialise data frame for genome screening (load C from bam file)
aggreR = aggreC = pd.DataFrame(columns=['Qname'])
# if user wants to output compositions of methylation patterns at every eligible window, initialise data frame
if w==3 and not optional:
ResultPW=pd.DataFrame(columns=['chrom','pos','p01','p02','p03','p04',\
'p05','p06','p07','p08','M','UM','strand'])
if w==4 and not optional:
ResultPW=pd.DataFrame(columns=['chrom','pos','p01','p02','p03','p04',\
'p05','p06','p07','p08','p09','p10','p11','p12','p13','p14','p15','p16','M','UM','strand'])
if w==5 and not optional:
ResultPW=pd.DataFrame(columns=['chrom','pos','p01','p02','p03','p04',\
'p05','p06','p07','p08','p09','p10','p11','p12','p13','p14','p15','p16','p17','p18',\
'p19','p20','p21','p22','p23','p24','p25','p26','p27','p28','p29','p30','p31','p32',\
'M','UM','Mo','UMo','strand'])
if w==6 and not optional:
ResultPW=pd.DataFrame(columns=['chrom','pos','p01','p02','p03','p04',\
'p05','p06','p07','p08','p09','p10','p11','p12','p13','p14','p15','p16','p17','p18',\
'p19','p20','p21','p22','p23','p24','p25','p26','p27','p28','p29','p30','p31','p32',\
'p33','p34','p35','p36','p37','p38','p39','p40','p41','p42','p43','p44','p45','p46',\
'p47','p48','p49','p50','p51','p52','p53','p54','p55','p56','p57','p58','p59','p60',\
'p61','p62','p63','p64','M','UM','strand'])
if w==7 and not optional:
ResultPW = pd.DataFrame(columns=\
['chrom','pos','M','UM','p01','p02','p03','p04','p05','p06','p07','p08','p09','p10','p11','p12','p13','p14','p15','p16'\
,'p17','p18','p19','p20','p21','p22','p23','p24','p25','p26','p27','p28',\
'p29','p30','p31','p32','p33','p34','p35','p36','p37','p38','p39','p40','p41','p42','p43','p44','p45','p46'\
,'p47','p48','p49','p50','p51','p52','p53','p54','p55','p56','p57','p58','p59','p60','p61','p62','p63','p64'\
,'p65','p66','p67','p68','p69','p70','p71','p72','p73','p74','p75','p76','p77','p78','p79','p80','p81','p82','p83','p84','p85','p86'\
,'p87','p88','p89','p90','p91','p92','p93','p94','p95','p96','p97','p98','p99','p100','p101','p102','p103','p104'\
,'p105','p106','p107','p108','p109','p120','p121','p122','p123','p124','p125','p126','p127','p128','strand'])
if w==3 and optional:
ResultPW=pd.DataFrame(columns=['chrom','pos','p01','p02','p03','p04',\
'p05','p06','p07','p08','p01o','p02o','p03o','p04o',\
'p05o','p06o','p07o','p08o','M','UM','Mo','UMo','strand'])
if w==4 and optional:
ResultPW=pd.DataFrame(columns=['chrom','pos','p01','p02','p03','p04',\
'p05','p06','p07','p08','p09','p10','p11','p12','p13','p14','p15','p16','p01o','p02o','p03o','p04o',\
'p05o','p06o','p07o','p08o','p09o','p10o','p11o','p12o','p13o','p14o','p15o','p16o','M','UM','Mo','UMo','strand'])
if w==5 and optional:
ResultPW=pd.DataFrame(columns=['chrom','pos','p01','p02','p03','p04',\
'p05','p06','p07','p08','p09','p10','p11','p12','p13','p14','p15','p16','p17','p18',\
'p19','p20','p21','p22','p23','p24','p25','p26','p27','p28','p29','p30','p31','p32','p01o','p02o','p03o','p04o',\
'p05o','p06o','p07o','p08o','p09o','p10o','p11o','p12o','p13o','p14o','p15o','p16o','p17o','p18o',\
'p19o','p20o','p21o','p22o','p23o','p24o','p25o','p26o','p27o','p28o','p29o','p30o','p31o','p32o',\
'M','UM','Mo','UMo','strand'])
if w==6 and optional:
ResultPW=pd.DataFrame(columns=['chrom','pos','p01','p02','p03','p04',\
'p05','p06','p07','p08','p09','p10','p11','p12','p13','p14','p15','p16','p17','p18',\
'p19','p20','p21','p22','p23','p24','p25','p26','p27','p28','p29','p30','p31','p32',\
'p33','p34','p35','p36','p37','p38','p39','p40','p41','p42','p43','p44','p45','p46',\
'p47','p48','p49','p50','p51','p52','p53','p54','p55','p56','p57','p58','p59','p60',\
'p61','p62','p63','p64','p01o','p02o','p03o','p04o',\
'p05o','p06o','p07o','p08o','p09o','p10o','p11o','p12o','p13o','p14o','p15o','p16o','p17o','p18o',\
'p19o','p20o','p21o','p22o','p23o','p24o','p25o','p26o','p27o','p28o','p29o','p30o','p31o','p32o',\
'p33o','p34o','p35o','p36o','p37o','p38o','p39o','p40o','p41o','p42o','p43o','p44o','p45o','p46o',\
'p47o','p48o','p49o','p50o','p51o','p52o','p53o','p54o','p55o','p56o','p57o','p58o','p59o','p60o',\
'p61o','p62o','p63o','p64o','M','UM','Mo','UMo','strand'])
neverr = never = True
chrom_list = []
# all samples' bam files
for i in samfile.get_index_statistics():
chrom_list.append(i.contig)
if chrom in chrom_list:
# screen bamfile by column
for pileupcolumn in samfile.pileup(chrom):
coverage += 1
if not silence:
if (pileupcolumn.pos % 2000000 == 1):
print("CG %s s %s w %s %s pos %s Result %s" % (datetime.datetime.now(),filename,w,chrom,pileupcolumn.pos,ResultPW.shape[0]))
# Forward strand, check if 'CG' in reference genome
if (fastafile.fetch(chrom,pileupcolumn.pos,pileupcolumn.pos+2)=='CG'):
cov_context += 1
temp = pd.DataFrame(columns=['Qname',pileupcolumn.pos+1])
pileupcolumn.set_min_base_quality(0)
# append reads in the column
for pileupread in pileupcolumn.pileups:
if not pileupread.is_del and not pileupread.is_refskip and not pileupread.alignment.is_reverse: # C
d = {'Qname': [pileupread.alignment.query_name], pileupcolumn.pos+1: [pileupread.alignment.query_sequence[pileupread.query_position]]}
df2 = pd.DataFrame(data=d)
temp=temp.append(df2, ignore_index=True)
# merge with other columns
if (not temp.empty):
aggreC = pd.merge(aggreC,temp,how='outer',on=['Qname'])
aggreC = aggreC.drop_duplicates()
# Reverse strand, check if 'CG' in reference genome
if pileupcolumn.pos>1:
if (fastafile.fetch(chrom,pileupcolumn.pos-1,pileupcolumn.pos+1)=='CG'):
cov_context += 1
tempr = pd.DataFrame(columns=['Qname',pileupcolumn.pos+1])
pileupcolumn.set_min_base_quality(0)
for pileupread in pileupcolumn.pileups:
if not pileupread.is_del and not pileupread.is_refskip and pileupread.alignment.is_reverse: # C
dr = {'Qname': [pileupread.alignment.query_name], pileupcolumn.pos+1: [pileupread.alignment.query_sequence[pileupread.query_position]]}
dfr2 = pd.DataFrame(data=dr)
tempr=tempr.append(dfr2, ignore_index=True)
if (not tempr.empty):
aggreR = pd.merge(aggreR,tempr,how='outer',on=['Qname'])
aggreR = aggreR.drop_duplicates()
# Impute and estimate, if there are 2w-1 columns
if never and aggreC.shape[1] == (2*w):
# C/G to 1, rest to 0, N to NA
never = False
aggreC = aggreC.replace(['C'],1)
aggreC = aggreC.replace(['T'],0)
aggreC = aggreC.replace(['A','N','G'],np.nan)
methbin = aggreC
meth = methbin.copy()
# remove read ID
meth = meth.drop('Qname',axis=1)
# back up for imputation
methtemp = meth.copy()
# imputation by sliding windows of w C by 1 C
for i in range(0,meth.shape[1]-w+1,1):
window = meth.iloc[:,range(i,i+w)].values
# save methylation statuses before imputation
# check if eligible for imputation, impute
if enough_reads(window,w,complete=False):
window=pd.DataFrame(data=impute(window,w))
ind=np.where(window.notnull().sum(axis=1)==w)[0]
methtemp.loc[methtemp.iloc[ind,:].index,meth.iloc[:,range(i,i+w)].columns]=window.loc[ind,:].values
# overwrite imputed window
# meth = methtemp.copy()
# Evaluate methylation level and methylation heterogeneity and append to result
for i in range(0,w,1): # w windows
windowold = meth.iloc[:,range(i,i+w)].values
window = methtemp.iloc[:,range(i,i+w)].values
M=(window==1).sum(axis=0)[0]
UM=(window==0).sum(axis=0)[0]
Mo=(windowold==1).sum(axis=0)[0]
UMo=(windowold==0).sum(axis=0)[0]
depth=M+UM
if depth>=mC:
# check if enough complete patterns for evaluating MeH
toappend=outwindow(window,patori=windowold,w=w,pos=meth.iloc[:,range(i,i+w)].columns[0],\
chrom=chrom,strand='f',mC=mC,M=M,UM=UM,Mo=Mo,UMo=UMo,optional=optional)
ResultPW=ResultPW.append(toappend)
# remove 1 column
aggreC = aggreC.drop(meth.columns[0:1],axis=1)
# drop rows with no values
aggreC.dropna(axis = 0, thresh=2, inplace = True)
# total += w
# Reverse
if neverr and aggreR.shape[1] == (2*w):
neverr = False
aggreR = aggreR.replace(['G'],1)
aggreR = aggreR.replace(['A'],0)
aggreR = aggreR.replace(['C','N','T'],np.nan)
methbin = aggreR # backup
#meth = methbin.iloc[:,methbin.columns!='Qname'] # pd to np
meth = methbin.copy()
meth = meth.drop('Qname',axis=1)
methtemp = meth.copy()
# impute once if valid
for i in range(0,meth.shape[1]-w+1,1):
window = meth.iloc[:,range(i,i+w)].values
# if eligible for imputation
if enough_reads(window,w,complete=False):
window=pd.DataFrame(data=impute(window,w))
ind=np.where(window.notnull().sum(axis=1)==w)[0]
methtemp.loc[methtemp.iloc[ind,:].index,meth.iloc[:,range(i,i+w)].columns]=window.loc[ind,:].values
# meth = methtemp.copy()
# compute coverage and output summary
# for i in range(0,meth.shape[1]-w+1,1):
# if i<w:
for i in range(0,w,1):
windowold = meth.iloc[:,range(i,i+w)].values
window = methtemp.iloc[:,range(i,i+w)].values
M=(window==1).sum(axis=0)[0]
UM=(window==0).sum(axis=0)[0]
Mo=(windowold==1).sum(axis=0)[0]
UMo=(windowold==0).sum(axis=0)[0]
depth=M+UM
if depth>=mC:
# check if enough complete patterns for evaluating MeH
toappend=outwindow(window,patori=windowold,w=w,pos=meth.iloc[:,range(i,i+w)].columns[0],\
chrom=chrom,strand='r',mC=mC,M=M,UM=UM,Mo=Mo,UMo=UMo,optional=optional)
ResultPW=ResultPW.append(toappend)
aggreR = aggreR.drop(meth.columns[0:1],axis=1)
aggreR.dropna(axis = 0, thresh=2, inplace = True)
#------------------
# SECONDARY CASE
#------------------
if (aggreC.shape[1] == (3*w-1)):
aggreC = aggreC.replace(['C'],1)
aggreC = aggreC.replace(['T'],0)
aggreC = aggreC.replace(['A','N','G'],np.nan)
methbin = aggreC # backup
#meth = methbin.iloc[:,methbin.columns!='Qname'] # pd to np
meth = methbin.copy()
meth = meth.drop('Qname',axis=1)
methtemp = meth.copy()
# impute once if valid
for i in range(0,meth.shape[1]-w+1,1):
window = meth.iloc[:,range(i,i+w)].values
if enough_reads(window,w,complete=False):
window=pd.DataFrame(data=impute(window,w))
ind=np.where(window.notnull().sum(axis=1)==w)[0]
methtemp.loc[methtemp.iloc[ind,:].index,meth.iloc[:,range(i,i+w)].columns]=window.loc[ind,:].values
# meth = methtemp.copy()
# compute coverage and output summary
for i in range(w-1,2*w-1,1):
windowold = meth.iloc[:,range(i,i+w)].values
window = methtemp.iloc[:,range(i,i+w)].values
M=(window==1).sum(axis=0)[0]
UM=(window==0).sum(axis=0)[0]
Mo=(windowold==1).sum(axis=0)[0]
UMo=(windowold==0).sum(axis=0)[0]
depth=M+UM
if depth>=mC:
# check if enough complete patterns for evaluating MeH
toappend=outwindow(window,patori=windowold,w=w,pos=meth.iloc[:,range(i,i+w)].columns[0],\
chrom=chrom,strand='f',mC=mC,M=M,UM=UM,Mo=Mo,UMo=UMo,optional=optional)
ResultPW=ResultPW.append(toappend)
if ResultPW.shape[0] % 100000 == 1:
ResultPW.to_csv(r"%s/CG_%s_%s.csv"%(folder,filename,chrom),index = False, header=True)
if not silence:
print("Checkpoint CG. For file %s: %s results obtained up to position chr %s: %s." % (filename,ResultPW.shape[0],chrom,pileupcolumn.pos))
aggreC = aggreC.drop(meth.columns[0:w],axis=1)
aggreC.dropna(axis = 0, thresh=2, inplace = True)
#print(aggreC)
#total += w
# reverse
if (aggreR.shape[1] == (3*w-1)):
aggreR = aggreR.replace(['G'],1)
aggreR = aggreR.replace(['A'],0)
aggreR = aggreR.replace(['C','N','T'],np.nan)
methbin = aggreR # backup
#meth = methbin.iloc[:,methbin.columns!='Qname'] # pd to np
meth = methbin.copy()
meth = meth.drop('Qname',axis=1)
methtemp = meth.copy()
# impute once if valid
for i in range(0,meth.shape[1]-w+1,1):
window = meth.iloc[:,range(i,i+w)].values
# if eligible for imputation
if enough_reads(window,w,complete=False):
window=pd.DataFrame(data=impute(window,w))
ind=np.where(window.notnull().sum(axis=1)==w)[0]
methtemp.loc[methtemp.iloc[ind,:].index,meth.iloc[:,range(i,i+w)].columns]=window.loc[ind,:].values
# meth = methtemp.copy()
# compute coverage and output summary
for i in range(w-1,2*w-1,1):
windowold = meth.iloc[:,range(i,i+w)].values
window = methtemp.iloc[:,range(i,i+w)].values
M=(window==1).sum(axis=0)[0]
UM=(window==0).sum(axis=0)[0]
Mo=(windowold==1).sum(axis=0)[0]
UMo=(windowold==0).sum(axis=0)[0]
depth=M+UM
if depth>=mC:
# check if enough complete patterns for evaluating MeH
toappend=outwindow(window,patori=windowold,w=w,pos=meth.iloc[:,range(i,i+w)].columns[0],\
chrom=chrom,strand='r',mC=mC,M=M,UM=UM,Mo=Mo,UMo=UMo,optional=optional)
ResultPW=ResultPW.append(toappend)
if ResultPW.shape[0] % 100000 == 1:
ResultPW.to_csv(r"%s/CG_%s_%s.csv"%(folder,filename,chrom),index = False, header=True)
aggreR = aggreR.drop(meth.columns[0:w],axis=1)
aggreR.dropna(axis = 0, thresh=2, inplace = True)
if ResultPW.shape[0]>0:
ResultPW.to_csv(r"%s/CG_%s_%s.csv"%(folder,filename,chrom),index = False, header=True)
print("Done CG for file %s: %s results obtained up to position chr %s: %s." % (filename,ResultPW.shape[0],chrom,pileupcolumn.pos))
return filename, coverage, cov_context, 'CG'
#samfile.close()
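# Hedged usage sketch for the screener above; file names are assumptions. The
# paths built inside the function imply a layout of MeHdata/<sample>.bam plus
# MeHdata/<fa>.fa, with results written to MeHdata/CG_<sample>_<chrom>.csv:
#
#   name, coverage, cov_context, ctx = CGgenome_scr(
#       'sample.bam', chrom='chr1', w=4, fa='genome', mC=4, optional=True)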
def CHHgenome_scr(bamfile,chrom,w,fa,mC=4,silence=False,optional=False,folder='MeHdata',minML=0.05):
filename, file_extension = os.path.splitext(bamfile)
coverage = cov_context = 0
# load bamfile
samfile = pysam.AlignmentFile("%s/%s.bam" % (folder,filename), "rb")
# load reference genome
fastafile = pysam.FastaFile('%s/%s.fa' % (folder,fa))
# initialise data frame for genome screening (load C from bam file)
aggreR = aggreC = pd.DataFrame(columns=['Qname'])
# if user wants to output compositions of methylation patterns at every eligible window, initialise data frame
if w==3 and not optional:
ResultPW=pd.DataFrame(columns=['chrom','pos','p01','p02','p03','p04',\
'p05','p06','p07','p08','M','UM','strand'])
if w==4 and not optional:
ResultPW=pd.DataFrame(columns=['chrom','pos','p01','p02','p03','p04',\
'p05','p06','p07','p08','p09','p10','p11','p12','p13','p14','p15','p16','M','UM','strand'])
if w==5 and not optional:
ResultPW=pd.DataFrame(columns=['chrom','pos','p01','p02','p03','p04',\
'p05','p06','p07','p08','p09','p10','p11','p12','p13','p14','p15','p16','p17','p18',\
'p19','p20','p21','p22','p23','p24','p25','p26','p27','p28','p29','p30','p31','p32',\
        'M','UM','strand'])
if w==6 and not optional:
ResultPW=pd.DataFrame(columns=['chrom','pos','p01','p02','p03','p04',\
'p05','p06','p07','p08','p09','p10','p11','p12','p13','p14','p15','p16','p17','p18',\
'p19','p20','p21','p22','p23','p24','p25','p26','p27','p28','p29','p30','p31','p32',\
'p33','p34','p35','p36','p37','p38','p39','p40','p41','p42','p43','p44','p45','p46',\
'p47','p48','p49','p50','p51','p52','p53','p54','p55','p56','p57','p58','p59','p60',\
'p61','p62','p63','p64','M','UM','strand'])
if w==7 and not optional:
ResultPW = pd.DataFrame(columns=\
['chrom','pos','M','UM','p01','p02','p03','p04','p05','p06','p07','p08','p09','p10','p11','p12','p13','p14','p15','p16'\
,'p17','p18','p19','p20','p21','p22','p23','p24','p25','p26','p27','p28',\
'p29','p30','p31','p32','p33','p34','p35','p36','p37','p38','p39','p40','p41','p42','p43','p44','p45','p46'\
,'p47','p48','p49','p50','p51','p52','p53','p54','p55','p56','p57','p58','p59','p60','p61','p62','p63','p64'\
,'p65','p66','p67','p68','p69','p70','p71','p72','p73','p74','p75','p76','p77','p78','p79','p80','p81','p82','p83','p84','p85','p86'\
,'p87','p88','p89','p90','p91','p92','p93','p94','p95','p96','p97','p98','p99','p100','p101','p102','p103','p104'\
        ,'p105','p106','p107','p108','p109','p110','p111','p112','p113','p114','p115','p116','p117','p118','p119','p120','p121','p122','p123','p124','p125','p126','p127','p128','strand'])
if w==3 and optional:
ResultPW=pd.DataFrame(columns=['chrom','pos','p01','p02','p03','p04',\
'p05','p06','p07','p08','p01o','p02o','p03o','p04o',\
'p05o','p06o','p07o','p08o','M','UM','Mo','UMo','strand'])
if w==4 and optional:
ResultPW=pd.DataFrame(columns=['chrom','pos','p01','p02','p03','p04',\
'p05','p06','p07','p08','p09','p10','p11','p12','p13','p14','p15','p16','p01o','p02o','p03o','p04o',\
'p05o','p06o','p07o','p08o','p09o','p10o','p11o','p12o','p13o','p14o','p15o','p16o','M','UM','Mo','UMo','strand'])
if w==5 and optional:
ResultPW=pd.DataFrame(columns=['chrom','pos','p01','p02','p03','p04',\
'p05','p06','p07','p08','p09','p10','p11','p12','p13','p14','p15','p16','p17','p18',\
'p19','p20','p21','p22','p23','p24','p25','p26','p27','p28','p29','p30','p31','p32','p01o','p02o','p03o','p04o',\
'p05o','p06o','p07o','p08o','p09o','p10o','p11o','p12o','p13o','p14o','p15o','p16o','p17o','p18o',\
'p19o','p20o','p21o','p22o','p23o','p24o','p25o','p26o','p27o','p28o','p29o','p30o','p31o','p32o',\
'M','UM','Mo','UMo','strand'])
if w==6 and optional:
ResultPW=pd.DataFrame(columns=['chrom','pos','p01','p02','p03','p04',\
'p05','p06','p07','p08','p09','p10','p11','p12','p13','p14','p15','p16','p17','p18',\
'p19','p20','p21','p22','p23','p24','p25','p26','p27','p28','p29','p30','p31','p32',\
'p33','p34','p35','p36','p37','p38','p39','p40','p41','p42','p43','p44','p45','p46',\
'p47','p48','p49','p50','p51','p52','p53','p54','p55','p56','p57','p58','p59','p60',\
'p61','p62','p63','p64','p01o','p02o','p03o','p04o',\
'p05o','p06o','p07o','p08o','p09o','p10o','p11o','p12o','p13o','p14o','p15o','p16o','p17o','p18o',\
'p19o','p20o','p21o','p22o','p23o','p24o','p25o','p26o','p27o','p28o','p29o','p30o','p31o','p32o',\
'p33o','p34o','p35o','p36o','p37o','p38o','p39o','p40o','p41o','p42o','p43o','p44o','p45o','p46o',\
'p47o','p48o','p49o','p50o','p51o','p52o','p53o','p54o','p55o','p56o','p57o','p58o','p59o','p60o',\
'p61o','p62o','p63o','p64o','M','UM','Mo','UMo','strand'])
neverr = never = True
if samfile.is_valid_reference_name(chrom):
for pileupcolumn in samfile.pileup(chrom):
coverage += 1
if not silence:
if (pileupcolumn.pos % 2000000 == 1):
print("CHH %s s %s w %s %s pos %s Result %s" % (datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),filename,w,chrom,pileupcolumn.pos,ResultPW.shape[0]))
# forward
if fastafile.fetch(chrom,pileupcolumn.pos,pileupcolumn.pos+1)=='C' and fastafile.fetch(chrom,pileupcolumn.pos+1,pileupcolumn.pos+2)!='G' and fastafile.fetch(chrom,pileupcolumn.pos+2,pileupcolumn.pos+3)!='G':
cov_context += 1
temp = pd.DataFrame(columns=['Qname',pileupcolumn.pos+1])
pileupcolumn.set_min_base_quality(0)
for pileupread in pileupcolumn.pileups:
if not pileupread.is_del and not pileupread.is_refskip and not pileupread.alignment.is_reverse: # C
d = {'Qname': [pileupread.alignment.query_name], pileupcolumn.pos+1: [pileupread.alignment.query_sequence[pileupread.query_position]]}
df2 = pd.DataFrame(data=d)
#df2.head()
temp=temp.append(df2, ignore_index=True)
if (not temp.empty):
#temp.head()
aggreC = pd.merge(aggreC,temp,how='outer',on=['Qname'])
aggreC = aggreC.drop_duplicates()
# reverse
if pileupcolumn.pos>2:
if fastafile.fetch(chrom,pileupcolumn.pos,pileupcolumn.pos+1)=='G' and fastafile.fetch(chrom,pileupcolumn.pos-1,pileupcolumn.pos)!='C' and fastafile.fetch(chrom,pileupcolumn.pos-2,pileupcolumn.pos-1)!='C':
cov_context += 1
tempr = pd.DataFrame(columns=['Qname',pileupcolumn.pos+1])
pileupcolumn.set_min_base_quality(0)
for pileupread in pileupcolumn.pileups:
if not pileupread.is_del and not pileupread.is_refskip and pileupread.alignment.is_reverse: # C
d = {'Qname': [pileupread.alignment.query_name], pileupcolumn.pos+1: [pileupread.alignment.query_sequence[pileupread.query_position]]}
df2 = pd.DataFrame(data=d)
#df2.head()
tempr=tempr.append(df2, ignore_index=True)
if (not tempr.empty):
aggreR = pd.merge(aggreR,tempr,how='outer',on=['Qname'])
aggreR = aggreR.drop_duplicates()
if never and aggreC.shape[1] == (2*w):
never = False
aggreC = aggreC.replace(['C'],1)
aggreC = aggreC.replace(['T'],0)
aggreC = aggreC.replace(['A','N','G'],np.nan)
methbin = aggreC # backup
#meth = methbin.iloc[:,methbin.columns!='Qname'] # pd to np
meth = methbin.copy()
meth = meth.drop('Qname',axis=1)
methtemp = meth.copy()
# impute once if valid
for i in range(0,meth.shape[1]-w+1,1):
window = meth.iloc[:,range(i,i+w)].values
# if eligible for imputation
if enough_reads(window,w,complete=False):
window=pd.DataFrame(data=impute(window,w))
ind=np.where(window.notnull().sum(axis=1)==w)[0]
methtemp.loc[methtemp.iloc[ind,:].index,meth.iloc[:,range(i,i+w)].columns]=window.loc[ind,:].values
# meth = methtemp.copy()
# compute coverage and output summary
for i in range(0,w,1):
window = methtemp.iloc[:,range(i,i+w)].values
windowold = meth.iloc[:,range(i,i+w)].values
M=(window==1).sum(axis=0)[0]
UM=(window==0).sum(axis=0)[0]
Mo=(windowold==1).sum(axis=0)[0]
UMo=(windowold==0).sum(axis=0)[0]
depth=M+UM
if depth>=mC:
if M/depth > minML:
# check if enough complete patterns for evaluating MeH
toappend=outwindow(window,patori=windowold,w=w,pos=meth.iloc[:,range(i,i+w)].columns[0],\
chrom=chrom,strand='f',mC=mC,M=M,UM=UM,Mo=Mo,UMo=UMo,optional=optional)
ResultPW=ResultPW.append(toappend)
aggreC = aggreC.drop(meth.columns[0:1],axis=1)
aggreC.dropna(axis = 0, thresh=2, inplace = True)
#total += w
# reverse
if neverr and aggreR.shape[1] == (2*w):
neverr = False
aggreR = aggreR.replace(['G'],1)
aggreR = aggreR.replace(['A'],0)
aggreR = aggreR.replace(['C','N','T'],np.nan)
methbin = aggreR # backup
#meth = methbin.iloc[:,methbin.columns!='Qname'] # pd to np
meth = methbin.copy()
meth = meth.drop('Qname',axis=1)
methtemp = meth.copy()
# impute once if valid
for i in range(0,meth.shape[1]-w+1,1):
window = meth.iloc[:,range(i,i+w)].values
# if eligible for imputation
if enough_reads(window,w,complete=False):
window=pd.DataFrame(data=impute(window,w))
ind=np.where(window.notnull().sum(axis=1)==w)[0]
methtemp.loc[methtemp.iloc[ind,:].index,meth.iloc[:,range(i,i+w)].columns]=window.loc[ind,:].values
# meth = methtemp.copy()
# compute coverage and output summary
for i in range(0,w,1):
window = methtemp.iloc[:,range(i,i+w)].values
windowold = meth.iloc[:,range(i,i+w)].values
M=(window==1).sum(axis=0)[0]
UM=(window==0).sum(axis=0)[0]
Mo=(windowold==1).sum(axis=0)[0]
UMo=(windowold==0).sum(axis=0)[0]
depth=M+UM
if depth>=mC:
if M/depth > minML:
# check if enough complete patterns for evaluating MeH
toappend=outwindow(window,patori=windowold,w=w,pos=meth.iloc[:,range(i,i+w)].columns[0],\
chrom=chrom,strand='r',mC=mC,M=M,UM=UM,Mo=Mo,UMo=UMo,optional=optional)
ResultPW=ResultPW.append(toappend)
aggreR = aggreR.drop(meth.columns[0:1],axis=1)
aggreR.dropna(axis = 0, thresh=2, inplace = True)
#------------------
# SECONDARY CASE
#------------------
if (aggreC.shape[1] == (3*w-1)):
aggreC = aggreC.replace(['C'],1)
aggreC = aggreC.replace(['T'],0)
aggreC = aggreC.replace(['N','G','A'],np.nan)
methbin = aggreC # backup
#meth = methbin.iloc[:,methbin.columns!='Qname'] # pd to np
meth = methbin.copy()
meth = meth.drop('Qname',axis=1)
methtemp = meth.copy()
# impute once if valid
for i in range(0,meth.shape[1]-w+1,1):
window = meth.iloc[:,range(i,i+w)].values
# if eligible for imputation
if enough_reads(window,w,complete=False):
window=pd.DataFrame(data=impute(window,w))
ind=np.where(window.notnull().sum(axis=1)==w)[0]
methtemp.loc[methtemp.iloc[ind,:].index,meth.iloc[:,range(i,i+w)].columns]=window.loc[ind,:].values
# meth = methtemp.copy()
# compute coverage and output summary
for i in range(0,w,1):
window = methtemp.iloc[:,range(i,i+w)].values
windowold = meth.iloc[:,range(i,i+w)].values
M=(window==1).sum(axis=0)[0]
UM=(window==0).sum(axis=0)[0]
Mo=(windowold==1).sum(axis=0)[0]
UMo=(windowold==0).sum(axis=0)[0]
depth=M+UM
if depth>=mC:
if M/depth > minML:
# check if enough complete patterns for evaluating MeH
toappend=outwindow(window,patori=windowold,w=w,pos=meth.iloc[:,range(i,i+w)].columns[0],\
chrom=chrom,strand='f',mC=mC,M=M,UM=UM,Mo=Mo,UMo=UMo,optional=optional)
ResultPW=ResultPW.append(toappend)
if ResultPW.shape[0] % 100000 == 1:
ResultPW.to_csv(r"%s/CHH_%s_%s.csv"%(folder,filename,chrom),index = False, header=True)
if not silence:
print("Checkpoint CHH. For file %s: %s results obtained up to position chr %s: %s." % (filename,ResultPW.shape[0],chrom,pileupcolumn.pos))
aggreC = aggreC.drop(meth.columns[0:w],axis=1)
aggreC.dropna(axis = 0, thresh=2, inplace = True)
#print(aggreC)
#total += w
if (aggreR.shape[1] == (3*w-1)):
aggreR = aggreR.replace(['G'],1)
aggreR = aggreR.replace(['A'],0)
aggreR = aggreR.replace(['N','T','C'],np.nan)
methbin = aggreR # backup
#meth = methbin.iloc[:,methbin.columns!='Qname'] # pd to np
meth = methbin.copy()
meth = meth.drop('Qname',axis=1)
methtemp = meth.copy()
# impute once if valid
for i in range(0,meth.shape[1]-w+1,1):
window = meth.iloc[:,range(i,i+w)].values
# if eligible for imputation
if enough_reads(window,w,complete=False):
window=pd.DataFrame(data=impute(window,w))
ind=np.where(window.notnull().sum(axis=1)==w)[0]
methtemp.loc[methtemp.iloc[ind,:].index,meth.iloc[:,range(i,i+w)].columns]=window.loc[ind,:].values
# meth = methtemp.copy()
# compute coverage and output summary
for i in range(0,w,1):
window = methtemp.iloc[:,range(i,i+w)].values
windowold = meth.iloc[:,range(i,i+w)].values
M=(window==1).sum(axis=0)[0]
UM=(window==0).sum(axis=0)[0]
Mo=(windowold==1).sum(axis=0)[0]
UMo=(windowold==0).sum(axis=0)[0]
depth=M+UM
if depth>=mC:
if M/depth > minML:
toappend=outwindow(window,patori=windowold,w=w,pos=meth.iloc[:,range(i,i+w)].columns[0],\
chrom=chrom,strand='r',mC=mC,M=M,UM=UM,Mo=Mo,UMo=UMo,optional=optional)
ResultPW=ResultPW.append(toappend)
if ResultPW.shape[0] % 100000 == 1:
ResultPW.to_csv(r"%s/CHH_%s_%s.csv"%(folder,filename,chrom),index = False, header=True)
if not silence:
print("Checkpoint CHH. For file %s: %s results obtained up to position chr %s: %s." % (filename,ResultPW.shape[0],chrom,pileupcolumn.pos))
aggreR = aggreR.drop(meth.columns[0:w],axis=1)
aggreR.dropna(axis = 0, thresh=2, inplace = True)
#print(aggreC)
#total += w
if ResultPW.shape[0]>0:
ResultPW.to_csv(r"%s/CHH_%s_%s.csv"%(folder,filename,chrom),index = False, header=True)
    print("Done CHH for file %s: %s results obtained up to position chr %s: %s." % (filename,ResultPW.shape[0],chrom,pileupcolumn.pos))
    return filename, coverage, cov_context, 'CHH'
def CHGgenome_scr(bamfile,chrom,w,fa,mC=4,silence=False,optional=False,folder='MeHdata',minML=0.05):
filename, file_extension = os.path.splitext(bamfile)
coverage = cov_context = 0
# load bamfile
samfile = pysam.AlignmentFile("%s/%s.bam" % (folder,filename), "rb")
# load reference genome
fastafile = pysam.FastaFile('%s/%s.fa' % (folder,fa))
# initialise data frame for genome screening (load C from bam file)
aggreR = aggreC = pd.DataFrame(columns=['Qname'])
# if user wants to output compositions of methylation patterns at every eligible window, initialise data frame
if w==3 and not optional:
ResultPW=pd.DataFrame(columns=['chrom','pos','p01','p02','p03','p04',\
'p05','p06','p07','p08','M','UM','strand'])
if w==4 and not optional:
ResultPW=pd.DataFrame(columns=['chrom','pos','p01','p02','p03','p04',\
'p05','p06','p07','p08','p09','p10','p11','p12','p13','p14','p15','p16','M','UM','strand'])
if w==5 and not optional:
ResultPW=pd.DataFrame(columns=['chrom','pos','p01','p02','p03','p04',\
'p05','p06','p07','p08','p09','p10','p11','p12','p13','p14','p15','p16','p17','p18',\
'p19','p20','p21','p22','p23','p24','p25','p26','p27','p28','p29','p30','p31','p32',\
'M','UM','strand'])
if w==6 and not optional:
ResultPW=pd.DataFrame(columns=['chrom','pos','p01','p02','p03','p04',\
'p05','p06','p07','p08','p09','p10','p11','p12','p13','p14','p15','p16','p17','p18',\
'p19','p20','p21','p22','p23','p24','p25','p26','p27','p28','p29','p30','p31','p32',\
'p33','p34','p35','p36','p37','p38','p39','p40','p41','p42','p43','p44','p45','p46',\
'p47','p48','p49','p50','p51','p52','p53','p54','p55','p56','p57','p58','p59','p60',\
'p61','p62','p63','p64','M','UM','strand'])
if w==7 and not optional:
ResultPW = pd.DataFrame(columns=\
['chrom','pos','M','UM','p01','p02','p03','p04','p05','p06','p07','p08','p09','p10','p11','p12','p13','p14','p15','p16'\
,'p17','p18','p19','p20','p21','p22','p23','p24','p25','p26','p27','p28',\
'p29','p30','p31','p32','p33','p34','p35','p36','p37','p38','p39','p40','p41','p42','p43','p44','p45','p46'\
,'p47','p48','p49','p50','p51','p52','p53','p54','p55','p56','p57','p58','p59','p60','p61','p62','p63','p64'\
,'p65','p66','p67','p68','p69','p70','p71','p72','p73','p74','p75','p76','p77','p78','p79','p80','p81','p82','p83','p84','p85','p86'\
,'p87','p88','p89','p90','p91','p92','p93','p94','p95','p96','p97','p98','p99','p100','p101','p102','p103','p104'\
        ,'p105','p106','p107','p108','p109','p110','p111','p112','p113','p114','p115','p116','p117','p118','p119','p120','p121','p122','p123','p124','p125','p126','p127','p128','strand'])
if w==3 and optional:
ResultPW=pd.DataFrame(columns=['chrom','pos','p01','p02','p03','p04',\
'p05','p06','p07','p08','p01o','p02o','p03o','p04o',\
'p05o','p06o','p07o','p08o','M','UM','Mo','UMo','strand'])
if w==4 and optional:
ResultPW=pd.DataFrame(columns=['chrom','pos','p01','p02','p03','p04',\
'p05','p06','p07','p08','p09','p10','p11','p12','p13','p14','p15','p16','p01o','p02o','p03o','p04o',\
'p05o','p06o','p07o','p08o','p09o','p10o','p11o','p12o','p13o','p14o','p15o','p16o','M','UM','Mo','UMo','strand'])
if w==5 and optional:
ResultPW=pd.DataFrame(columns=['chrom','pos','p01','p02','p03','p04',\
'p05','p06','p07','p08','p09','p10','p11','p12','p13','p14','p15','p16','p17','p18',\
'p19','p20','p21','p22','p23','p24','p25','p26','p27','p28','p29','p30','p31','p32','p01o','p02o','p03o','p04o',\
'p05o','p06o','p07o','p08o','p09o','p10o','p11o','p12o','p13o','p14o','p15o','p16o','p17o','p18o',\
'p19o','p20o','p21o','p22o','p23o','p24o','p25o','p26o','p27o','p28o','p29o','p30o','p31o','p32o',\
'M','UM','Mo','UMo','strand'])
if w==6 and optional:
ResultPW=pd.DataFrame(columns=['chrom','pos','p01','p02','p03','p04',\
'p05','p06','p07','p08','p09','p10','p11','p12','p13','p14','p15','p16','p17','p18',\
'p19','p20','p21','p22','p23','p24','p25','p26','p27','p28','p29','p30','p31','p32',\
'p33','p34','p35','p36','p37','p38','p39','p40','p41','p42','p43','p44','p45','p46',\
'p47','p48','p49','p50','p51','p52','p53','p54','p55','p56','p57','p58','p59','p60',\
'p61','p62','p63','p64','p01o','p02o','p03o','p04o',\
'p05o','p06o','p07o','p08o','p09o','p10o','p11o','p12o','p13o','p14o','p15o','p16o','p17o','p18o',\
'p19o','p20o','p21o','p22o','p23o','p24o','p25o','p26o','p27o','p28o','p29o','p30o','p31o','p32o',\
'p33o','p34o','p35o','p36o','p37o','p38o','p39o','p40o','p41o','p42o','p43o','p44o','p45o','p46o',\
'p47o','p48o','p49o','p50o','p51o','p52o','p53o','p54o','p55o','p56o','p57o','p58o','p59o','p60o',\
'p61o','p62o','p63o','p64o','M','UM','Mo','UMo','strand'])
neverr = never = True
start=datetime.datetime.now()
if samfile.is_valid_reference_name(chrom):
for pileupcolumn in samfile.pileup(chrom):
coverage += 1
#chrom = pileupcolumn.reference_name
if not silence:
if (pileupcolumn.pos % 2000000 == 1):
print("CHG %s s %s w %s %s pos %s Result %s" % (datetime.datetime.now().strftime('%Y-%m-%d %H:%M:%S'),filename,w,chrom,pileupcolumn.pos,ResultPW.shape[0]))
if fastafile.fetch(chrom,pileupcolumn.pos,pileupcolumn.pos+1)=='C' and fastafile.fetch(chrom,pileupcolumn.pos+1,pileupcolumn.pos+2)!='G' and fastafile.fetch(chrom,pileupcolumn.pos+2,pileupcolumn.pos+3)=='G':
cov_context += 1
temp = pd.DataFrame(columns=['Qname',pileupcolumn.pos+1])
pileupcolumn.set_min_base_quality(0)
for pileupread in pileupcolumn.pileups:
if not pileupread.is_del and not pileupread.is_refskip and not pileupread.alignment.is_reverse: # C
d = {'Qname': [pileupread.alignment.query_name], pileupcolumn.pos+1: [pileupread.alignment.query_sequence[pileupread.query_position]]}
df2 = pd.DataFrame(data=d)
#df2.head()
temp=temp.append(df2, ignore_index=True)
if (not temp.empty):
#temp.head()
aggreC = pd.merge(aggreC,temp,how='outer',on=['Qname'])
aggreC = aggreC.drop_duplicates()
# reverse
if pileupcolumn.pos>2:
if fastafile.fetch(chrom,pileupcolumn.pos,pileupcolumn.pos+1)=='G' and fastafile.fetch(chrom,pileupcolumn.pos-1,pileupcolumn.pos)!='C' and fastafile.fetch(chrom,pileupcolumn.pos-2,pileupcolumn.pos-1)=='C':
cov_context += 1
tempr = pd.DataFrame(columns=['Qname',pileupcolumn.pos+1])
pileupcolumn.set_min_base_quality(0)
for pileupread in pileupcolumn.pileups:
if not pileupread.is_del and not pileupread.is_refskip and pileupread.alignment.is_reverse: # G
dr = {'Qname': [pileupread.alignment.query_name], pileupcolumn.pos+1: [pileupread.alignment.query_sequence[pileupread.query_position]]}
df2r = pd.DataFrame(data=dr)
#df2.head()
tempr=tempr.append(df2r, ignore_index=True)
if (not tempr.empty):
#temp.head()
aggreR = pd.merge(aggreR,tempr,how='outer',on=['Qname'])
aggreR = aggreR.drop_duplicates()
if never and aggreC.shape[1] == (2*w):
never = False
aggreC = aggreC.replace(['C'],1)
aggreC = aggreC.replace(['T'],0)
aggreC = aggreC.replace(['A','G','N'],np.nan)
methbin = aggreC # backup
#meth = methbin.iloc[:,methbin.columns!='Qname'] # pd to np
meth = methbin.copy()
meth = meth.drop('Qname',axis=1)
methtemp = meth.copy()
# impute once if valid
for i in range(0,meth.shape[1]-w+1,1):
window = meth.iloc[:,range(i,i+w)].values
# if eligible for imputation
if enough_reads(window,w,complete=False):
window=pd.DataFrame(data=impute(window,w))
ind=np.where(window.notnull().sum(axis=1)==w)[0]
methtemp.loc[methtemp.iloc[ind,:].index,meth.iloc[:,range(i,i+w)].columns]=window.loc[ind,:].values
# meth = methtemp.copy()
# compute coverage and output summary
for i in range(0,w,1):
windowold = meth.iloc[:,range(i,i+w)].values
window = methtemp.iloc[:,range(i,i+w)].values
M=(window==1).sum(axis=0)[0]
UM=(window==0).sum(axis=0)[0]
Mo=(windowold==1).sum(axis=0)[0]
UMo=(windowold==0).sum(axis=0)[0]
depth=M+UM
if depth>=mC:
if M/depth > minML:
toappend=outwindow(window,patori=windowold,w=w,pos=meth.iloc[:,range(i,i+w)].columns[0],\
chrom=chrom,strand='f',mC=mC,M=M,UM=UM,Mo=Mo,UMo=UMo,optional=optional)
ResultPW=ResultPW.append(toappend)
aggreC = aggreC.drop(meth.columns[0:1],axis=1)
aggreC.dropna(axis = 0, thresh=2, inplace = True)
#total += w
# reverse
if neverr and aggreR.shape[1] == (2*w):
neverr = False
aggreR = aggreR.replace(['G'],1)
aggreR = aggreR.replace(['A'],0)
aggreR = aggreR.replace(['N','C','T'],np.nan)
methbin = aggreR # backup
meth = methbin.copy()
meth = meth.drop('Qname',axis=1)
methtemp = meth.copy()
# impute once if valid
for i in range(0,meth.shape[1]-w+1,1):
window = meth.iloc[:,range(i,i+w)].values
# if eligible for imputation
if enough_reads(window,w,complete=False):
window=pd.DataFrame(data=impute(window,w))
ind=np.where(window.notnull().sum(axis=1)==w)[0]
methtemp.loc[methtemp.iloc[ind,:].index,meth.iloc[:,range(i,i+w)].columns]=window.loc[ind,:].values
# meth = methtemp.copy()
# compute coverage and output summary
for i in range(0,w,1):
windowold = meth.iloc[:,range(i,i+w)].values
window = methtemp.iloc[:,range(i,i+w)].values
M=(window==1).sum(axis=0)[0]
UM=(window==0).sum(axis=0)[0]
Mo=(windowold==1).sum(axis=0)[0]
UMo=(windowold==0).sum(axis=0)[0]
depth=M+UM
if depth>=mC:
if M/depth > minML:
toappend=outwindow(window,patori=windowold,w=w,pos=meth.iloc[:,range(i,i+w)].columns[0],\
chrom=chrom,strand='r',mC=mC,M=M,UM=UM,Mo=Mo,UMo=UMo,optional=optional)
ResultPW=ResultPW.append(toappend)
aggreR = aggreR.drop(meth.columns[0:1],axis=1)
aggreR.dropna(axis = 0, thresh=2, inplace = True)
#total += w
#------------------
# SECONDARY CASE
#------------------
if (aggreC.shape[1] == (3*w-1)):
aggreC = aggreC.replace(['C'],1)
aggreC = aggreC.replace(['T'],0)
aggreC = aggreC.replace(['N','A','G'],np.nan)
methbin = aggreC # backup
#meth = methbin.iloc[:,methbin.columns!='Qname'] # pd to np
meth = methbin.copy()
meth = meth.drop('Qname',axis=1)
methtemp = meth.copy()
# impute once if valid
for i in range(0,meth.shape[1]-w+1,1):
window = meth.iloc[:,range(i,i+w)].values
# if eligible for imputation
if enough_reads(window,w,complete=False):
window=pd.DataFrame(data=impute(window,w))
ind=np.where(window.notnull().sum(axis=1)==w)[0]
methtemp.loc[methtemp.iloc[ind,:].index,meth.iloc[:,range(i,i+w)].columns]=window.loc[ind,:].values
# cover original matrix
# meth = methtemp.copy()
# compute coverage and output summary
for i in range(w-1,2*w-1,1):
windowold = meth.iloc[:,range(i,i+w)].values
window = methtemp.iloc[:,range(i,i+w)].values
M=(window==1).sum(axis=0)[0]
UM=(window==0).sum(axis=0)[0]
Mo=(windowold==1).sum(axis=0)[0]
UMo=(windowold==0).sum(axis=0)[0]
depth=M+UM
if depth>=mC:
if M/depth > minML:
toappend=outwindow(window,patori=windowold,w=w,pos=meth.iloc[:,range(i,i+w)].columns[0],\
chrom=chrom,strand='f',mC=mC,M=M,UM=UM,Mo=Mo,UMo=UMo,optional=optional)
ResultPW=ResultPW.append(toappend)
                            if ResultPW.shape[0] % 100000 == 1:
ResultPW.to_csv(r"%s/CHG_%s_%s.csv"%(folder,filename,chrom),index = False, header=True)
if not silence:
print("Checkpoint CHG. For file %s: %s results obtained up to position chr %s: %s." % (filename,ResultPW.shape[0],chrom,pileupcolumn.pos+1))
aggreC = aggreC.drop(meth.columns[0:w],axis=1)
aggreC.dropna(axis = 0, thresh=2, inplace = True)
#print(aggreC)
#total += w
# reverse
if (aggreR.shape[1] == (3*w-1)):
aggreR = aggreR.replace(['G'],1)
aggreR = aggreR.replace(['A'],0)
aggreR = aggreR.replace(['N','T','C'],np.nan)
methbin = aggreR # backup
#meth = methbin.iloc[:,methbin.columns!='Qname'] # pd to np
meth = methbin.copy()
meth = meth.drop('Qname',axis=1)
methtemp = meth.copy()
# impute once if valid
for i in range(0,meth.shape[1]-w+1,1):
window = meth.iloc[:,range(i,i+w)].values
# if eligible for imputation
if enough_reads(window,w,complete=False):
window=pd.DataFrame(data=impute(window,w))
ind=np.where(window.notnull().sum(axis=1)==w)[0]
methtemp.loc[methtemp.iloc[ind,:].index,meth.iloc[:,range(i,i+w)].columns]=window.loc[ind,:].values
# cover original matrix
# meth = methtemp.copy()
# compute coverage and output summary
for i in range(w-1,2*w-1,1):
windowold = meth.iloc[:,range(i,i+w)].values
window = methtemp.iloc[:,range(i,i+w)].values
M=(window==1).sum(axis=0)[0]
UM=(window==0).sum(axis=0)[0]
Mo=(windowold==1).sum(axis=0)[0]
UMo=(windowold==0).sum(axis=0)[0]
depth=M+UM
if depth>=mC:
if M/depth > minML:
toappend=outwindow(window,patori=windowold,w=w,pos=meth.iloc[:,range(i,i+w)].columns[0],\
chrom=chrom,strand='r',mC=mC,M=M,UM=UM,Mo=Mo,UMo=UMo,optional=optional)
ResultPW=ResultPW.append(toappend)
if ResultPW.shape[0] % 100000 == 1:
                                ResultPW.to_csv(r"%s/CHG_%s_%s.csv"%(folder,filename,chrom),index = False, header=True)
if not silence:
print("Checkpoint CHG. For file %s: %s results obtained up to position chr %s: %s." % (filename,ResultPW.shape[0],chrom,pileupcolumn.pos+1))
aggreR = aggreR.drop(meth.columns[0:w],axis=1)
aggreR.dropna(axis = 0, thresh=2, inplace = True)
if ResultPW.shape[0]>0:
ResultPW.to_csv(r"%s/CHG_%s_%s.csv"%(folder,filename,chrom),index = False, header=True)
    print("Done CHG for file %s: %s results obtained up to position chr %s: %s." % (filename,ResultPW.shape[0],chrom,pileupcolumn.pos+1))
    return filename, coverage, cov_context, 'CHG'
def split_bam(samplenames,Folder):
# get bam size
spbam_list = []
bamfile = samplenames + '.bam'
statinfo_out = os.stat(Folder+bamfile)
bamsize = statinfo_out.st_size
samfile = pysam.Samfile(Folder+bamfile, "rb")
fileout_base = os.path.splitext(bamfile)[0] # filename
ext = '.bam'
x = 0
fileout = Folder+fileout_base+"_" + str(x)+ext # filename_x.bam
print("fileout",fileout)
header = samfile.header
outfile = pysam.Samfile(fileout, "wb", header = header)
sum_Outfile_Size=0
for reads in samfile.fetch():
outfile.write(reads)
statinfo_out = os.stat(fileout)
outfile_Size = statinfo_out.st_size
if(outfile_Size >=337374182 and sum_Outfile_Size <= bamsize):
sum_Outfile_Size = sum_Outfile_Size + outfile_Size
x = x + 1
spbam_list.append(fileout_base + "_" + str(x)+ext)
outfile.close()
pysam.index(fileout)
fileout = Folder+fileout_base + "_" + str(x)+ext
print("fileout",fileout)
outfile = pysam.Samfile(fileout, "wb",header = header)
outfile.close()
pysam.index(fileout)
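# Usage sketch for split_bam (hypothetical sample name; expects 'MeHdata/sample.bam').
# Reads are streamed into chunk files sample_0.bam, sample_1.bam, ... of roughly
# 337374182 bytes each, and every chunk is indexed with pysam.index:
#   split_bam('sample', 'MeHdata/')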
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("-w", "--windowsize",type=int, default=4 ,help='number of CGs')
parser.add_argument("-c", "--cores",type=int, default=4, help='number of cores')
parser.add_argument("--CG", default=False, action='store_true', help='Include genomic context CG')
parser.add_argument("--CHG", default=False, action='store_true', help='Include genomic context CHG')
parser.add_argument("--CHH", default=False, action='store_true', help='Include genomic context CHH')
parser.add_argument("-mC", "--mindepth",type=int, default=4, help='Minimum depth per cytosine')
parser.add_argument('-f', "--foldername", default='MeHdata', type = str, help = 'Folder name of the location of input files' )
parser.add_argument('--opt', default=False, action='store_true', help='Output original count of patterns')
parser.add_argument('-mML', "--minML",type=float,default=0.05, help='Minimum methylation level for CHG/CHH results')
args = parser.parse_args()
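# Example invocation (hypothetical script name; flags as defined above):
#   python MeHscreening.py -w 4 -c 8 --CG --CHH -mC 4 -f MeHdata -mML 0.05 --opt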
import sys
import time
import os
import pandas as pd
import multiprocessing
from joblib import Parallel, delayed
#num_cores = multiprocessing.cpu_count()
if __name__ == "__main__":
#open_log('MeHscreening.log')
#start = time.time()
#Folder = 'MeHdata/'
Folder = args.foldername + '/'
files = os.listdir(Folder)
bam_list = []
# all samples' bam files
for file in files:
filename, file_extension = os.path.splitext(file)
if file_extension == '.fa':
fa = filename
if file_extension == '.bam':
bam_list.append(filename)
fastafile = pysam.FastaFile('%s%s.fa' % (Folder,fa))
chromosomes=[]
for chrom in fastafile.references:
chromosomes.append(chrom)
fastafile.close()
topp = pd.DataFrame(columns=['sample','coverage','context_coverage','context'])
#CG = []
#start=t.time()
if args.CG:
con='CG'
CG=Parallel(n_jobs=args.cores)(delayed(CGgenome_scr)(bam,chrom=c,w=args.windowsize,fa=fa,mC=args.mindepth,optional=args.opt,folder=args.foldername) for bam in bam_list for c in chromosomes)
for file in bam_list:
for c in chromosomes:
res_dir = Folder + con + '_'+ file + '.csv'
toapp_dir = Folder + con + '_'+ file + '_'+ c + '.csv'
if os.path.exists(res_dir) and os.path.exists(toapp_dir):
Tomod = pd.read_csv(res_dir)
Toappend = pd.read_csv(toapp_dir)
Tomod = Tomod.append(Toappend)
Tomod.to_csv(res_dir,index = False,header=True)
os.remove(toapp_dir)
elif os.path.exists(toapp_dir):
Toappend = pd.read_csv(toapp_dir)
Toappend.to_csv(res_dir,index = False,header=True)
os.remove(toapp_dir)
#logm("All done. "+str(len(bam_list))+" bam files processed and merged for CG.")
for i in CG:
toout=pd.DataFrame({'sample':i[0],'coverage':i[1],'context_coverage':i[2],'context':i[3]},index=[0])
topp=topp.append(toout)
#topp.groupby(['context','sample']).agg({'coverage': 'sum', 'context_coverage': 'sum'})
#print(topp)
if args.CHG:
con='CHG'
CG=Parallel(n_jobs=args.cores)(delayed(CHGgenome_scr)(bam,chrom=c,w=args.windowsize,fa=fa,mC=args.mindepth,optional=args.opt,folder=args.foldername,minML=args.minML) for bam in bam_list for c in chromosomes)
        #logm("Merging within samples for CHG.")
# not into bins of 400bp
for file in bam_list:
for c in chromosomes:
res_dir = Folder + con + '_'+ file + '.csv'
toapp_dir = Folder + con + '_'+ file + '_'+ c + '.csv'
if os.path.exists(res_dir) and os.path.exists(toapp_dir):
Tomod = pd.read_csv(res_dir)
Toappend = pd.read_csv(toapp_dir)
Tomod = Tomod.append(Toappend)
Tomod.to_csv(res_dir,index = False, header = True)
os.remove(toapp_dir)
elif os.path.exists(toapp_dir):
Toappend = pd.read_csv(toapp_dir)
Toappend.to_csv(res_dir,index = False,header = True)
os.remove(toapp_dir)
#logm("All done. "+str(len(bam_list))+" bam files processed and merged for CHG.")
for i in CG:
toout=pd.DataFrame({'sample':i[0],'coverage':i[1],'context_coverage':i[2],'context':i[3]},index=[0])
topp=topp.append(toout)
#topp.groupby(['context','sample']).agg({'coverage': 'sum', 'context_coverage': 'sum'})
if args.CHH:
con='CHH'
CG=Parallel(n_jobs=args.cores)(delayed(CHHgenome_scr)(bam,chrom=c,w=args.windowsize,fa=fa,mC=args.mindepth,optional=args.opt,folder=args.foldername,minML=args.minML) for bam in bam_list for c in chromosomes)
for file in bam_list:
for c in chromosomes:
res_dir = Folder + con + '_'+ file + '.csv'
toapp_dir = Folder + con + '_'+ file + '_'+ c + '.csv'
if os.path.exists(res_dir) and os.path.exists(toapp_dir):
Tomod = pd.read_csv(res_dir)
Toappend = pd.read_csv(toapp_dir)
Tomod = Tomod.append(Toappend)
Tomod.to_csv(res_dir,index = False,header=True)
os.remove(toapp_dir)
elif os.path.exists(toapp_dir):
Toappend = pd.read_csv(toapp_dir)
Toappend.to_csv(res_dir,index = False,header=True)
os.remove(toapp_dir)
print("All done.",len(bam_list),"bam files processed and merged for CHH.")
#logm("All done. "+str(len(bam_list))+" bam files processed and merged for CHH.")
for i in CG:
toout=pd.DataFrame({'sample':i[0],'coverage':i[1],'context_coverage':i[2],'context':i[3]},index=[0])
topp=topp.append(toout)
topp=topp.groupby(['context','sample']).agg({'context_coverage': 'sum', 'coverage': 'sum'}).reset_index()
end = time.time()
for i in range(topp.shape[0]):
print('Sample', topp.iloc[i,1],'has coverage',topp.iloc[i,2],'for context',topp.iloc[i,0],'out of data coverage',topp.iloc[i,3])
#logm('Sample '+str(topp.iloc[i,1])+' has coverage '+str(topp.iloc[i,2])+' for context '+str(topp.iloc[i,0])+' out of data coverage '+str(topp.iloc[i,3])+ '.')
avg_line_length: 59.339072 | max_line_length: 229 | alphanum_fraction: 0.497269
num_words: 9,899 | num_chars: 79,277 | mean_word_length: 3.950702 | frac_words_unique: 0.073543
frac_chars_top_2/3/4grams: 0.012171 / 0.015342 / 0.016876 | frac_chars_dupe_5-10grams: 0.902398 / 0.894958 / 0.889179 / 0.87849 / 0.870461 / 0.862458
frac_chars_digital: 0.113542 | frac_chars_whitespace: 0.29788 | size_file_byte: 79,277 | num_lines: 1,335 | num_chars_line_max: 230 | num_chars_line_mean: 59.383521
frac_chars_alphabet: 0.589055 | frac_chars_comments: 0.071685 | frac_lines_dupe_lines: 0.818 | frac_chars_string_length: 0.135443 | (remaining qsc_* quality signals: small fractions and 0/1 flags, var_zero=false)
hexsha: a3bdd447036de23c6c045088c1162dbf34670451 | size: 6,663 | ext: py | lang: Python
max_stars: attribution/visualizations.py @ cmu-transparency/lib-attribution (248d10ac5518a220205081ff6a5954fa594d4d78), licenses ["MIT"], stars 5, 2020-09-16T21:22:59.000Z .. 2021-08-12T01:03:31.000Z
max_issues: attribution/visualizations.py @ cmu-transparency/lib-attribution (248d10ac5518a220205081ff6a5954fa594d4d78), licenses ["MIT"], issues 1, 2021-06-25T10:35:19.000Z .. 2021-06-27T08:17:07.000Z
max_forks: attribution/visualizations.py @ cmu-transparency/lib-attribution (248d10ac5518a220205081ff6a5954fa594d4d78), licenses ["MIT"], forks 1, 2021-08-12T01:03:48.000Z .. 2021-08-12T01:03:48.000Z
content:
import numpy as np
import keras
import keras.backend as K
from scipy import ndimage
from .VisualizationMethod import VisualizationMethod
from .AttributionMethod import AttributionMethod
from .methods import AumannShapley
class TopKWithBlur(VisualizationMethod):
def __init__(self, attributer, k=1, percentile=95, sigma=15, alpha=0.01):
VisualizationMethod.__init__(self, attributer)
assert k > 0, "k must be a positive integer"
self.k = k
self.percentile = percentile
self.sigma = sigma
self.alpha = alpha
if K.backend() == 'theano':
self.post_grad = lambda x: [x]
elif K.backend() == 'tensorflow':
self.post_grad = lambda x: x
else:
assert False, "Unsupported backend: %s" % K.backend()
self._attr_for_unit = [None for i in range(self.attributer.n_outs)]
def visualize(self, x):
if not self.attributer.is_compiled:
self.attributer.compile()
attribs = self.attributer.get_attributions(x)
return self.visualize_np(x, attribs)
def mask(self, x):
if not self.attributer.is_compiled:
self.attributer.compile()
attribs = self.attributer.get_attributions(x)
return self.mask_np(x, attribs)
def visualize_np(self, x, attribs, **kwargs):
return self._compute_vis(x, attribs, **kwargs)[0]
def mask_np(self, x, attribs, **kwargs):
return self._compute_vis(x, attribs, **kwargs)[1]
def _compute_vis(self, x, attribs, **kwargs):
if np.ndim(x) == K.ndim(self.model.input) - 1:
x = np.expand_dims(x, axis=0)
attribs = np.expand_dims(attribs, axis=0)
assert np.ndim(
attribs) == 2, "Unsupported attribution format: must have 2 dimensions"
order = np.argsort(attribs, axis=1)[:, ::-1][:, :self.k]
vis = []
masks = []
for i in range(len(x)):
if self.layer != self.model.layers[0]:
Q = sum([self.attributer.attribution_units[order[i, j]]
for j in range(self.k)])
infl = AumannShapley(self.model, 0, Q=Q, agg_fn=None).compile()
input_attrs = infl.get_attributions(
x[i], match_layer_shape=True)
else:
input_attrs = np.zeros_like(attribs[i])
input_attrs[order[i]] = x[i].flatten()[order[i]] # pylint: disable=unsupported-assignment-operation
input_attrs = input_attrs.reshape(x[i].shape) # pylint: disable=no-member
if K.image_data_format() == 'channels_first':
input_attrs = input_attrs.mean(axis=0)
else:
input_attrs = input_attrs.mean(axis=2)
input_attrs = np.abs(input_attrs)
input_attrs = np.clip(
input_attrs / np.percentile(input_attrs, 99), 0., 1.)
if self.sigma > 0:
                input_attrs = ndimage.gaussian_filter(
                    input_attrs, self.sigma)
thresh = np.percentile(input_attrs, self.percentile)
mask = (input_attrs > thresh).astype('float32')
if self.sigma > 0:
                mask = ndimage.gaussian_filter(mask, 2)
mask = np.clip(mask + self.alpha, 0., 1.)
if K.image_data_format() == 'channels_first':
mask = np.repeat(np.expand_dims(mask, 0), 3, axis=0)
else:
mask = np.repeat(np.expand_dims(mask, 2), 3, axis=2)
vis.append(x[i] * mask)
masks.append(mask)
        return np.array(vis), np.array(masks)
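# Minimal usage sketch for TopKWithBlur (defined only, never called at import time).
# 'attributer' is assumed to be an AttributionMethod instance from this library and
# 'x_batch' a batch of images; both names are placeholders for this example.
def _demo_topk_with_blur(attributer, x_batch):
    # Keep the regions driven by the top-3 attribution units, blur the rest.
    viz = TopKWithBlur(attributer, k=3, percentile=95, sigma=15, alpha=0.01)
    overlays = viz.visualize(x_batch)  # inputs multiplied by the soft masks
    soft_masks = viz.mask(x_batch)     # the masks themselves
    return overlays, soft_masks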
class UnitsWithBlur(VisualizationMethod):
def __init__(self, attributer, units, percentile=95, sigma=15, alpha=0.1):
VisualizationMethod.__init__(self, attributer)
self.units = units
self.percentile = percentile
self.sigma = sigma
self.alpha = alpha
if K.backend() == 'theano':
self.post_grad = lambda x: [x]
elif K.backend() == 'tensorflow':
self.post_grad = lambda x: x
else:
assert False, "Unsupported backend: %s" % K.backend()
if not self.attributer.is_compiled:
self.attributer.compile()
self._attr_for_unit = [None for i in range(self.attributer.n_outs)]
def visualize(self, x):
return self.visualize_np(x, None)
def mask(self, x):
return self.mask_np(x, None)
def visualize_np(self, x, attribs, **kwargs):
return self._compute_vis(x, attribs, **kwargs)[0]
def mask_np(self, x, attribs, **kwargs):
return self._compute_vis(x, attribs, **kwargs)[1]
def _compute_vis(self, x, attribs, **kwargs):
if np.ndim(x) == K.ndim(self.model.input) - 1:
x = np.expand_dims(x, axis=0)
if self.layer != self.model.layers[0]:
Q = sum([self.attributer.attribution_units[self.units[j]]
for j in range(len(self.units))])
infl = AumannShapley(self.model, 0, Q=Q, agg_fn=None).compile()
order = self.units
vis = []
masks = []
for i in range(len(x)):
if self.layer != self.model.layers[0]:
input_attrs = infl.get_attributions(
x[i], match_layer_shape=True)
else:
input_attrs = np.zeros_like(attribs[i])
input_attrs[order[i]] = x[i].flatten()[order[i]] # pylint: disable=unsupported-assignment-operation
input_attrs = input_attrs.reshape(x[i].shape) # pylint: disable=no-member
if K.image_data_format() == 'channels_first':
input_attrs = input_attrs.mean(axis=0)
else:
input_attrs = input_attrs.mean(axis=2)
input_attrs = np.abs(input_attrs)
input_attrs = np.clip(
input_attrs / np.percentile(input_attrs, 99), 0., 1.)
            input_attrs = ndimage.gaussian_filter(
                input_attrs, self.sigma)
thresh = np.percentile(input_attrs, self.percentile)
mask = (input_attrs > thresh).astype('float32')
            mask = ndimage.gaussian_filter(mask, 2)
mask = np.clip(mask + self.alpha, 0., 1.)
if K.image_data_format() == 'channels_first':
mask = np.repeat(np.expand_dims(mask, 0), 3, axis=0)
else:
mask = np.repeat(np.expand_dims(mask, 2), 3, axis=2)
vis.append(x[i] * mask)
masks.append(mask)
        return np.array(vis), np.array(masks)
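# Analogous sketch for UnitsWithBlur: the units of interest are fixed up front
# instead of being chosen per input ('attributer' and 'x_batch' are placeholders
# as above, and the unit indices are arbitrary illustrative values).
def _demo_units_with_blur(attributer, x_batch):
    viz = UnitsWithBlur(attributer, units=[0, 7, 42], percentile=95, sigma=15, alpha=0.1)
    return viz.visualize(x_batch), viz.mask(x_batch)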
avg_line_length: 35.068421 | max_line_length: 116 | alphanum_fraction: 0.576317
num_words: 835 | num_chars: 6,663 | mean_word_length: 4.450299 | frac_words_unique: 0.149701
frac_chars_top_2/3/4grams: 0.096878 / 0.037675 / 0.043057 | frac_chars_dupe_5-10grams: 0.838267 / 0.81028 / 0.780947 / 0.780947 / 0.780947 / 0.767492
frac_chars_digital: 0.014851 | frac_chars_whitespace: 0.302716 | size_file_byte: 6,663 | num_lines: 189 | num_chars_line_max: 117 | num_chars_line_mean: 35.253968
frac_chars_alphabet: 0.784976 | frac_chars_comments: 0.022362 | frac_lines_dupe_lines: 0.807143 | frac_chars_string_length: 0.035336 | (remaining qsc_* quality signals: small fractions and 0/1 flags, var_zero=false)
hexsha: 431aaaac1b0140beeb9cea44338a4b5a4878cfcf | size: 6,180 | ext: py | lang: Python
max_stars: src/assets/user_agents.py @ diogenes895/lightnovel-crawler (69162799b05c7a84845e87f4d95041715c77c2ba), licenses ["Apache-2.0"], stars null, dates null
max_issues: src/assets/user_agents.py @ diogenes895/lightnovel-crawler (69162799b05c7a84845e87f4d95041715c77c2ba), licenses ["Apache-2.0"], issues null, dates null
max_forks: src/assets/user_agents.py @ diogenes895/lightnovel-crawler (69162799b05c7a84845e87f4d95041715c77c2ba), licenses ["Apache-2.0"], forks null, dates null
content:
# -*- coding: utf-8 -*-
user_agents = [
# "Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.103 Safari/537.36",
# "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:66.0) Gecko/20100101 Firefox/66.0",
# "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:15.0) Gecko/20100101 Firefox/15.0.1",
# "Mozilla/5.0 (X11; CrOS x86_64 8172.45.0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/51.0.2704.64 Safari/537.36",
# "Mozilla/5.0 (Linux; Android 8.0.0; SM-G960F Build/R16NW) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/62.0.3202.84 Mobile Safari/537.36",
# "Mozilla/5.0 (Linux; Android 6.0.1; Nexus 6P Build/MMB29P) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/47.0.2526.83 Mobile Safari/537.36",
# "Mozilla/5.0 (Linux; Android 6.0; HTC One M9 Build/MRA58K) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.98 Mobile Safari/537.3",
# "Mozilla/5.0 (Linux; Android 7.0; Pixel C Build/NRD90M; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/52.0.2743.98 Safari/537.36",
# "Mozilla/5.0 (Linux; Android 6.0.1; SHIELD Tablet K1 Build/MRA58K; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/55.0.2883.91 Safari/537.36",
# "Mozilla/5.0 (iPhone; CPU iPhone OS 12_0 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/12.0 Mobile/15E148 Safari/604.1",
# "Mozilla/5.0 (iPhone; CPU iPhone OS 12_0 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) CriOS/69.0.3497.105 Mobile/15E148 Safari/605.1",
# "Mozilla/5.0 (iPhone; CPU iPhone OS 12_0 like Mac OS X) AppleWebKit/605.1.15 (KHTML, like Gecko) FxiOS/13.2b11866 Mobile/16A366 Safari/605.1.15",
# "Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/11.0 Mobile/15A372 Safari/604.1",
# "Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.34 (KHTML, like Gecko) Version/11.0 Mobile/15A5341f Safari/604.1",
# "Mozilla/5.0 (iPhone; CPU iPhone OS 11_0 like Mac OS X) AppleWebKit/604.1.38 (KHTML, like Gecko) Version/11.0 Mobile/15A5370a Safari/604.1",
# "Mozilla/5.0 (iPhone9,3; U; CPU iPhone OS 10_0_1 like Mac OS X) AppleWebKit/602.1.50 (KHTML, like Gecko) Version/10.0 Mobile/14A403 Safari/602.1",
# "Mozilla/5.0 (Windows Phone 10.0; Android 6.0.1; Microsoft; RM-1152) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/52.0.2743.116 Mobile Safari/537.36 Edge/15.15254",
"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.86 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.86 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.86 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.86 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.86 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.86 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.86 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.86 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.86 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/59.0.3071.86 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.78 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.78 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.78 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.78 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.78 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.78 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.78 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.78 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.78 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/60.0.3112.78 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.1; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.2; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36",
"Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36",
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_9_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_10_5) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_12_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_13_6) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36",
"Mozilla/5.0 (Macintosh; Intel Mac OS X 10_14_0) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/61.0.3163.79 Safari/537.36"
]
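# Typical use of this table (a sketch; the 'requests' import is an assumption of
# the example, not a dependency of this module):
#   import random, requests
#   headers = {'User-Agent': random.choice(user_agents)}
#   requests.get('https://example.com', headers=headers)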
avg_line_length: 116.603774 | max_line_length: 171 | alphanum_fraction: 0.705825
num_words: 1,184 | num_chars: 6,180 | mean_word_length: 3.642736 | frac_words_unique: 0.106419
frac_chars_top_2/3/4grams: 0.086946 / 0.098076 / 0.185022 | frac_chars_dupe_5-10grams: 0.869928 / 0.85764 / 0.848134 / 0.816137 / 0.800139 / 0.786228
frac_chars_digital: 0.235206 | frac_chars_whitespace: 0.135922 | size_file_byte: 6,180 | num_lines: 52 | num_chars_line_max: 172 | num_chars_line_mean: 118.846154
frac_chars_alphabet: 0.572472 | frac_chars_comments: 0.372168 | frac_lines_long_string: 0.9375 | frac_chars_string_length: 0.910645 | (remaining qsc_* quality signals: 0/1 flags, var_zero=false)
hexsha: 43391433cde52613a08cf9267cdc9511047bd425 | size: 98 | ext: py | lang: Python
max_stars: workspace/module/python-2.7/LxBasic/.test/_test_re.py @ no7hings/Lynxi (43c745198a714c2e5aca86c6d7a014adeeb9abf7), licenses ["MIT"], stars 2, 2018-03-06T03:33:55.000Z .. 2019-03-26T03:25:11.000Z
max_issues: workspace/module/python-2.7/LxBasic/.test/_test_re.py @ no7hings/lynxi (43c745198a714c2e5aca86c6d7a014adeeb9abf7), licenses ["MIT"], issues null, dates null
max_forks: workspace/module/python-2.7/LxBasic/.test/_test_re.py @ no7hings/lynxi (43c745198a714c2e5aca86c6d7a014adeeb9abf7), licenses ["MIT"], forks null, dates null
content:
# coding:utf-8
import re
#
print re.findall(r'^ab', 'abeerw')
print re.findall(r'rws', 'abeerw')
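# Expected output: r'^ab' anchors at the start of 'abeerw', so the first call prints
# ['ab']; 'rws' never occurs in 'abeerw', so the second prints [].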
avg_line_length: 14 | max_line_length: 34 | alphanum_fraction: 0.663265
num_words: 17 | num_chars: 98 | mean_word_length: 3.823529 | frac_words_unique: 0.647059
frac_chars_top_2/3/4grams: 0.215385 / 0.430769 / 0.461538 | frac_chars_dupe_5-10grams: all 0
frac_chars_digital: 0.011628 | frac_chars_whitespace: 0.122449 | size_file_byte: 98 | num_lines: 6 | num_chars_line_max: 35 | num_chars_line_mean: 16.333333
frac_chars_alphabet: 0.744186 | frac_chars_comments: 0.122449 | frac_chars_string_length: 0.216867 | (remaining qsc_* quality signals: 0/1 flags with some nulls)
hexsha: 4a3f3a4589b94735ad995c29e52295024e92ca81 | size: 22,757 | ext: py | lang: Python
max_stars: library/scrollphathd/fonts/fontorgan.py @ Kisty/scroll-phat-hd (74a16305574ecd708f3a1c8ce667bf114675c5c7), licenses ["MIT"], stars 155, 2017-02-28T15:33:17.000Z .. 2021-12-15T15:53:52.000Z
max_issues: library/scrollphathd/fonts/fontorgan.py @ Kisty/scroll-phat-hd (74a16305574ecd708f3a1c8ce667bf114675c5c7), licenses ["MIT"], issues 59, 2017-03-02T23:46:13.000Z .. 2022-02-09T17:44:27.000Z
max_forks: library/scrollphathd/fonts/fontorgan.py @ Kisty/scroll-phat-hd (74a16305574ecd708f3a1c8ce667bf114675c5c7), licenses ["MIT"], forks 75, 2017-02-28T10:22:00.000Z .. 2022-02-04T12:59:50.000Z
content:
data = {
0x00000030: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff]],
0x00000031: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x00,0x00,0x00,0x00],
[0x00,0x00,0x00,0x00,0x00,0xff,0xff,0x00,0x00,0x00,0x00,0x00],
[0x00,0x00,0x00,0x00,0x00,0xff,0xff,0x00,0x00,0x00,0x00,0x00],
[0x00,0x00,0x00,0x00,0x00,0xff,0xff,0x00,0x00,0x00,0x00,0x00],
[0x00,0x00,0x00,0x00,0x00,0xff,0xff,0x00,0x00,0x00,0x00,0x00],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff]],
0x00000032: [[0x00,0x00,0x00,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff]],
0x00000033: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff]],
0x00000034: [[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff]],
0x00000035: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x00,
0x00],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff]],
0x00000036: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x00],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff]],
0x00000037: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff]],
0x00000038: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff]],
0x00000039: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff]],
0x00000041: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff]],
0x00000042: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,0x00],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,0xff,0x00,0x00],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,0xff,0x00,0x00],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff]],
0x00000043: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff]],
0x00000044: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0x00,0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0x00,0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0x00,0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0x00,0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff]],
0x00000045: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff]],
0x00000046: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00]],
0x00000047: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,0xff,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff]],
0x00000048: [[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff]],
0x00000049: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff],
[0x00,0x00,0x00,0x00,0x00,0xff,0xff,0x00,0x00,0x00,0x00,0x00],
[0x00,0x00,0x00,0x00,0x00,0xff,0xff,0x00,0x00,0x00,0x00,0x00],
[0x00,0x00,0x00,0x00,0x00,0xff,0xff,0x00,0x00,0x00,0x00,0x00],
[0x00,0x00,0x00,0x00,0x00,0xff,0xff,0x00,0x00,0x00,0x00,0x00],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff]],
0x0000004a: [[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,
0x00],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,
0x00]],
0x0000004b: [[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,0x00,0x00,
0x00],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,0x00,0x00,
0x00],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,0x00,0x00,
0x00],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff]],
0x0000004c: [[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff]],
0x0000004d: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x00,0x00,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x00,0x00,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x00,0x00,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x00,0x00,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x00,0x00,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x00,0x00,
0xff]],
0x0000004e: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff]],
0x0000004f: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff]],
0x00000050: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00]],
0x00000051: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,0xff,0x00,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff]],
0x00000052: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,0x00,0x00,
0x00],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,0x00,0x00,
0x00],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,0x00,0x00,
0x00]],
0x00000053: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff]],
0x00000054: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x00,0x00,
0x00],
[0x00,0x00,0x00,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x00,0x00,
0x00],
[0x00,0x00,0x00,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x00,0x00,
0x00],
[0x00,0x00,0x00,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x00,0x00,
0x00],
[0x00,0x00,0x00,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x00,0x00,
0x00],
[0x00,0x00,0x00,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x00,0x00,
0x00]],
0x00000055: [[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff]],
0x00000056: [[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00]],
0x00000057: [[0xff,0x00,0x00,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0x00,0x00,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff]],
0x00000058: [[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0x00,0x00,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0x00,
0x00],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff]],
0x00000059: [[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x00,0x00,
0x00],
[0x00,0x00,0x00,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x00,0x00,
0x00],
[0x00,0x00,0x00,0x00,0x00,0x00,0xff,0x00,0x00,0x00,0x00,0x00,
0x00]],
0x0000005a: [[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0xff],
[0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0xff],
[0x00,0x00,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0x00],
[0xff,0xff,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,
0x00],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff],
[0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,0xff,
0xff]],
}
width = 13
height = 7
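
A hedged sketch (not part of the source file): each entry above is a height x width grid of 0x00/0xff pixel values, so a glyph can be rendered directly as ASCII art. The dictionary's name is not visible in this excerpt, so `glyphs` below is an illustrative stand-in.

# Minimal renderer for the 0x00/0xff bitmaps above; `glyphs` is a hypothetical
# name for the dict these entries belong to.
def render_glyph(bitmap):
    for row in bitmap:
        print(''.join('#' if px else '.' for px in row))

# e.g. render_glyph(glyphs[0x4e]) prints '#' for each 0xff pixel and '.' for 0x00.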
| 48.626068
| 76
| 0.578899
| 3,240
| 22,757
| 4.066049
| 0.013272
| 0.909671
| 1.186883
| 1.356612
| 0.971307
| 0.971307
| 0.971307
| 0.971307
| 0.971307
| 0.971307
| 0
| 0.407357
| 0.253417
| 22,757
| 467
| 77
| 48.730193
| 0.36804
| 0
| 0
| 0.890951
| 0
| 0
| 0
| 0
| 0
| 1
| 0.578108
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 14
|
436f46eaf4e6479cc2a289e73453cd26fd83004b
| 1,252
|
py
|
Python
|
catkin_ws/build_isolated/velodyne_gazebo_plugins/catkin_generated/pkg.installspace.context.pc.py
|
LiuXiang199x/DRL_Navigation
|
336e847bde8261d429fd2de8111b3d24c0ab4bae
|
[
"MIT"
] | null | null | null |
catkin_ws/build_isolated/velodyne_gazebo_plugins/catkin_generated/pkg.installspace.context.pc.py
|
LiuXiang199x/DRL_Navigation
|
336e847bde8261d429fd2de8111b3d24c0ab4bae
|
[
"MIT"
] | null | null | null |
catkin_ws/build_isolated/velodyne_gazebo_plugins/catkin_generated/pkg.installspace.context.pc.py
|
LiuXiang199x/DRL_Navigation
|
336e847bde8261d429fd2de8111b3d24c0ab4bae
|
[
"MIT"
] | null | null | null |
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "${prefix}/include;/usr/include;/usr/include/gazebo-11;/usr/include/bullet;/usr/include/simbody;/usr/include/sdformat-9.7;/usr/include/ignition/math6;/usr/include/OGRE;/usr/include/OGRE/Terrain;/usr/include/OGRE/Paging;/usr/include/ignition/transport8;/usr/include/ignition/msgs5;/usr/include/ignition/common3;/usr/include/ignition/fuel_tools4".split(';') if "${prefix}/include;/usr/include;/usr/include/gazebo-11;/usr/include/bullet;/usr/include/simbody;/usr/include/sdformat-9.7;/usr/include/ignition/math6;/usr/include/OGRE;/usr/include/OGRE/Terrain;/usr/include/OGRE/Paging;/usr/include/ignition/transport8;/usr/include/ignition/msgs5;/usr/include/ignition/common3;/usr/include/ignition/fuel_tools4" != "" else []
PROJECT_CATKIN_DEPENDS = "roscpp;sensor_msgs;gazebo_ros".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "-lgazebo_ros_velodyne_laser;-lgazebo_ros_velodyne_gpu_laser".split(';') if "-lgazebo_ros_velodyne_laser;-lgazebo_ros_velodyne_gpu_laser" != "" else []
PROJECT_NAME = "velodyne_gazebo_plugins"
PROJECT_SPACE_DIR = "/home/agent/ROS/DRL-robot-navigation/catkin_ws/install_isolated"
PROJECT_VERSION = "1.0.10"
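
A hedged usage sketch (not part of the generated file): because this catkin artifact is plain Python, a build helper can import it and turn the parsed lists back into compiler and linker arguments. `pkg_context` is an illustrative module name for the file above saved on the import path.

# Hypothetical consumer of the generated build context above.
import pkg_context  # the generated file above, saved as pkg_context.py

include_flags = ['-I' + d for d in pkg_context.PROJECT_PKG_CONFIG_INCLUDE_DIRS]
link_flags = ' '.join(pkg_context.PKG_CONFIG_LIBRARIES_WITH_PREFIX)
print(pkg_context.PROJECT_NAME, pkg_context.PROJECT_VERSION)
print(link_flags)  # -> -lgazebo_ros_velodyne_laser -lgazebo_ros_velodyne_gpu_laser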
| 139.111111
| 751
| 0.798722
| 180
| 1,252
| 5.344444
| 0.355556
| 0.27027
| 0.18711
| 0.047817
| 0.675676
| 0.675676
| 0.675676
| 0.675676
| 0.675676
| 0.573805
| 0
| 0.018137
| 0.03115
| 1,252
| 8
| 752
| 156.5
| 0.774938
| 0.043131
| 0
| 0
| 1
| 0.285714
| 0.775084
| 0.766722
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
437a71fa19f6f894e075b6d97fbb0d659c82207c
| 32,316
|
py
|
Python
|
scripts/titans.py
|
Mr-TalhaIlyas/DAM-Hierarchical-Adaptive-Feature-Selection-Using-Convolution-Encoder-Decoder-Network-for-Strawberry
|
a63122133b31c6316b7987d0da558a0d721d02f5
|
[
"CC-BY-4.0"
] | null | null | null |
scripts/titans.py
|
Mr-TalhaIlyas/DAM-Hierarchical-Adaptive-Feature-Selection-Using-Convolution-Encoder-Decoder-Network-for-Strawberry
|
a63122133b31c6316b7987d0da558a0d721d02f5
|
[
"CC-BY-4.0"
] | null | null | null |
scripts/titans.py
|
Mr-TalhaIlyas/DAM-Hierarchical-Adaptive-Feature-Selection-Using-Convolution-Encoder-Decoder-Network-for-Strawberry
|
a63122133b31c6316b7987d0da558a0d721d02f5
|
[
"CC-BY-4.0"
] | null | null | null |
'''
Bi-Isame-Allah
'''
import tensorflow as tf
import numpy as np
from conv_blocks_1 import SE_ResNet, conv2d_block, SE_ResNet0, SE_block
from models import use_customdropout, num_of_classes
# Import the Keras API matching the installed TensorFlow generation:
# tf.keras for TF 2.x, standalone Keras for the TF 1.x setups this file targets.
if tf.__version__.startswith('2.'):
    from tensorflow.keras.models import Model
    from tensorflow.keras.layers import Input, BatchNormalization, Activation, Dropout, SpatialDropout2D, PReLU, Lambda
    from tensorflow.keras.layers import concatenate, ZeroPadding2D, Reshape
    from tensorflow.keras.regularizers import l2, l1
    from tensorflow.keras.layers import GlobalAveragePooling2D, Dense, Permute, multiply, add
    from tensorflow.keras.layers import Conv2D, Conv2DTranspose, SeparableConv2D, DepthwiseConv2D, UpSampling2D
    from tensorflow.keras.layers import MaxPooling2D, AveragePooling2D, GlobalMaxPooling2D
    from tensorflow.keras.backend import int_shape
else:
    from keras.models import Model
    from keras.layers import Input, BatchNormalization, Activation, Dropout, SpatialDropout2D, PReLU, Lambda
    from keras.layers import concatenate, ZeroPadding2D, Reshape
    from keras.regularizers import l2, l1
    from keras.layers import GlobalAveragePooling2D, Dense, Permute, multiply, add
    from keras.layers import Conv2D, Conv2DTranspose, SeparableConv2D, DepthwiseConv2D, UpSampling2D
    from keras.layers import MaxPooling2D, AveragePooling2D, GlobalMaxPooling2D
    from keras.backend import int_shape
use_mydropout = use_customdropout()
if use_mydropout:
    # Custom dropout implementation shipped with this repo.
    from layers import Dropout
else:
    # Framework Dropout, matching the import style chosen above.
    if tf.__version__.startswith('1.'):
        from keras.layers import Dropout
    else:
        from tensorflow.keras.layers import Dropout
num_class = num_of_classes()
if num_class == 2:
    output_ch = 1           # binary segmentation: single-channel output
else:
    output_ch = num_class   # multi-class: one output channel per class
#%%
def APP0(input_tensor, pool_window):
    # Fuse average- and max-pooled views of the input; both branches downsample
    # by pool_window, so their shapes match for the elementwise add.
    avg = AveragePooling2D(pool_size=(pool_window, pool_window))(input_tensor)
    maax = MaxPooling2D(pool_size=(pool_window, pool_window))(input_tensor)
    x_op = add([avg, maax])
    return x_op
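# Hedged usage sketch (not part of the original file): APP0 downsamples both
# spatial axes by pool_window and leaves channels unchanged. Guarded so that
# importing this module stays side-effect free; shapes assume a 64x64 RGB input.
if __name__ == '__main__':
    _demo_in = Input((64, 64, 3))
    _demo_out = APP0(_demo_in, pool_window=2)
    print(int_shape(_demo_out))  # expected: (None, 32, 32, 3)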
def DAM(l_f_maps, h_f_maps, n_filters, g_kernal, image, pool_w, activation):
    # Fuses low-level encoder features (l_f_maps) with high-level decoder
    # features (h_f_maps), conditioned on a pool_w-times-downsampled copy of
    # the raw image; g_kernal sets the length of the global 1xN/Nx1 convolutions.
    # The dual pooling path halves the spatial size of l_f_maps before fusion.
#filters_r = int(n_filters/4)
x_ip = Conv2D(48, kernel_size = (1, 1), kernel_initializer = 'he_normal', padding = 'same')(l_f_maps)
img = APP0(image, pool_window = pool_w)
x_ip = concatenate([x_ip, img])
# Global Conv
x1 = Conv2D(n_filters, kernel_size = (g_kernal,1), kernel_initializer = 'he_normal', padding = 'same')(x_ip)
x1 = BatchNormalization()(x1)
x1 = Activation(activation)(x1)
x2 = Conv2D(n_filters, kernel_size = (1,g_kernal), kernel_initializer = 'he_normal', padding = 'same')(x_ip)
x2 = BatchNormalization()(x2)
x2 = Activation(activation)(x2)
xd0 = add([x1,x2])
x3 = DepthwiseConv2D(kernel_size = (3, 3), depthwise_initializer='he_normal', padding = 'same')(xd0)
x3 = BatchNormalization()(x3)
x3 = Activation(activation)(x3)
x4 = DepthwiseConv2D(kernel_size = (3, 3), depthwise_initializer='he_normal', padding = 'same')(xd0)
x4 = BatchNormalization()(x4)
x4 = Activation(activation)(x4)
avg = AveragePooling2D()(x3)
avg = SE_block(avg, n_filters, ratio = 4)#2
maax = MaxPooling2D()(x4)
maax = SE_block(maax, n_filters, ratio = 4)#2
x_sum = add([avg,maax])
x_sig = Activation(activation)(x_sum)
x_c = concatenate([x_sig,h_f_maps])#,img
#x_op = Conv2D(filters = n_filters, kernel_size = (1, 1), kernel_initializer = 'he_normal', padding = 'same')(x_c)
x_op = SE_ResNet(x_c, n_filters, kernel_size = 3, batchnorm = True, dil_rate = 1)
return x_op
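# Hedged usage sketch (not part of the original file): l_f_maps must sit at
# twice the spatial resolution of h_f_maps, and pool_w must bring the raw
# image down to the l_f_maps resolution. The expected output shape assumes
# the repo's SE_ResNet projects its input to n_filters channels.
if __name__ == '__main__':
    _img = Input((256, 256, 3))
    _low = Input((32, 32, 256))    # encoder features at 1/8 scale
    _high = Input((16, 16, 512))   # decoder features at 1/16 scale
    _fused = DAM(_low, _high, n_filters=512, g_kernal=15, image=_img,
                 pool_w=8, activation='relu')
    print(int_shape(_fused))  # expected: (None, 16, 16, 512)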
def PDC(input_tensor, n_filters, dropout, activation='relu'):
    # Parallel dilated convolutions: a 1x1 branch plus separable 3x3 branches
    # at dilation rates 6, 12 and 18, concatenated and projected to n_filters.
    # An ASPP-style context block without the image-level branch; usage mirrors
    # ASPP_v3 below.
x1 = SeparableConv2D(filters = 512, kernel_size = (1, 1), depthwise_initializer='he_normal', pointwise_initializer='he_normal', padding = 'same')(input_tensor)
x1 = BatchNormalization()(x1)
x8 = SeparableConv2D(filters = 512, kernel_size = (3, 3), dilation_rate = 6, depthwise_initializer='he_normal', pointwise_initializer='he_normal', padding = 'same')(input_tensor)
x8 = BatchNormalization()(x8)
x16 = SeparableConv2D(filters = 512, kernel_size = (3, 3), dilation_rate = 12, depthwise_initializer='he_normal', pointwise_initializer='he_normal', padding = 'same')(input_tensor)
x16 = BatchNormalization()(x16)
x24 = SeparableConv2D(filters = 512, kernel_size = (3, 3), dilation_rate = 18, depthwise_initializer='he_normal', pointwise_initializer='he_normal', padding = 'same')(input_tensor)
x24 = BatchNormalization()(x24)
c = concatenate([x1, x8, x16, x24])
c = Conv2D(filters = n_filters, kernel_size = (1,1), kernel_initializer = 'he_normal', padding = 'same')(c)
c = BatchNormalization()(c)
c = Activation(activation)(c)
c = Dropout(dropout)(c)
return c
def ASPP_v3(input_tensor, img, dropout, downsample_by, activation='relu'):
    # DeepLabv3-style ASPP: a 1x1 branch, three atrous 3x3 branches (rates 6,
    # 12, 18) and the raw image average-pooled down by `downsample_by`, all
    # concatenated and projected to 256 channels.
x1 = SeparableConv2D(filters = 256, kernel_size = (1, 1), depthwise_initializer='he_normal', pointwise_initializer='he_normal', padding = 'same')(input_tensor)
x1 = BatchNormalization()(x1)
x8 = SeparableConv2D(filters = 256, kernel_size = (3, 3), dilation_rate = 6, depthwise_initializer='he_normal', pointwise_initializer='he_normal', padding = 'same')(input_tensor)
x8 = BatchNormalization()(x8)
x16 = SeparableConv2D(filters = 256, kernel_size = (3, 3), dilation_rate = 12, depthwise_initializer='he_normal', pointwise_initializer='he_normal', padding = 'same')(input_tensor)
x16 = BatchNormalization()(x16)
x24 = SeparableConv2D(filters = 256, kernel_size = (3, 3), dilation_rate = 18, depthwise_initializer='he_normal', pointwise_initializer='he_normal', padding = 'same')(input_tensor)
x24 = BatchNormalization()(x24)
img = AveragePooling2D(pool_size=downsample_by, strides=downsample_by, padding='same')(img)
c = concatenate([x1, x8, x16, x24, img])
c = Conv2D(filters = 256, kernel_size = (1,1), kernel_initializer = 'he_normal', padding = 'same')(c)
c = BatchNormalization()(c)
c = Activation(activation)(c)
c = Dropout(dropout)(c)
return c
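# Hedged usage sketch (not part of the original file): downsample_by must match
# the ratio between the raw image and input_tensor so the pooled image branch
# concatenates cleanly with the atrous branches.
if __name__ == '__main__':
    _img = Input((256, 256, 3))
    _feat = Input((16, 16, 768))   # encoder output at 1/16 scale
    _ctx = ASPP_v3(_feat, _img, dropout=0.1, downsample_by=16)
    print(int_shape(_ctx))  # expected: (None, 16, 16, 256)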
def Xception(input_img, n_filters, dropout, activation = 'relu', m_flow = 16, batchnorm = True):
    # Entry Flow
#input
ip = Conv2D(filters = 32, kernel_size = (3,3), kernel_initializer = 'he_normal', strides = (2,2), padding = 'same')(input_img)
ip = BatchNormalization()(ip)
ip = Activation(activation)(ip)
ip = Conv2D(filters = 64, kernel_size = (3,3), kernel_initializer = 'he_normal',padding = 'same')(ip)
ip = BatchNormalization()(ip)
ip = Activation(activation)(ip) # *******___1/2 times smaller than ip___********
# 1st Residual connection
res1 = Conv2D(filters = 128, kernel_size = (1,1), kernel_initializer = 'he_normal', strides = (2,2), padding = 'same')(ip)
res1 = BatchNormalization()(res1)
# Block 1
b1 = SeparableConv2D(filters = 128, kernel_size = (3, 3), dilation_rate = 1, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(ip)
b1 = BatchNormalization()(b1)
b1 = Activation(activation)(b1)
b1 = Dropout(dropout)(b1)
b1 = SeparableConv2D(filters = 128, kernel_size = (3, 3), dilation_rate = 1, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b1)
b1 = BatchNormalization()(b1)
b1 = Activation(activation)(b1)
b1 = Dropout(dropout)(b1)
b1 = SeparableConv2D(filters = 128, kernel_size = (3, 3), strides = (2,2), depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b1)
b1 = BatchNormalization()(b1)
b1 = Activation(activation)(b1) # *******___1/4 times smaller than ip___********
b1 = Dropout(dropout)(b1)
b1 = add([b1, res1])
# 2nd Residual connection
    res2 = Conv2D(filters = 256, kernel_size = (1,1), kernel_initializer = 'he_normal', strides = (2,2), padding = 'same')(b1)
res2 = BatchNormalization()(res2)
# Block 2
b2 = SeparableConv2D(filters = 256, kernel_size = (3, 3), dilation_rate = 2, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b1)
b2 = BatchNormalization()(b2)
b2 = Activation(activation)(b2)
b2 = Dropout(dropout)(b2)
b2 = SeparableConv2D(filters = 256, kernel_size = (3, 3), dilation_rate = 2, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b2)
b2 = BatchNormalization()(b2)
b2 = Activation(activation)(b2)
b2 = Dropout(dropout)(b2)
b2 = SeparableConv2D(filters = 256, kernel_size = (3, 3), strides = (2,2), depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b2)
b2 = BatchNormalization()(b2)
b2 = Activation(activation)(b2) # *******___1/8 times smaller than ip___********
b2 = Dropout(dropout)(b2)
b2 = add([b2, res2])
# 3rd Residual connection
res3 = Conv2D(filters = 768, kernel_size = (1,1), kernel_initializer = 'he_normal', strides = (2,2), padding = 'same')(b2)
res3 = BatchNormalization()(res3)
# Block 3
b3 = SeparableConv2D(filters = 768, kernel_size = (3, 3), dilation_rate = 4, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b2)
b3 = BatchNormalization()(b3)
b3 = Activation(activation)(b3)
b3 = Dropout(dropout)(b3)
b3 = SeparableConv2D(filters = 768, kernel_size = (3, 3), dilation_rate = 4, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b3)
b3 = BatchNormalization()(b3)
b3 = Activation(activation)(b3)
b3 = Dropout(dropout)(b3)
b3 = SeparableConv2D(filters = 768, kernel_size = (3, 3), strides = (2,2), depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b3)
b3 = BatchNormalization()(b3)
b3 = Activation(activation)(b3) # *******___1/16 times smaller than ip___********
b3 = Dropout(dropout)(b3)
b3 = add([b3, res3])
# Middle Flow
    # 4th residual connection
res4 = b3
for i in range(m_flow):
b4 = SeparableConv2D(filters = 768, kernel_size = (3, 3), dilation_rate = 8, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b3)
b4 = BatchNormalization()(b4)
b4 = Activation(activation)(b4)
b4 = Dropout(dropout)(b4)
b4 = SeparableConv2D(filters = 768, kernel_size = (3, 3), dilation_rate = 8, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b4)
b4 = BatchNormalization()(b4)
b4 = Activation(activation)(b4)
b4 = Dropout(dropout)(b4)
b4 = SeparableConv2D(filters = 768, kernel_size = (3, 3), dilation_rate = 8, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b4)
b4 = BatchNormalization()(b4)
b4 = Activation(activation)(b4)
b4 = Dropout(dropout)(b4)
b4 = add([b4, res4])
res4 = b4
# Exit Flow
# 5th residual connection
res5 = Conv2D(filters = 1024, kernel_size = (1,1), kernel_initializer = 'he_normal', strides = (1,1), padding = 'same')(b4)
res5 = BatchNormalization()(res5)
    # Block 5
b5 = SeparableConv2D(filters = 768, kernel_size = (3, 3), dilation_rate = 1, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b4)
b5 = BatchNormalization()(b5)
b5 = Activation(activation)(b5)
b5 = Dropout(dropout)(b5)
b5 = SeparableConv2D(filters = 1024, kernel_size = (3, 3), dilation_rate = 1, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b5)
b5 = BatchNormalization()(b5)
b5 = Activation(activation)(b5)
b5 = Dropout(dropout)(b5)
b5 = SeparableConv2D(filters = 1024, kernel_size = (3, 3), strides = (1,1), depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b5)
b5 = BatchNormalization()(b5)
b5 = Activation(activation)(b5) # *******___1/32 times smaller than ip___********
b5 = Dropout(dropout)(b5)
b5 = add([b5, res5])
# Block 6
b6 = SeparableConv2D(filters = 1536, kernel_size = (3, 3), dilation_rate = 1, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b5)
b6 = BatchNormalization()(b6)
b6 = Activation(activation)(b6)
b6 = Dropout(dropout)(b6)
b6 = SeparableConv2D(filters = 1536, kernel_size = (3, 3), dilation_rate = 1, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b6)
b6 = BatchNormalization()(b6)
b6 = Activation(activation)(b6)
b6 = Dropout(dropout)(b6)
b6 = SeparableConv2D(filters = 2048, kernel_size = (3, 3), dilation_rate = 1, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b6)
b6 = BatchNormalization()(b6)
b6 = Activation(activation)(b6)
b6 = Dropout(dropout)(b6)
#*************************************************************************
# Encoder to Decoder
#*************************************************************************
#Transition
ctr = ASPP_v3(b6, input_img, dropout, downsample_by = 16)
# Upsampling
up = UpSampling2D(size=((4,4)), interpolation='bilinear')(ctr)#x4 times upsample
up1 = Conv2D(48, kernel_size = (1, 1), kernel_initializer = 'he_normal', padding = 'same')(b1)
up1 = BatchNormalization()(up1)
up1 = Activation(activation)(up1)
upc = concatenate([up1, up])
up2 = Conv2D(256, kernel_size = (3, 3), kernel_initializer = 'he_normal', padding = 'same')(upc)
up2 = BatchNormalization()(up2)
up2 = Activation(activation)(up2)
up2 = UpSampling2D(size=((4,4)), interpolation='bilinear')(up2)#x4 times upsample
outputs = Conv2D(output_ch, (1, 1), kernel_initializer = 'he_normal')(up2)#, activation='softmax'
model = Model(inputs=[input_img], outputs=[outputs])
return model
def Xception_v2(input_img, n_filters, dropout, activation = 'relu', m_flow = 16, batchnorm = True):#5e-4
    # Entry Flow
#input
ip = Conv2D(filters = 32, kernel_size = (3,3), kernel_initializer = 'he_normal', strides = (2,2), padding = 'same')(input_img)
ip = BatchNormalization()(ip)
ip = Activation(activation)(ip)
ip = Conv2D(filters = 64, kernel_size = (3,3), kernel_initializer = 'he_normal', padding = 'same')(ip)
ip = BatchNormalization()(ip)
ip = Activation(activation)(ip) # *******___1/2 times smaller than ip___********
# 1st Residual connection
res1 = Conv2D(filters = 128, kernel_size = (1,1), kernel_initializer = 'he_normal', strides = (2,2), padding = 'same')(ip)
res1 = BatchNormalization()(res1)
# Block 1
b1 = SeparableConv2D(filters = 128, kernel_size = (3, 3), dilation_rate = 1, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(ip)
b1 = BatchNormalization()(b1)
b1 = Activation(activation)(b1)
b1 = Dropout(dropout)(b1)
b1 = SeparableConv2D(filters = 128, kernel_size = (3, 3), dilation_rate = 1, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b1)
b1 = BatchNormalization()(b1)
b1 = Activation(activation)(b1)
b1 = Dropout(dropout)(b1)
b1 = SeparableConv2D(filters = 128, kernel_size = (3, 3), strides = (2,2), depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b1)
b1 = BatchNormalization()(b1)
b1 = Activation(activation)(b1) # *******___1/4 times smaller than ip___********
b1 = Dropout(dropout)(b1)
b1 = add([b1, res1])
# 2nd Residual connection
    res2 = Conv2D(filters = 256, kernel_size = (1,1), kernel_initializer = 'he_normal', strides = (2,2), padding = 'same')(b1)
res2 = BatchNormalization()(res2)
# Block 2
b2 = SeparableConv2D(filters = 256, kernel_size = (3, 3), dilation_rate = 2, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b1)
b2 = BatchNormalization()(b2)
b2 = Activation(activation)(b2)
b2 = Dropout(dropout)(b2)
b2 = SeparableConv2D(filters = 256, kernel_size = (3, 3), dilation_rate = 2, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b2)
b2 = BatchNormalization()(b2)
b2 = Activation(activation)(b2)
b2 = Dropout(dropout)(b2)
b2 = SeparableConv2D(filters = 256, kernel_size = (3, 3), strides = (2,2), depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b2)
b2 = BatchNormalization()(b2)
b2 = Activation(activation)(b2) # *******___1/8 times smaller than ip___********
b2 = Dropout(dropout)(b2)
b2 = add([b2, res2])
# 3rd Residual connection
res3 = Conv2D(filters = 768, kernel_size = (1,1), kernel_initializer = 'he_normal', strides = (2,2), padding = 'same')(b2)
res3 = BatchNormalization()(res3)
# Block 3
b3 = SeparableConv2D(filters = 768, kernel_size = (3, 3), dilation_rate = 4, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b2)
b3 = BatchNormalization()(b3)
b3 = Activation(activation)(b3)
b3 = Dropout(dropout)(b3)
b3 = SeparableConv2D(filters = 768, kernel_size = (3, 3), dilation_rate = 4, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b3)
b3 = BatchNormalization()(b3)
b3 = Activation(activation)(b3)
b3 = Dropout(dropout)(b3)
b3 = SeparableConv2D(filters = 768, kernel_size = (3, 3), strides = (2,2), depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b3)
b3 = BatchNormalization()(b3)
b3 = Activation(activation)(b3) # *******___1/16 times smaller than ip___********
b3 = Dropout(dropout)(b3)
b3 = add([b3, res3])
# Middle Flow
    # 4th residual connection
res4 = b3
for i in range(m_flow):
b4 = SeparableConv2D(filters = 768, kernel_size = (3, 3), dilation_rate = 8, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b3)
b4 = BatchNormalization()(b4)
b4 = Activation(activation)(b4)
b4 = Dropout(dropout)(b4)
b4 = SeparableConv2D(filters = 768, kernel_size = (3, 3), dilation_rate = 8, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b4)
b4 = BatchNormalization()(b4)
b4 = Activation(activation)(b4)
b4 = Dropout(dropout)(b4)
b4 = SeparableConv2D(filters = 768, kernel_size = (3, 3), dilation_rate = 8, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b4)
b4 = BatchNormalization()(b4)
b4 = Activation(activation)(b4)
b4 = Dropout(dropout)(b4)
b4 = add([b4, res4])
res4 = b4
# Exit Flow
# 5th residual connection
res5 = Conv2D(filters = 1024, kernel_size = (1,1), kernel_initializer = 'he_normal', strides = (1,1), padding = 'same')(b4)
res5 = BatchNormalization()(res5)
    # Block 5
b5 = SeparableConv2D(filters = 768, kernel_size = (3, 3), dilation_rate = 1, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b4)
b5 = BatchNormalization()(b5)
b5 = Activation(activation)(b5)
b5 = Dropout(dropout)(b5)
b5 = SeparableConv2D(filters = 1024, kernel_size = (3, 3), dilation_rate = 1, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b5)
b5 = BatchNormalization()(b5)
b5 = Activation(activation)(b5)
b5 = Dropout(dropout)(b5)
b5 = SeparableConv2D(filters = 1024, kernel_size = (3, 3), strides = (1,1), depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b5)
b5 = BatchNormalization()(b5)
b5 = Activation(activation)(b5) # *******___1/32 times smaller than ip___********
b5 = Dropout(dropout)(b5)
b5 = add([b5, res5])
# Block 6
b6 = SeparableConv2D(filters = 1536, kernel_size = (3, 3), dilation_rate = 1, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b5)
b6 = BatchNormalization()(b6)
b6 = Activation(activation)(b6)
b6 = Dropout(dropout)(b6)
b6 = SeparableConv2D(filters = 1536, kernel_size = (3, 3), dilation_rate = 1, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b6)
b6 = BatchNormalization()(b6)
b6 = Activation(activation)(b6)
b6 = Dropout(dropout)(b6)
b6 = SeparableConv2D(filters = 2048, kernel_size = (3, 3), dilation_rate = 1, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b6)
b6 = BatchNormalization()(b6)
b6 = Activation(activation)(b6)
b6 = Dropout(dropout)(b6)
#*************************************************************************
# Encoder to Decoder
#*************************************************************************
#Transition
ctr = ASPP_v3(b6, input_img, dropout, downsample_by = 16)
# Upsampling
up = UpSampling2D(size=((2,2)), interpolation='bilinear')(ctr)#x2 times upsample
up1 = Conv2D(48, kernel_size = (1, 1), kernel_initializer = 'he_normal', padding = 'same')(b2)
up1 = BatchNormalization()(up1)
up1 = Activation(activation)(up1)
upc = concatenate([up1, up])
up2 = Conv2D(256, kernel_size = (3, 3), kernel_initializer = 'he_normal', padding = 'same')(upc)
up2 = BatchNormalization()(up2)
up2 = Activation(activation)(up2)
up2 = UpSampling2D(size=((2,2)), interpolation='bilinear')(up2)#x2 times upsample
up3 = Conv2D(48, kernel_size = (1, 1), kernel_initializer = 'he_normal', padding = 'same')(b1)
up3 = BatchNormalization()(up3)
up3 = Activation(activation)(up3)
upc2 = concatenate([up2, up3])
upc2 = Conv2D(256, kernel_size = (3, 3), kernel_initializer = 'he_normal', padding = 'same')(upc2)
upc2 = BatchNormalization()(upc2)
upc2 = Activation(activation)(upc2)
upc2 = UpSampling2D(size=((2,2)), interpolation='bilinear')(upc2)#x2 times upsample
up4 = Conv2D(18, kernel_size = (1, 1), kernel_initializer = 'he_normal', padding = 'same')(ip)
up4 = BatchNormalization()(up4)
up4 = Activation(activation)(up4)
upc3 = concatenate([upc2, up4])
upc3 = Conv2D(256, kernel_size = (3, 3), kernel_initializer = 'he_normal', padding = 'same')(upc3)
upc3 = BatchNormalization()(upc3)
upc3 = Activation(activation)(upc3)
upc3 = UpSampling2D(size=((2,2)), interpolation='bilinear')(upc3)
outputs = Conv2D(output_ch, (1, 1), kernel_initializer = 'he_normal')(upc3)#, activation='softmax'
model = Model(inputs=[input_img], outputs=[outputs])
return model
def Xception_seed(input_img, n_filters, dropout, activation = 'relu', m_flow = 16, batchnorm = True):
    # Entry Flow
#input
ip = Conv2D(filters = 32, kernel_size = (3,3), kernel_initializer = 'he_normal', strides = (2,2), padding = 'same')(input_img)
ip = BatchNormalization()(ip)
ip = Activation(activation)(ip)
ip = Conv2D(filters = 64, kernel_size = (3,3), kernel_initializer = 'he_normal', padding = 'same')(ip)
ip = BatchNormalization()(ip)
ip = Activation(activation)(ip) # *******___1/2 times smaller than ip___********
# 1st Residual connection
res1 = Conv2D(filters = 128, kernel_size = (1,1), kernel_initializer = 'he_normal', strides = (2,2), padding = 'same')(ip)
res1 = BatchNormalization()(res1)
# Block 1
b1 = SeparableConv2D(filters = 128, kernel_size = (3, 3), dilation_rate = 1, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(ip)
b1 = BatchNormalization()(b1)
b1 = Activation(activation)(b1)
b1 = Dropout(dropout)(b1)
b1 = SeparableConv2D(filters = 128, kernel_size = (3, 3), dilation_rate = 1, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b1)
b1 = BatchNormalization()(b1)
b1 = Activation(activation)(b1)
b1t = Dropout(dropout)(b1)
b1 = SeparableConv2D(filters = 128, kernel_size = (3, 3), strides = (2,2), depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b1t)
b1 = BatchNormalization()(b1)
b1 = Activation(activation)(b1) # *******___1/4 times smaller than ip___********
b1 = Dropout(dropout)(b1)
b1 = add([b1, res1])
# 2nd Residual connection
    res2 = Conv2D(filters = 256, kernel_size = (1,1), kernel_initializer = 'he_normal', strides = (2,2), padding = 'same')(b1)
res2 = BatchNormalization()(res2)
# Block 2
b2 = SeparableConv2D(filters = 256, kernel_size = (3, 3), dilation_rate = 2, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b1)
b2 = BatchNormalization()(b2)
b2 = Activation(activation)(b2)
b2 = Dropout(dropout)(b2)
b2 = SeparableConv2D(filters = 256, kernel_size = (3, 3), dilation_rate = 2, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b2)
b2 = BatchNormalization()(b2)
b2 = Activation(activation)(b2)
b2t = Dropout(dropout)(b2)
b2 = SeparableConv2D(filters = 256, kernel_size = (3, 3), strides = (2,2), depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b2t)
b2 = BatchNormalization()(b2)
b2 = Activation(activation)(b2) # *******___1/8 times smaller than ip___********
b2 = Dropout(dropout)(b2)
b2 = add([b2, res2])
# 3rd Residual connection
res3 = Conv2D(filters = 768, kernel_size = (1,1), kernel_initializer = 'he_normal', strides = (2,2), padding = 'same')(b2)
res3 = BatchNormalization()(res3)
# Block 3
b3 = SeparableConv2D(filters = 768, kernel_size = (3, 3), dilation_rate = 4, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b2)
b3 = BatchNormalization()(b3)
b3 = Activation(activation)(b3)
b3 = Dropout(dropout)(b3)
b3 = SeparableConv2D(filters = 768, kernel_size = (3, 3), dilation_rate = 4, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b3)
b3 = BatchNormalization()(b3)
b3 = Activation(activation)(b3)
b3t = Dropout(dropout)(b3)
b3 = SeparableConv2D(filters = 768, kernel_size = (3, 3), strides = (2,2), depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b3t)
b3 = BatchNormalization()(b3)
b3 = Activation(activation)(b3) # *******___1/16 times smaller than ip___********
b3 = Dropout(dropout)(b3)
b3 = add([b3, res3])
# Middle Flow
    # 4th residual connection
res4 = b3
for i in range(m_flow):
b4 = SeparableConv2D(filters = 768, kernel_size = (3, 3), dilation_rate = 8, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b3)
b4 = BatchNormalization()(b4)
b4 = Activation(activation)(b4)
b4 = Dropout(dropout)(b4)
b4 = SeparableConv2D(filters = 768, kernel_size = (3, 3), dilation_rate = 8, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b4)
b4 = BatchNormalization()(b4)
b4 = Activation(activation)(b4)
b4 = Dropout(dropout)(b4)
b4 = SeparableConv2D(filters = 768, kernel_size = (3, 3), dilation_rate = 8, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b4)
b4 = BatchNormalization()(b4)
b4 = Activation(activation)(b4)
b4 = Dropout(dropout)(b4)
b4 = add([b4, res4])
res4 = b4
# Exit Flow
# 5th residual connection
res5 = Conv2D(filters = 1024, kernel_size = (1,1), kernel_initializer = 'he_normal', strides = (1,1), padding = 'same')(b4)
res5 = BatchNormalization()(res5)
    # Block 5
b5 = SeparableConv2D(filters = 768, kernel_size = (3, 3), dilation_rate = 1, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b4)
b5 = BatchNormalization()(b5)
b5 = Activation(activation)(b5)
b5 = Dropout(dropout)(b5)
b5 = SeparableConv2D(filters = 1024, kernel_size = (3, 3), dilation_rate = 1, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b5)
b5 = BatchNormalization()(b5)
b5 = Activation(activation)(b5)
b5 = Dropout(dropout)(b5)
b5 = SeparableConv2D(filters = 1024, kernel_size = (3, 3), strides = (1,1), depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b5)
b5 = BatchNormalization()(b5)
b5 = Activation(activation)(b5) # *******___1/32 times smaller than ip___********
b5 = Dropout(dropout)(b5)
b5 = add([b5, res5])
# Block 6
b6 = SeparableConv2D(filters = 1536, kernel_size = (3, 3), dilation_rate = 1, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b5)
b6 = BatchNormalization()(b6)
b6 = Activation(activation)(b6)
b6 = Dropout(dropout)(b6)
b6 = SeparableConv2D(filters = 1536, kernel_size = (3, 3), dilation_rate = 1, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b6)
b6 = BatchNormalization()(b6)
b6 = Activation(activation)(b6)
b6 = Dropout(dropout)(b6)
b6 = SeparableConv2D(filters = 2048, kernel_size = (3, 3), dilation_rate = 1, depthwise_initializer='he_normal', pointwise_initializer='he_normal',padding = 'same')(b6)
b6 = BatchNormalization()(b6)
b6 = Activation(activation)(b6)
b6 = Dropout(dropout)(b6)
#*************************************************************************
# Encoder to Decoder
#*************************************************************************
#Transition
#connecting encoder decoder
c6 = PDC(b6, 2048, dropout)
#c6 = ASPP_v3(b6, input_img, dropout, downsample_by = 16)
# Expanding Path
dam6 = DAM(b3t, c6, 512, 15, input_img, 8, activation)
u6 = UpSampling2D(interpolation='bilinear')(dam6) # 2x upsample
dam7 = DAM(b2t, u6, 256, 15, input_img, 4, activation)
u7 = UpSampling2D(interpolation='bilinear')(dam7) # 2x upsample
dam8 = DAM(b1t, u7, 128, 15, input_img, 2, activation)
u8 = UpSampling2D(interpolation='bilinear')(dam8) # 2x upsample
up4 = Conv2D(18, kernel_size = (1, 1), kernel_initializer = 'he_normal', padding = 'same')(ip)
up4 = BatchNormalization()(up4)
up4 = Activation(activation)(up4)
u8 = concatenate([u8, up4])
u8 = Conv2D(128, kernel_size = (3, 3), kernel_initializer = 'he_normal', padding = 'same')(u8)
u8 = BatchNormalization()(u8)
u8 = Activation(activation)(u8)
u9 = UpSampling2D(size=((2,2)), interpolation='bilinear')(u8) # 2x upsample
outputs = Conv2D(output_ch, (1, 1), kernel_initializer = 'he_normal')(u9)#, activation='softmax'
model = Model(inputs=[input_img], outputs=[outputs])
return model
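
A hedged end-to-end sketch (not part of the original file): Xception_seed attaches the DAM/PDC decoder to the Xception encoder via the pre-stride taps b1t, b2t and b3t, so the input size should be a multiple of 16 for the skip connections to align. Building the model assumes the repo's conv_blocks_1 and models modules are importable.

if __name__ == '__main__':
    demo_input = Input((256, 256, 3))
    demo_model = Xception_seed(demo_input, n_filters=16, dropout=0.1, m_flow=16)
    demo_model.summary()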
| 51.052133
| 185
| 0.648905
| 3,830
| 32,316
| 5.285379
| 0.056136
| 0.104678
| 0.152991
| 0.106605
| 0.909104
| 0.892654
| 0.875068
| 0.875068
| 0.85847
| 0.840933
| 0
| 0.062421
| 0.196404
| 32,316
| 632
| 186
| 51.132911
| 0.717086
| 0.072781
| 0
| 0.7287
| 0
| 0
| 0.068222
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.015695
| false
| 0
| 0.060538
| 0
| 0.091928
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
438290fc8d0af84fdbdf1c3ae50361def574e4f1
| 53,508
|
py
|
Python
|
install/app_store/tk-framework-shotgunutils/v5.2.4/python/shotgun_globals/ui/resources_rc.py
|
JoanAzpeitia/lp_sg
|
e0ee79555e419dd2ae3a5f31e5515b3f40b22a62
|
[
"MIT"
] | null | null | null |
install/app_store/tk-framework-shotgunutils/v5.2.4/python/shotgun_globals/ui/resources_rc.py
|
JoanAzpeitia/lp_sg
|
e0ee79555e419dd2ae3a5f31e5515b3f40b22a62
|
[
"MIT"
] | null | null | null |
install/app_store/tk-framework-shotgunutils/v5.2.4/python/shotgun_globals/ui/resources_rc.py
|
JoanAzpeitia/lp_sg
|
e0ee79555e419dd2ae3a5f31e5515b3f40b22a62
|
[
"MIT"
] | 1
|
2020-02-15T10:42:56.000Z
|
2020-02-15T10:42:56.000Z
|
# -*- coding: utf-8 -*-
# Resource object code
#
# by: The Resource Compiler for PySide (Qt v4.8.7)
#
# WARNING! All changes made in this file will be lost!
from tank.platform.qt import QtCore
qt_resource_data = "\x00\x00\x03\x9c\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x0e\x00\x00\x00\x0e\x08\x06\x00\x00\x00\x1fH-\xd1\x00\x00\x00\x06bKGD\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00\x07tIME\x07\xe0\x08\x1f\x0e0/\x9c\x93\x9b\x8a\x00\x00\x03)IDAT(\x15\x01\x1e\x03\xe1\xfc\x01\xd7\xb3F\x00\x01\x03\x05\x00\xfc\xf8\xf4\x00\x02\x02\xfd\x00\xfe\xff\x07\x00\xb0\xd5A\x1a\x0a\x0a\x0a\x92\x00\x00\x00\x0c\xf5\xf5\xf5iM\x22\xbc\xdf\xfe\xf9\xf5\x00\xfa\xf9\xfc\x00\x03\x00\xfe\x00\xff\xfb\x03\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xfe\x00\xb0\xd6J\x0a\xfd\xfd\xfdu\x19\x19\x18\x80:::\x00\xf6\xf6\xf6\x00\xcc\xcc\xcc\xde\xb2\xddC\x87\xfc\xfc\xfc\x86J\x19\xb1\xf4\x00\x00\x00\x00\x00\x00\x00\x00\x01\xd7\xb3F\x00\x01\x02\x03\x00\xaa\xcd9X\x15\x15\x15\x97>>>\x10***\x00\x00\x00\x00\x00\xc4\xc4\xc4\x00\x1c\x1c\x1c\x00\xd6\xd6\xd6\x00\xd2\xd2\xd2\xf7\xef\xef\xefkT \xb5\x9f\x00\xfc\x05\x00\x01\x8c\x8c\x8c3\xf1\xf1\xf1\x99<<<3<<<\x00\x05\x05\x05\x00\xea\xea\xea\x00\xde\xde\xde\x00\xbd\xbd\xbd\x00888\x00\x1b\x1b\x1b\x00\x05\x05\x05\x00\xd0\xd0\xd0\x00\xd7\xd7\xd7\xd1\xfc\xfc\xfch\x01\x93\x93\x93\xfaLLL\x05\x08\x08\x08\x00\xe3\xe3\xe3\x00\xdb\xdb\xdb\x00\xe2\xe2\xe2\x00\xee\xee\xee\x00aaa\x00\x0c\x0c\x0c\x00\xe7\xe7\xe7\x00\x05\x05\x05\x00\x12\x12\x12\x00\xe7\xe7\xe7\x00\xb4\xb4\xb4\xff\x02\x06\x06\x06\xf6 \x00\xfa\xfa\xfa\x00\xe9\xe9\xe9\x00\x09\x09\x09\x00'''\x00\x17\x17\x17\x00\x13\x13\x13\x00\x0c\x0c\x0c\x00\x0c\x0c\x0c\x00\xda\xda\xda\x00\xb2\xb2\xb2\x00\x05\x05\x05\x00\x08\x08\x08\xf1\x04\xf9\xf9\xf9\xf0\xfb\xfb\xfb\x0f\x0e\x0e\x0e\x00555\x00\xe3\xe3\xe3\x00\xe7\xe7\xe7\x00\x0c\x0c\x0c\x00\xdf\xdf\xdf\x00\xf9\xf9\xf9\x00\xd5\xd5\xd5\x00\xfa\xfa\xfa\x00\xfa\xfa\xfa\x00\xf3\xf3\xf3\x00\xff\xff\xff\xf1\x04\xff\xff\xff\x00\xff\xff\xff\x00\xfa\xfa\xfa\x00\x01\x01\x01\x00\x18\x18\x18\x00\x03\x03\x03\x00\xf2\xf2\xf2\x00\xd1\xd1\xd1\x00\xfa\xfa\xfa\x00\x02\x02\x02\x00\x03\x03\x03\x00\x02\x02\x02\x00\xfd\xfd\xfd\x00\xfb\xfb\xfb\x00\x02\x02\x02\x02\x0c\x06\x06\x06\x00\xfd\xfd\xfd\x00\xf3\xf3\xf3\x00\xf7\xf7\xf7\x00\x0d\x0d\x0d\x00!!!\x00\xfe\xfe\xfe\x00\x01\x01\x01\x00\x00\x00\x00\x00\xf0\xf0\xf0\x00\xf6\xf6\xf6\x00\x06\x06\x06\x00\x01\x01\x01\x0a\x04\xf9\xf9\xf9\x13\xf6\xf6\xf6\x00\x16\x16\x16\x00\xea\xea\xea\x00\xe8\xe8\xe8\x00\x00\x00\x00\x00\x09\x09\x09\x00\x01\x01\x01\x00\xf5\xf5\xf5\x00\xf6\xf6\xf6\x00\x13\x13\x13\x00'''\x00\xfa\xfa\xfa\x00\xf8\xf8\xf8\x15\x03:::\xcd\xcc\xcc\xcc@\xfb\xfb\xfb\x0d%%%\x00\x0f\x0f\x0f\x00\xf2\xf2\xf2\x00\xf3\xf3\xf3\x00\xd6\xd6\xd6\x00\x03\x03\x03\x00555\x00\x22\x22\x22\x00\xe3\xe3\xe3\x00\xc4\xc4\xc4\xec\xf9\xf9\xf9]\x03\x8fa\xfb\xda\xcb\xe2\x15\x94\xd3\xd3\xd3\xf9\xdb\xdb\xdbB\x05\x05\x05\x00+++\x00\x0a\x0a\x0a\x00\xff\xff\xff\x00###\x00\xf4\xf4\xf4\x00\xc1\xc1\xc1\x00\xd2\xd2\xd2\x85\xff\xff\xffTV\x13\xb5\xd2\x01\xcf\xa2=\x00\xfc\xf4\xeb\x00\xfb\xfa\xfc\x00\xa6\xdcH\x19\x03\x03\x03\x86\x1d\x1d\x1d`EEE\x00\xef\xef\xef\x00\xcf\xcf\xcf\x00\xd9\xd9\xd9\xa9\xff\xff\xffwU\x11\xaf\xe1\x04\x06\x01\x00\x01\x01\x0a\x00\x01\xcf\xa2=\x00\xfc\xf4\xeb\x00\xfb\xfb\xfe\x00\xfe\xf9\xf6\x00\x04\x04\x10\x00\xa0\xda<3\x0d\x0d\x0d\x88\x00\x00\x00\x07\xf6\xf6\xf6yX\x16\xb4\xc5\xfc\xfd\xf6\x00\xfd\xfb\x03\x00\x04\x05\xff\x00\x01\x01\x0a\x00QuOn\x83\x04\xf7\x8f\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x01.\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x19tEXtSoftware\x00Adobe 
ImageReadyq\xc9e<\x00\x00\x00\xd0IDATx\xdab\xfc\xff\xff?\x03%\x80\x89\x81B\xc0\x88\xcc\x01\xbaF\x18H\xa9\x01\xb1\x08\x10\xff\x04\xe2\xe3\x8c\x8c\x8c\x9f\xf1\x19\xc0\x82\xa4\x99\x17H\xf9\x021\x1b\x92<\xc8\xc0\xe5\xc4zA\x12M3\x08\xf0\x02\x0d\x96\x22\xca\x05 \xc5\xb8\x14\x01\x0d\xe1\x82z\x8d\x19\x88\xff\x02\xf1]\x98\xd7\x90]\xf0\x0b\x8fEbP\xcd\x0cPZ\x92\x94X\xf8\x89\xc5k\x98a\x00t\xd2e \xf5\x1cM\xfe\x01P\xfc-)\xe9\xe0&\x1a\x1f\xa6\xf9\x15\x9a\xf8\x07\x5c\x06\xa0\x87\x83\x02(z\x81\xae\xf8\x80&\xf7\x1d%!\x01\x15\x81\xfc\xa8\x0b\xc5\xd8\xfc\xfb\x00\x1ax\xaf\x90\x5c\xf0\x18h\xf0/Fh<\xbb\xe1\x0b($\xd7\x822\xce\x19d\x83A\x82:Dh\x06\x81\x7fPW\xf0#\x89\xc92\x0exn\x04\x080\x00Ys9S\x88t\x8e\xb5\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x01\xa7\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq\xc9e<\x00\x00\x01IIDATx\xda\xa4S\xb1J\xc4@\x10\xcd\xc6h\xce\xe3\x16\x02g\x93C\xf1*\xad\x0e\x04\x7f\xc0\xca\xca\xca_\xb5\xf2\x0bl-\xac\x82\xc5\x81p6\xa6J\x8c\x9cx\xc6\xf7\xe4-\x8ck\xae\xba\x85\xc7&\xb3;of\xde\xcc\xba\xbe\xef\x93]\x96\xb3? \x9b`\xcb\x80\x03\xe0\x16\xf8\x8e\xee\xbf\xfeqv\xee.\x83\xd3\x99\xb1\x9d\x00\xef\xc0\x08\xd8\x07\xee\x81\x05\xf0`\xeeLdc\xc0\x19\xa3\xed\x01\x9f\xc0\x07\xb0R\x94\x02\xf0\xb2O\x11i\x15e:\xc3\xd6\x90,\x95m\xa3\x9d$s}\xd3\xe9Z\x0e>*e\x0d\x5c2H*\xe7\xa9\x0eZ\xa58R\x84J\x19\xc5\x04\xcclI\xbf\x90\x01\x7f\x0e\x81/\xe0Y\x04\xad\x22\xbd\x894\x16\x93\xe4y&6\xd6<\x96\xc3\x91:\xb1\x0eb\xa9\x8c\xab\x81.6\x81\xa0\x94\xc1\xab\x9c\x8d\x9c\xb9\xbf\xe8\xec)\xea\xc49\xf7L\x06\xa6SB\xed\x0a\x91(d\x81\xefG\x89G\xc7\xd3\x81N\xfc\xea\x16\x08J\xb5\x93\xeb\xd8\xdc\xa3\xfd&\x1e \xad\x85%\xc85}\xc9\x16\xb5\xfd\xc0Y\xcd\xf3Tb-\xcdA+\x84\xeeT[\x9e\x01\xc9\x1b\xa7:\xe7\xca\xa0\xd38w\x22\xb9\xd0<xEL\x8c\xe0$\xa8I\xc0\xdas\x88\xd4I\x9c\xc2D\x19\xdb\xe9\xc3\x9d\xfa\xdfk\xdc\xf59\xff\x080\x00\xcd'p\xb2\xaf\xdas4\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x03\x9c\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x0e\x00\x00\x00\x0e\x08\x06\x00\x00\x00\x1fH-\xd1\x00\x00\x00\x06bKGD\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00\x07tIME\x07\xe0\x08\x1f\x0e0)u\xf0>\xbf\x00\x00\x03)IDAT(\x15\x01\x1e\x03\xe1\xfc\x01\xd7\xb3F\x00\x01\x03\x05\x00\xfc\xf8\xf4\x00\x02\x02\xfd\x00\xfe\xff\x07\x00\xb0\xd5A\x1a\x0a\x0a\x0a\x92\x00\x00\x00\x0c\xf5\xf5\xf5iM\x22\xbc\xdf\xfe\xf9\xf5\x00\xfa\xf9\xfc\x00\x03\x00\xfe\x00\xff\xfb\x03\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xfe\x00\xb0\xd6J\x0a\xfd\xfd\xfdu\x19\x19\x18\x80:::\x00\xf6\xf6\xf6\x00\xcc\xcc\xcc\xde\xb2\xddC\x87\xfc\xfc\xfc\x86J\x19\xb1\xf4\x00\x00\x00\x00\x00\x00\x00\x00\x01\xd7\xb3F\x00\x01\x02\x03\x00\xaa\xcd9X\x15\x15\x15\x97>>>\x10***\x00\x00\x00\x00\x00\xc4\xc4\xc4\x00\x1c\x1c\x1c\x00\xd6\xd6\xd6\x00\xd2\xd2\xd2\xf7\xef\xef\xefkT \xb5\x9f\x00\xfc\x05\x00\x01\x8c\x8c\x8c3\xf1\xf1\xf1\x99<<<3<<<\x00\x05\x05\x05\x00\xea\xea\xea\x00\xde\xde\xde\x00\xbd\xbd\xbd\x00888\x00\x1b\x1b\x1b\x00\x05\x05\x05\x00\xd0\xd0\xd0\x00\xd7\xd7\xd7\xd1\xfc\xfc\xfch\x01\x93\x93\x93\xfaLLL\x05\x08\x08\x08\x00\xe3\xe3\xe3\x00\xdb\xdb\xdb\x00\xe2\xe2\xe2\x00\xee\xee\xee\x00aaa\x00\x0c\x0c\x0c\x00\xe7\xe7\xe7\x00\x05\x05\x05\x00\x12\x12\x12\x00\xe7\xe7\xe7\x00\xb4\xb4\xb4\xff\x02\x06\x06\x06\xf6 
\x00\xfa\xfa\xfa\x00\xe9\xe9\xe9\x00\x09\x09\x09\x00'''\x00\x17\x17\x17\x00\x13\x13\x13\x00\x0c\x0c\x0c\x00\x0c\x0c\x0c\x00\xda\xda\xda\x00\xb2\xb2\xb2\x00\x05\x05\x05\x00\x08\x08\x08\xf1\x04\xf9\xf9\xf9\xf0\xfb\xfb\xfb\x0f\x0e\x0e\x0e\x00555\x00\xe3\xe3\xe3\x00\xe7\xe7\xe7\x00\x0c\x0c\x0c\x00\xdf\xdf\xdf\x00\xf9\xf9\xf9\x00\xd5\xd5\xd5\x00\xfa\xfa\xfa\x00\xfa\xfa\xfa\x00\xf3\xf3\xf3\x00\xff\xff\xff\xf1\x04\xff\xff\xff\x00\xff\xff\xff\x00\xfa\xfa\xfa\x00\x01\x01\x01\x00\x18\x18\x18\x00\x03\x03\x03\x00\xf2\xf2\xf2\x00\xd1\xd1\xd1\x00\xfa\xfa\xfa\x00\x02\x02\x02\x00\x03\x03\x03\x00\x02\x02\x02\x00\xfd\xfd\xfd\x00\xfb\xfb\xfb\x00\x02\x02\x02\x02\x0c\x06\x06\x06\x00\xfd\xfd\xfd\x00\xf3\xf3\xf3\x00\xf7\xf7\xf7\x00\x0d\x0d\x0d\x00!!!\x00\xfe\xfe\xfe\x00\x01\x01\x01\x00\x00\x00\x00\x00\xf0\xf0\xf0\x00\xf6\xf6\xf6\x00\x06\x06\x06\x00\x01\x01\x01\x0a\x04\xf9\xf9\xf9\x13\xf6\xf6\xf6\x00\x16\x16\x16\x00\xea\xea\xea\x00\xe8\xe8\xe8\x00\x00\x00\x00\x00\x09\x09\x09\x00\x01\x01\x01\x00\xf5\xf5\xf5\x00\xf6\xf6\xf6\x00\x13\x13\x13\x00'''\x00\xfa\xfa\xfa\x00\xf8\xf8\xf8\x15\x03:::\xcd\xcc\xcc\xcc@\xfb\xfb\xfb\x0d%%%\x00\x0f\x0f\x0f\x00\xf2\xf2\xf2\x00\xf3\xf3\xf3\x00\xd6\xd6\xd6\x00\x03\x03\x03\x00555\x00\x22\x22\x22\x00\xe3\xe3\xe3\x00\xc4\xc4\xc4\xec\xf9\xf9\xf9]\x03\x8fa\xfb\xda\xcb\xe2\x15\x94\xd3\xd3\xd3\xf9\xdb\xdb\xdbB\x05\x05\x05\x00+++\x00\x0a\x0a\x0a\x00\xff\xff\xff\x00###\x00\xf4\xf4\xf4\x00\xc1\xc1\xc1\x00\xd2\xd2\xd2\x85\xff\xff\xffTV\x13\xb5\xd2\x01\xcf\xa2=\x00\xfc\xf4\xeb\x00\xfb\xfa\xfc\x00\xa6\xdcH\x19\x03\x03\x03\x86\x1d\x1d\x1d`EEE\x00\xef\xef\xef\x00\xcf\xcf\xcf\x00\xd9\xd9\xd9\xa9\xff\xff\xffwU\x11\xaf\xe1\x04\x06\x01\x00\x01\x01\x0a\x00\x01\xcf\xa2=\x00\xfc\xf4\xeb\x00\xfb\xfb\xfe\x00\xfe\xf9\xf6\x00\x04\x04\x10\x00\xa0\xda<3\x0d\x0d\x0d\x88\x00\x00\x00\x07\xf6\xf6\xf6yX\x16\xb4\xc5\xfc\xfd\xf6\x00\xfd\xfb\x03\x00\x04\x05\xff\x00\x01\x01\x0a\x00QuOn\x83\x04\xf7\x8f\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x02c\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq\xc9e<\x00\x00\x02\x05IDATx\xdabd\xc0\x0e\x84\x81X\x07\x88\xf9\x90\xc4\xae\x02\xf1=t\x85L\xd84\x8b\x8a\x8a\xd9\xad[\xbf\xc1\xf4\xe5\xcbW\xaa 
|\xf2\xd4iuK++\x0b\xa0\x9c\x12\x03\x11\xc0\xa0\xa7\xb7/\xe7\xff\xff\xffi\xc8x\xef\xbe\xfd\xc5@9\x0fb\x5c\xc0\xa5\xab\xab\xfb\x1f]PG[\x1b$\xc6J\x8c\x01X\xc1\xb7\xef\xdfY\xb1\x89c3\x80\x13\x9bB.N\xce\xdf\xc4\x1a\xf0\xe9\xc1\x83\x07\xcc\xe8\x82\x17/]\x02\x89}C\x17g1\x9d}b?\x90\xe6\x85\x09\xfc\xff\xf0\xea\xc2\xc2M\xcbn:\xd8\xdb\xb3\xaa\xa9\xa9~\x86\x89\xf7\xf7\xf5\xbd\x02R\xf71\x0c\x00i>\x95bn\x0c\xf6\xe7\xef\xbf\x7fO<\xfd\xa80OJ\xec\xe5\xd6\xbb\x9f\xaf\xa8\xa91\xec\x85)\xe4\xe3\xe7g\xc1\x16\x88\xcc\xd2~)i\xd7^\x7f\xfd\xb2\xf3\xee\xdb\xb7\xb7\xde~\xfb\xea\xa6,,\x12\xac+#\xba\xfc\xce'\xd6\x97\xef\x7fp\x1a\xc9\xf0?\x00\x87\x0177\xd7\xfd\xfb\xf7\x05\x9e<~,\x04\xe4r\x80\xbc\x0a\xc4\xff\xc0\x06\x04k\x89\x8bH\xf3q\xb0\x7f\xf9\xf5\xf7o\xd7\xb1\x07\x8f\x15\x049\xd9\x93\x0d\xa5\xa5\xe7]x\xc6\xaa\xc8\xc3\xfe\x5c\x8c\x8f\xfd\xa3\xaa\xaa\xca\x1f__?!G''1\x1en\x1e\xe9\xb3g\xcf\x82R\xeb+\xb0\x01\xe9\xc62\x92\x12<ll\xfe\xeab\xe2@\x83X;\x8e\xdc\x7f\xec\xab.*\x22\xca\xc3\xc6\xba\xfd\xd6\xeb_\x0eJ\xc2\xb7@\xae\xe0\xe6\xe6\xfe\x052\xc8\xdd\xdd\x8dEXDTp\xe7\x8e\x1d\xbf\xc0\xb1\x90\xb8\xf1\xea-\x10\xae?p\xf7\xae\x93\x82\x90\xb0\xbb\xb2\x88\xc0\xa2\x8b\xcf\x9fYH\xf3\x0b\x5c{\xf3U\x01\xdd\xdflll\x7fCCB\xb8\x81L\x09P\xc00\x1c\x8871\x00\xd1\x0e\x0b\xcf\x5c\x08\xd3\x16\xff,\xc9\xcb\xce~\xfa\xe9\xc7\xcf\x5c\xac\xcc\xcc\xaf\xdf\xbd\xfd'\xa3\xae\xf7\xf3\xe9\xad\xcb\xef\xd0\xccy\x0e\xc4\xbfY\x08\xa5\xc0\xafl<\x7f\x80\x9a\x17\xe2\x92g\x04\xa6\x834 \x9d\x86\xc7\x8c\xcf\xa7S-\x1cqI\x02\x04\x18\x00\x98D\xc0\x14p\x22\x8cC\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x04c\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x12\x00\x00\x00\x0e\x08\x06\x00\x00\x00\x22\xdaL\xb7\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq\xc9e<\x00\x00\x03#iTXtXML:com.adobe.xmp\x00\x00\x00\x00\x00<?xpacket begin=\x22\xef\xbb\xbf\x22 id=\x22W5M0MpCehiHzreSzNTczkc9d\x22?> <x:xmpmeta xmlns:x=\x22adobe:ns:meta/\x22 x:xmptk=\x22Adobe XMP Core 5.5-c014 79.151481, 2013/03/13-12:09:15 \x22> <rdf:RDF xmlns:rdf=\x22http://www.w3.org/1999/02/22-rdf-syntax-ns#\x22> <rdf:Description rdf:about=\x22\x22 xmlns:xmp=\x22http://ns.adobe.com/xap/1.0/\x22 xmlns:xmpMM=\x22http://ns.adobe.com/xap/1.0/mm/\x22 xmlns:stRef=\x22http://ns.adobe.com/xap/1.0/sType/ResourceRef#\x22 xmp:CreatorTool=\x22Adobe Photoshop CC (Macintosh)\x22 xmpMM:InstanceID=\x22xmp.iid:E370F22D3B8011E38248A965393E3EC5\x22 xmpMM:DocumentID=\x22xmp.did:E370F22E3B8011E38248A965393E3EC5\x22> <xmpMM:DerivedFrom stRef:instanceID=\x22xmp.iid:144E962A3B7111E38248A965393E3EC5\x22 stRef:documentID=\x22xmp.did:E370F22C3B8011E38248A965393E3EC5\x22/> </rdf:Description> </rdf:RDF> </x:xmpmeta> <?xpacket end=\x22r\x22?>\xce008\x00\x00\x00\xd6IDATx\xdab\xfc\xff\xff?\x03\x01\xc0\x05\xc4\xdf\x08)b\x22B\xde\x84\x81\x08@\xc8 1 V\x22B\x1dA\x05\xd2@\xcc\x06\xc4R\xd40\x08\x04\x14)1\x88\x15\xea5Pl\xc8Sb\x90$T\xfe&4\xe6\xc4\xa0\xe22\xa4\x1a\x04\xf2\xd6\x1f >\x0d\xc4\xff\xa0\xde\xe3\x00b\x0f 6FW\xcc\x82\xc4\x96\x82\xba\x82\x01)\x5c^\x02\xf1w ~\x06\xc4\x0a@\xfc\x15j9\xc8 F\xa8k\x7f\x00\xf1oF\xa4\x04i\x8c\xc5\xa6S@|\x01\x88\xb5\x80\xd8\x06j\x10\xc8\xe0\xb7@\xac\x0eUs\x00\x88o1\x12\x91\xb2a\xa9;\x06\xca>\x06\xc4W\xa1\xe9\x8b\x19\x88_\x00\xf1'\x16\x06\xe2\x00(\x8b\xbc\x02b\x11 \xbe\x03\x8d\xc9\xbb\xb8\xc2\x88\x10x\x00\xf5\xd6\x0fl\x92\xa4\x18t\x1f\x88?\xe2\x92\x04\x080\x00\xed\x15,i\x8f\x8c\x18:\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x07\x12\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x04$iCCPICC Profile\x00\x008\x11\x85U\xdfo\xdbT\x14>\x89oR\xa4\x16? 
XG\x87\x8a\xc5\xafUS[\xb9\x1b\x1a\xad\xc6\x06I\x93\xa5\xedJ\x16\xa5\xe9\xd8*$\xe4:7\x89\xa9\x1b\x07\xdb\xe9\xb6\xaaO{\x817\x06\xfc\x01@\xd9\x03\x0fH<!\x0d\x06b{\xd9\xf6\xc0\xb4IS\x87*\xaaIH{\xe8\xc4\x0f!&\xed\x05U\xe1\xbbvb'S\xc4\x5c\xf5\xfa\xcb9\xdf9\xe7;\xe7^\xdbD=_i\xb5\x9a\x19U\x88\x96\xab\xae\x9d\xcf$\x95\x93\xa7\x16\x94\x9eM\x8a\xd2\xb3\xd4K\x03\xd4\xab\xe9N-\x91\xcb\xcd\x12.\xc1\x15\xf7\xce\xeb\xe1\x1d\x8a\x08\xcb\xed\x91\xee\xfeN\xf6#\xbfz\x8b\xdc\xd1\x89\x22O\xc0n\x15\x1d}\x19\xf8\x0cQ\xcc\xd4k\xb6K\x14\xbf\x07\xfb\xf8i\xb7\x06\xdc\xf3\x1c\xf0\xd36\x04\x02\xab\x02\x97}\x9c\x12x\xd1\xc7'=N!?\x09\x8e\xd0*\xeb\x15\xad\x08\xbc\x06<\xbc\xd8f/\xb7a_\x03\x18\xc8\x93\xe1Un\x1b\xba\x22f\x91\xb3\xad\x92ar\xcf\xe1/\x8fq\xb71\xff\x0f.\x9bu\xf4\xec]\x83X\xfb\x9c\xa5\xb9c\xb8\x0f\x89\xde+\xf6T\xbe\x89?\xd7\xb5\xf4\x1c\xf0K\xb0_\xaf\xb9Ia\x7f\x05\xf8\x8f\xfa\xd2|\x02x\x1fQ\xf4\xa9\x92}t\xde\xe7G__\xad\x14\xde\x06\xde\x05{\xd1p\xa7\x0bM\xfbju1{\x1c\x18\xb1\xd1\xf5%\xeb\x98\xc8#8\xd7ug\x12\xb3\xa4\x17\x80\xefV\xf8\xb4\xd8c\xe8\x91\xa8\xc8Si\xe0a\xe0\xc1J}\xaa\x99_\x9aqV\xe6\x84\xdd\xcb\xb3Z\x99\xcc\x02#\x8fd\xbf\xa7\xcd\xe4\x80\x07\x80?\xb4\xad\xbc\xa8\x05\xcd\xd2:73\xa2\x16\xf2KWkn\xae\xa9A\xda\xae\x9aYQ\x0b\xfd2\x99;^\x8f\xc8)m\xbb\x95\xc2\x94\x1f\xcb\x0e\xbav\xa1\x19\xcb\x16J\xc6\xd1\xe9&\x7f\xadfzg\x11\xda\xd8\x05\xbb\x9e\x17\xda\x90\x9f\xdd\xd2\xect\x06\x18y\xd8?\xbc:/\xe6\x06\x1c\xdb]\xd4Rb\xb6\xa3\xc0G\xe8DD#N\x16-b\xd5\xa9J;\xa4P\x9e2\x94\xc4\xbdF6<%2\xc8\x84\x85\xc3\xcba1\x22O\xd2\x12l\xddy9\x8f\xe3\xc7\x84\x8c\xb2\x17\xbd-\xa2Q\xa5;\xc7\xafp\xbf\xc9\xb1X?S\xd9\x01\xfc\x1fb\xb3\xec0\x1bg\x13\xa4\xb07\xd8\x9b\xec\x08K\xc1:\xc1\x0e\x05\x0arm:\x15*\xd3}(\xf5\xf3\xbcOuT\x16:NP\xf2\xfc@}(\x88Q\xce\xd9\xef\x1a\xfa\xcd\x8f\xfe\x86\x9a\xb0K+\xf4#O\xd014[\xed\x13 \x07\x13hu7\xd2>\xa7kk?\xec\x0e<\xca\x06\xbb\xf8\xce\xed\xbekkt\xfcq\xf3\x8d\xdd\x8bm\xc76\xb0n\xc6\xb6\xc2\xf8\xd8\xaf\xb1-\xfcmR\x02;`z\x8a\x96\xa1\xca\xf0v\xc2\x09x#\x1d=\x5c\x01\xd3%\x0d\xebo\xe0Y\xd0\xdaR\xda\xb1\xa3\xa5\xea\xf9\x81\xd0#&\xc1?\xc8>\xcc\xd2\xb9\xe1\xd0\xaa\xfe\xa2\xfe\xa9n\xa8_\xa8\x17\xd4\xdf;j\x84\x19;\xa6$}*}+\xfd(}'}/\xfdL\x8atY\xba\x22\xfd$]\x95\xbe\x91.\x059\xbb\xef\xbd\x9f%\xd8{\xaf_a\x13\xdd\x8a]h\xd5k\x9f5'SN\xca{\xe4\x17\xe5\x94\xfc\xbc\xfc\xb2<\x1b\xb0\x14\xb9_\x1e\x93\xa7\xe4\xbd\xf0\xec\x09\xf6\xcd\x0c\xfd\x1d\xbd\x18t\x0a\xb3jM\xb5{-\xf1\x04\x184\x0f%\x06\x9d\x06\xd7\xc6T\xc5\x84\xabt\x16Y\xdb\x9f\x93\xa6R6\xc8\xc6\xd8\xf4#\xa7v\x5c\x9c\xe5\x96\x8ax:\x9e\x8a'H\x89\xef\x8bO\xc4\xc7\xe23\x02\xb7\x9e\xbc\xf8^\xf8&\xb0\xa6\x03\xf5\xfe\x93\x130::\xe0m,L%\xc83\xe2\x9d:qVE\xf4\x0at\x9b\xd0\xcd]~\x06\xdf\x0e\xa2I\xabv\xd66\xca\x15W\xd9\xaf\xaa\xaf)\x09|\xca\xb82]\xd5G\x87\x15\xcd4\x15\xcf\xe5(6w\xb8\xbd\xc2\x8b\xa3$\xbe\x83\x22\x8e\xe8A\xde\xfb\xbeEv\xdd\x0cm\xee[D\x87\xff\xc2;\xebVh[\xa8\x13}\xed\x10\xf5\xbf\x1a\xda\x86\xf0N|\xe63\xa2\x8b\x07\xf5\xba\xbd\xe2\xe7\xa3H\xe4\x06\x91S:\xb0\xdf\xfb\x1d\xe9K\xe2\xddt\xb7\xd1x\x80\xf7U\xcf'D;\x1f7\x1a\xff\xae7\x1a;_\x22\xff\x16\xd1e\xf3?Y\x00qxl\x1b+\x13\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x01riTXtXML:com.adobe.xmp\x00\x00\x00\x00\x00<x:xmpmeta xmlns:x=\x22adobe:ns:meta/\x22 x:xmptk=\x22XMP Core 4.4.0\x22>\x0a <rdf:RDF xmlns:rdf=\x22http://www.w3.org/1999/02/22-rdf-syntax-ns#\x22>\x0a <rdf:Description rdf:about=\x22\x22\x0a xmlns:xmp=\x22http://ns.adobe.com/xap/1.0/\x22>\x0a <xmp:CreatorTool>Adobe Photoshop CC (Macintosh)</xmp:CreatorTool>\x0a </rdf:Description>\x0a 
</rdf:RDF>\x0a</x:xmpmeta>\x0a\x9c\xb9F<\x00\x00\x01\x16IDAT8\x11\xa5RK\x8e\xc20\x0c\xb5QW\x15\xdb\xded\x04\xdc\x01$\xa4\x1e\xa1\x07\x1b\xba\xe9\xbe\x1c\xa1\xcb9F\xb7\xb3\x9fE?I\xb0Mm\x85\xce\xc0D\xc2\x12\xaa\x83\xf3\x9e\xdf\x8b\x8d!\x04x'6\xef\x80\x19\x9b\xc5\x04UU\x99\x1c\x1f<\x00\x9d\xea\xba\xc6\xf8\xce:G\xb5\xc0\xe0\xe3\xe9hu\xef\x88\x80\xa2m[h\x9a\xe6)\x89\x100\xb8(\x0ajh\x02\x8c\x08\x01\xa1\xef{p\xce\x017\xf3\xde\xdf\xbf\xa4\xf0\xda^\xd1,\xec\xf6;\x03\xad\x93\xbfj\x97\xcf\x0b \xe2\x87\x10\xb0_7\xbb5\xee\xe5\x99\x15Qle\x0a\xc1\xff\x96\xfe\x12ME\xb6Bq\xb7\xa0\x0f9\xcf\xf3\x7f8\xa9gY&ob\x04\x8b\x1c\x18\xc71\x99`i\x1a\xe4\x0dT\xc10\x0cI\x04y\x9e\xcb$L\xc1\xe2'Y\x01wQ\x8c(P\x0b\xd34%)\x88.\x05\xdb\xc4\xb2,\xc9\x09-\x0a\x8d\x94\xa7\xc2\x1d\xf4\xc7\xff\xeb\x22\xc9\x1d\xaau]w&\xa2o#`VZ\x8c\x03}r\xce\xa3\xd0\x19\xeb:\xf3\x99\xf3\x1f\x22\xfbz \x88@\xc9\xe9\x0d\xa1\xd7\xa7Ga\x0b4\xde\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x01\x9a\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq\xc9e<\x00\x00\x01<IDATx\xda\x94S\xa1n\xc3P\x0c\xcc\xa2\xaahAo\xa4AE+iQ\xd9\xd0\xd0\xd0\xd8~`\x7f\xd7\x0f\x18\x1b*ZQPK\x12\x14\xf4\x86\x82\x8a\x16\xb4\xbb\xea<YV\xd4m\x96NO\xb1\xfd|g\xfb\xa5(\xfea9\xe7\xfb\xe8\xbb\xf9\xc3\xa5\x0a\xc7\x1aX\x01s`_\xd7ug\xf1\xd9/l\xbc\xb4\x90\xeb\xa0\x02\xf4u\x93\x0a\x02\xdb\x17p\x02Z\xe0\x01\xa8\x94\xc6\x82;\xa88\xff\x14\x08l\xac\xde!!\xbb\xc2\x09\xc7\x0b\xf0)\x15\x19\xf1\xc3\xa5\x05\x04_\xc5F;Z\xc0\x1b|\x03\xf2\x06\xa9I\x22\xbb\xe4\x95\xc0\xa0\x0f\xca]^\x99'%?\x02\x1b\xaa\xb0\x8d\xccT\xf5\xc9\xc9]\x82\xb1w\xd2\xad=\xb3w\xa7\xa2\xb3\x19\xb0\x8d\x06\xb8\xd5\xb0z1%?\x13\x98\x11%\xe5\xedl\x8d\xad\x06X)\x98l\x03\xb88:\xf6^m\xf0\xa4\x7f]*`\xfd\x8f\x9aI\x83\x8b\xc7p\xb9\xd0\x03\x1a\xb5\x0d\xe6\xadJ\x05\xcer\xb2\x8d\x1cz\x8e\xef\x84\x1b\xdb\xca\xf5\xe6_\x22\xdbx\xf6\xc9,\x8cs.u\x1b\x85H\xd2\x9b\xba\xf8\x129\xcc\xbd\xe6q\xa7\xd5-\xa4\x8e-\x0dQU\xfc\x17Z\xad-\xa9W\xb6\xf3\x11gq\xad\xc0I\xffB3\xc56e\xdf\x02\x0c\x00\xa9_\x7ff\x8fx\xa2\xf0\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x01p\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq\xc9e<\x00\x00\x01\x12IDATx\xda\x94\x92\xa1\x12\xc20\x0c\x86\x9b\xddnnjj(\x14(\x14\x0a\x87\xda\x0b\xf0v\xbc\x03/\xc0\x03\x80\x9a\x1ajj\xaa\xa8)\x5c\xcdHv\x7fw\xa1t\xe3\xc8]\xaeY\xbb~\xf9\x93\xc6\x18\xd80\x0c9\xfb\x8e\xbd0\x7fX\xaa\xe25\xfb\x010\xc7\x8be\x7f\xcaJD\xfd\x1c\x80\x94\x82\x0aa\xcd\xbeb/\xb1f\xec\xb3@\xad@~\xee\x10\x17\x88ku&\xc0\xbd\xa8\xd4\x0a\x09\xd9\xe5\xc2)*\x91\xe8\xac\xbf\xf1\xaf\x007\xa2\xce\x03v\xa0_\xd8+(\xd0\xf6\x9c2\x12YU\xb2\xf3%\x94\xa8\xed\xc5\x07\x0d\xc7[\xec\xb5\xb8\xe83\xee\xf9\xdc\x03%\xc9-U5\xd6\x90\xdc\xe2\xa2d\xc9\xf8[\xeam9\x16\xc0Q%\x1cA\x89\xbc?:m#\xf5\xbb\xa0\xc9\xda\x9c(N@sKo\x1dd\xf56&L@\xb6K7\xa12\x8f4\xd6x\x05\xbf\xc6\xb8\x8c\xecY?H\x8f\xb9!QS\x17\xab\xbf\xff\x18\xe5`H\xc216\x88\xbdu\x0c\xb8~\x01\x22\xb5\xc7\x80bw\x064\xe6_\x13 z55\xf4-\xc0\x00:&\x85pD\xfd\xf7\xe4\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x01\xf4\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x19tEXtSoftware\x00Adobe 
ImageReadyq\xc9e<\x00\x00\x01\x96IDATx\xdabd@\x03\xac\xec\x1c\xac@J\x1b\x88e\x91\x84\xbf\x01\xf1\xe9\xdf?\x7f|BW\xcf\xc4\x80\x09\xac\x82\x02\x02L\x8f\x1e>\xa4v\xe3\xea\x15U\x10\xae\xab\xae\xd2\x03\x89\x03\x0d\xe7BW\xcc\x8cf\xbb\x84\x8b\x93\xa3\xc5\xdc\xb9s9TTT\xbe\x0b\x0a\x09\xfd\x02aS3\xb3\x7f\x1f?\xbc\x17>s\xf6\xec\xc7\x7f\x7f\xff\xbc\xc6\xe7\x02~\x03\x03C&!\xa0&dANN\xce\xbf\x0e\x0e\x0e`yb\xbc\x80\x15\xbc}\xf3\x16\xabZtA\x16\x06\x12\x01\xba\x01\xac\xbf\x7f\xfdb\xc6\xa6\xf0\xf3\x97\xcf\xff\x80\xd4oB\x06<\xde\xb0i\xd3\xbb\x03\xfb\xf7s#\x0b>{\xfa\x94s\xc9\x92\xa5\xefA\xf2x\x0d\x00\xc6\xf3\xdb\x87\x8f\x1e]\x5c\xbeb\xc5Wd\xf1\x1f?~0_\xbez\xf53P\xfe\x051\x81\xf8\xf1\xc6\x8d\x9b?\xd1\x05\x05\x04\x04X\x80\xd1\xac\x0dMh\xa8\xe9\x00$\xc8\xcc\xc2\xa2\x00\xc4\x9a@\x85\x1ai)\xc9R\x16\x96\x96\xf0\xa8\x04\xa5\x05eEE\x89\x7f\xff\xfe\x89]\xbfqC\x02\xa8\x8e\x0b\x88?\x01\xd3\xc4oF\xa0f>h\xea\x93uqv\xe6\xb0\xb2\xb2d\xd7\xd2\xd6\xfe\x84- \xbf\x7f\xff\xce|\xe5\xca\x15\xae%K\x96\xfcX\xb9j\xf5\xa3w\xef\xdf\x1fc\x06\x9a\xa4WVR\xac\xd7\xdb\xdb\xcbhnn\xfe[TL\xec'\xae(cee\xfd/--\xfd\xcb\xd3\xd3\xf3\xdf\xdf\xbf\x7fD\xf7\x1f8\xf0\x15\x14\x06\x12\xfe\xfe\xfe\xec\xa0\xd4FJ\xfc\xfb\xfa\xf8\xb0\x81\xf4\x82\xbc\xe0\x012\x9c\x81<\xf0\x9b\xf1\xff\xff\xff\x0c\x94\x00\x80\x00\x03\x00A\x9d\x93\x92\xf6Q\x1bj\x00\x00\x00\x00IEND\xaeB`\x82\x00\x008\xf2\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x008(iTXtXML:com.adobe.xmp\x00\x00\x00\x00\x00<?xpacket begin=\x22\xef\xbb\xbf\x22 id=\x22W5M0MpCehiHzreSzNTczkc9d\x22?>\x0a<x:xmpmeta xmlns:x=\x22adobe:ns:meta/\x22 x:xmptk=\x22Adobe XMP Core 5.6-c067 79.157747, 2015/03/30-23:40:42 \x22>\x0a <rdf:RDF xmlns:rdf=\x22http://www.w3.org/1999/02/22-rdf-syntax-ns#\x22>\x0a <rdf:Description rdf:about=\x22\x22\x0a xmlns:xmp=\x22http://ns.adobe.com/xap/1.0/\x22\x0a xmlns:dc=\x22http://purl.org/dc/elements/1.1/\x22\x0a xmlns:photoshop=\x22http://ns.adobe.com/photoshop/1.0/\x22\x0a xmlns:xmpMM=\x22http://ns.adobe.com/xap/1.0/mm/\x22\x0a xmlns:stEvt=\x22http://ns.adobe.com/xap/1.0/sType/ResourceEvent#\x22\x0a xmlns:tiff=\x22http://ns.adobe.com/tiff/1.0/\x22\x0a xmlns:exif=\x22http://ns.adobe.com/exif/1.0/\x22>\x0a <xmp:CreatorTool>Adobe Photoshop CC 2015 (Macintosh)</xmp:CreatorTool>\x0a <xmp:CreateDate>2015-07-08T10:34:29-04:00</xmp:CreateDate>\x0a <xmp:ModifyDate>2015-07-08T10:37:36-04:00</xmp:ModifyDate>\x0a <xmp:MetadataDate>2015-07-08T10:37:36-04:00</xmp:MetadataDate>\x0a <dc:format>image/png</dc:format>\x0a <photoshop:ColorMode>3</photoshop:ColorMode>\x0a <xmpMM:InstanceID>xmp.iid:1e61b505-ccec-4bbc-8463-625f4de6105e</xmpMM:InstanceID>\x0a <xmpMM:DocumentID>xmp.did:1e61b505-ccec-4bbc-8463-625f4de6105e</xmpMM:DocumentID>\x0a <xmpMM:OriginalDocumentID>xmp.did:1e61b505-ccec-4bbc-8463-625f4de6105e</xmpMM:OriginalDocumentID>\x0a <xmpMM:History>\x0a <rdf:Seq>\x0a <rdf:li rdf:parseType=\x22Resource\x22>\x0a <stEvt:action>created</stEvt:action>\x0a <stEvt:instanceID>xmp.iid:1e61b505-ccec-4bbc-8463-625f4de6105e</stEvt:instanceID>\x0a <stEvt:when>2015-07-08T10:34:29-04:00</stEvt:when>\x0a <stEvt:softwareAgent>Adobe Photoshop CC 2015 (Macintosh)</stEvt:softwareAgent>\x0a </rdf:li>\x0a </rdf:Seq>\x0a </xmpMM:History>\x0a <tiff:Orientation>1</tiff:Orientation>\x0a <tiff:XResolution>720000/10000</tiff:XResolution>\x0a <tiff:YResolution>720000/10000</tiff:YResolution>\x0a <tiff:ResolutionUnit>2</tiff:ResolutionUnit>\x0a <exif:ColorSpace>65535</exif:ColorSpace>\x0a 
<exif:PixelXDimension>16</exif:PixelXDimension>\x0a <exif:PixelYDimension>16</exif:PixelYDimension>\x0a </rdf:Description>\x0a </rdf:RDF>\x0a</x:xmpmeta>\x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a \x0a<?xpacket end=\x22w\x22?>\xdf\xa1\x8e\xe2\x00\x00\x00 cHRM\x00\x00z%\x00\x00\x80\x83\x00\x00\xf9\xff\x00\x00\x80\xe9\x00\x00u0\x00\x00\xea`\x00\x00:\x98\x00\x00\x17o\x92_\xc5F\x00\x00\x00DIDATx\xdab\xf8\xff\xff?\x03%\x98ap\x18\x00\x03\xff\xff\xff7\xff\xff\xff\x7f\x1e\x16l\xce\x80\x060\x0c\xc0\xa1\x11\x1f\xc6t\x01\xb1\x00\xc3\x00Rl\x1e5\x80\x86\x06\x98S\x94\x0e\x86vf\x02\x00\x00\x00\xff\xff\x03\x00t\xde\xde\xe3\xc8\x19\xf9\xc4\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x01\x96\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq\xc9e<\x00\x00\x018IDATx\xda\xb4S\xbbN\xc40\x10\xb4\x1d;\xc1\x91\xc2!]\x04\xa18\x09]A\x03H4\xf4|\x00\x1d_Ls\xfc\x004\x88\x82\x86\x5c\x1a\x10\xe4q\x10\xf2b\x16%Qt\x8a9\x04b\xa4\xd1:\x9b\x9d\xf1:Y\xf3\xa6i\x18\xe7\x9c\xad#\x0cC\x17\xe1\xc8\xf7\xfdS\xa5\xd4\x15j\xee\xd8\x08\xe4X\x12\xe29\xc2!\xa8 \xf4\x10\xcf\xb1\xd1\x09\xe25\x9eC\xa3\x01\x84S\x84cp{\xc4\x97\xde]\xc0h\x89\xb8\x80\xd1So\x00\xa1\xa2v\xc1\x19\xdb\x8c}\xf0\x12Ft\xa4\x85h\x93g?\x14\x0fAG\xf4;\x83\x15\xfb%:\x83\xb7\xbf\x1a\xb0\xff2p\xf3<gUUm\x99\x0a\xa4!\xaf\xf1\x9b\x0a\xf0!M\xd3{\x98L\xb4\xd63\xdb\xb6'\x96e\xbd\x0f\xea\xecu\x03\x1a\x1c\x09>b\xdd\x17\x16E\xf1J\x94R\xba\xc0\x1c\x939\xc5:\xa5\xd9\xe8\x0c\x04D\xba\x1d\x8e\x17S\xbbeY\xae\xe28\xbe\x11B(tt\xe08\xce\xc7\x97\x01\xed\x08\xd2\x9f\xd8\x01\xadM\x1f\xae\xae\xeb\xe7,\xcbn=\xcfK\xf8\xf02EQd\xb5#\xbbG\xe7\x1b\xd1&\xe02\x08\x82\xa4Kp\xd3m\x84\x19u\xb3\x0bzc\xc2\x1ed\xf0\x1d\xda\xae\x8c\xf8\x14`\x00\x05\xecl1yCb\x8d\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x01\x06\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x19tEXtSoftware\x00Adobe 
ImageReadyq\xc9e<\x00\x00\x00\xa8IDATx\xdab\xfc\xff\xff?\x03%\x80\x89\x81R@\xaa\x0b\x80\xea\xa5\x808\x12\x88uAzYH\xd4\xac\x06\xa4\x1c\xa0\x5c6\x92\xbc\x80\xa6\x19\x04\xde\xa2\x18\x00T\xc0\x06u\x1e/\x11\x9ao122>\x001X`\x9a\x81T0\x10\x834\x83$v\x11\xd0|\x00=\x16t\xa1\x9aA@\x8aX\xcdp\x17\xa0\x016\xa8\x8b\x14\x08i\xc6e\x00\x08\xf8\x02\xb10!\xcd\xf8b\x01Y\xf3\x03\x5c\x9a\x91\x0d\xf8\x8cC\x1e\x14U\x07\xf0E/\x0br\x9c\xa2\x81[\xf8lFq\x01P!\xcc\xa6_P\xf1\xb3\xc4h\xc6\x9a\x17\xa01@\xb4^FJ\xb33@\x80\x01\x00+\xf8V\x18\x86{\xf1\x5c\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x04\x8e\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x06bKGD\x00P\x00P\x00P\xc6\x87\x0b\xf0\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00\x07tIME\x07\xe0\x07\x0f\x0f\x05\x1f\xcb\xf0\xc3\xe9\x00\x00\x04\x1bIDAT8\x11\x01\x10\x04\xef\xfb\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00ttt\x1b\x00\x00\x00m\xfe\xfe\xfeI\xfe\xfe\xff&\xff\xff\xfe\x00\xfe\xfe\xfd\xda\xfe\xfe\xff\xb7\xfe\xfe\xfe\x93\x97\x97\x97\xe5\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00tttd\x00\x00\x00\xdb\xfe\xfe\xff\x5c\xff\xff\xff\xa6\xff\xfe\xfe7\xfe\xfe\xfe7\xfe\xfe\xff\xa6\xfe\xfe\xff\x5c\xfe\xff\xfe\xdbfefd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00tttd\x00\x00\x00\x9b\xfe\xff\xff\x9c\xfe\xfd\xfen\x90\x90\x8f\xf7\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00ggg\x09\xfd\xfd\xfd\xbc\xff\xff\x00:\xff\xfe\xfee\x9e\x9f\x9e\x9c\x00\x00\x00\x00\x01\x00\x00\x00\x00ttt\x1b\x00\x00\x00\xdb\xfe\xfe\xff\xa5\x8e\x8e\x8de\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00dddl\x00\x00\x00>\xff\xff\xff4\xfc\xfd\xfd\x18\xff\xfe\xfe%\xa2\xa2\xa2\xe5\x02\x00\x00\x00\x00\x00\x00\x00m\xfe\xfe\xfe\xee\xfe\xff\xfen\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00dddl\x00\x00\x00>\x00\x00\x00\xc2\xfd\xfd\xfd+\xff\xfe\xfe\xee\xfe\xfe\xfem\x00\x00\x00\x00\x02\x00\x00\x00\x00\xff\xff\xffI\xfe\xff\xfe\x93\x90\x8f\x8f\xf7\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00dddl\x00\x00\x00>\x00\x00\x00\xc2\x9c\x9c\x9c\x94\xa0\xa0\xa0\xf7\xff\xff\xff\x93\xfe\xfe\xfeI\x00\x00\x00\x00\x02\x00\x00\x00\x00\xfd\xfe\xfe&\xff\xfe\xff\xb7\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00dddl\x00\x00\x00>\x00\x00\x00\xc2\x9c\x9c\x9c\x94\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xfd\xfd\xb7\xff\xfe\xfe&\x00\x00\x00\x00\x02\x00\x00\x00\x00\xff\xfe\xfe\x00\xfe\xfe\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00dddl\x00\x00\x00>\x00\x00\x00\xc2\x9c\x9c\x9c\x94\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xff\xfe\x00\xfe\xff\xff\x00\x00\x00\x00\x00\x02\x00\x00\x00\x00\xfe\xfe\xfe\xda\xfe\xfe\xfeI\x00\x00\x00\x00\x00\x00\x00\x00dddl\x00\x00\x00>\x00\x00\x00\xc2\x9c\x9c\x9c\x94\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xfe\xffI\xfe\xfe\xfe\xda\x00\x00\x00\x00\x04\x00\x00\x00\x00\xfe\xfe\xfe\xb7\xff\xff\xffmghh\x92\xfd\xfc\xfcc\x00\x00\x00>\x00\x00\x00\xc2\x9c\x9c\x9c\x94\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00WWV\x09\xfe\xfe\xfem\xfe\xfe\xfe\xa4\x00\x00\x00\x00\x02\x00\x00\x00\x00\xff\xfe\xf
e\x93\xfd\xfd\xfd\x12\xfd\xfc\xfc\xbc\x00\x00\x00>\x00\x00\x00\xc2\x9c\x9c\x9c\x94\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xfe\xff\x92\xfe\xfe\xfe\x12\xfe\xfe\xfe\x93\x00\x00\x00\x00\x03\x00\x00\x00\x00\xcb\xcc\xcc\xf3222\xe9\x00\x00\x00k\xff\xff\xff\xf1\xfd\xfd\xfdq\xd0\xd0\xd0\xfc\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00VWW\x09***\x97\xfe\xfe\xfed\xfe\xfe\xffj\xaf\xaf\xaf\xc1\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00bbbd\xfe\xfe\xfe\x92\xfe\xfe\xfe\xee\xff\xfe\xff\x93\xfe\xfe\xfe\xb7\xfe\xff\xfd\x00\xfe\xfe\xffI\xfe\xfe\xfem\xfe\xfe\xfe\x12\xff\xfe\xffn\xae\xaf\xae\x9c\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00^_^\x1b\xfe\xfe\xfem\xff\xfd\xfeI\xfe\xff\xfe&\xfe\xfe\xff\x00\xfe\xfe\xfe\xda\xfe\xff\xff\xb7\xff\xfd\xfe\x93\xae\xaf\xae\xe5\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00{\xa3Rn\x88C\xdf\xe9\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x01\x96\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq\xc9e<\x00\x00\x018IDATx\xda\x94\x93;K\xc4@\x14\x853k\xb4PB\xc0Gc\xa1\x8d\x08j!\x8a\x8d\x8dX,\xec\xfe\x03\xfd\x85v\x8b\xb5\xad\xb5.\xf8B\xb0\x10,\xdcF,D\xdc\xc2U\xe3w\xe0\x08aL\xe2:\xf01\xcc\xdc\xcc\xb97w\xce$I4\x8a\xa2\xc8\xe0\x00\xf6\x931F+>\xcct\x08_\xb0\xcez\xf7_\x02!\x84W\xa6\x0f\xb8\x87>l!\xb2\xda$\x90:\xf3\x1c\xd3;\xa8\x82i\xc7F\x16i\x13\x97\xf8]\xa5\x803t`\x08\x9fp\xebX\x0e\x9bp\xd6$\xa2\x0a\xf6\xe0\x11\x1e\xa2\xd8\x0c\x0c`\xa7ID=8\x86yg,\x0f\x1d~\xf2\xbe\x9ay\x05\xdd\xb8'-\x14\x9f\x99{\xb0\x11\x89\xe4\xaen\xd2kU\xa2\xec\x9d\xb2H(]\xe1O/n\xfc\xef\xb9\x0f\xab\x92sX\x81\x05\xb8p\xb2#%\x0f\x91\x0f$\xd2\x85kg\xd4M\x9c:\xbc\x08o\xf0bq\x09\xf6B\x85\x13%\xd2\xf6?\xeb\xa3\xcb\x1a\x0b,\xc3l\x1a\xef\xaa\xcb\xea\xb6E\xfaQX\x99\x97`\xc2~9I\xab\xa4+DF\x0e\xad\xf9\xd6\xe4\xd8)\xf5 \xad\xb3h\x8d\xc8\x90\xfd\xc1/+\xff!\x22\x9bo\xdb\x13rn\xe673\xfe\xd0\xd3\xf6\x13\xcf\xe2\xd8\xb7\x00\x03\x00\x97\xf9\x84\x1d\x0e\x19\xc9m\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x01A\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq\xc9e<\x00\x00\x00\xe3IDATx\xdab\xfc\xff\xff?\x03\x08\xb0qp\xfa2\x10\x06\x8f\x7f\xff\xfcq\x01\xc4\x80\xe9cB\x96\xfd\xf5\xe3\xbb$\x1e\xfcc\xea\xe4I!\xac\xec\x1c\x06\xc8zX\x18H\x00\xa9\xa9\xa9\xa2@*$;7\x0f\xc4\x05\xbb\x84\x11\xd9\x0b \x9b\xf0\xe8\xbf\x0fc\x00\xd5r\x00\xd5n\xc6\xf0\x0228q\xfc8?\x9a\x90\x22\x12\x86\x03\x9c\x06\x84ED\xfe\xec\xee\xea\x92\xfe\xf2\xe5\x0b^o\xe24\xe0\xc5\xcb\x97?\xaa\xeb\xea\x9f\xc6\xc5\xc6\x09cq\x0da\x03``\xcb\xf6\xed/A\xaeY\xb2x\xb14Y\x06\xf0\xf0\xf0\xb0$\xc6\xc7\x09\xbb\xb9\xb9\xbd\xc5&\x8f\xd7\x7fV\x96\x16\xfc\x15ee\x1c\x1e\x9e\x9eOq\xa9\xc1i@jr\x92xII\xc9\x17EE\xc5\x97\xf8,!%\x1d0 \xa5\x83\xe7\x04\xd3\x01\xb1\x80b\x03X\xd0\x9dF\xaa\x01\xf00 \x17\x00\x04\x18\x00f\xbdi\xf2\xa2\x22\x95M\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x01W\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x19tEXtSoftware\x00Adobe 
ImageReadyq\xc9e<\x00\x00\x00\xf9IDATx\xda\xb4SA\x0a\xc20\x10l\xb4\x15\x15\xaa\xa0\x08\x16<X/>\xc3\x83g\xbf\xe9\x0b<y\xf2\x1bz\x11O\xf6 \x88\x8a\x0a\xb5\xb4\xce\xc2FB\xbb\x09\x8880$mw\xb6\xb3\x9b\x8d*\x8a\xc2\xfb;\xf0\x93>\xd8\x96\xbe)\x87\x88\x0418\x04i\xff\x02\xd7`@{\xa5T^I\x00Q\xc0\x82\x09\xd8\x05}\xf0\xc9|\x80\x09\x98\x81w$\xb8x\x1c@B\x12\x8d\xc0\x81\x11|\x02\xcf\x16\x83\x8dO\x09\x10\xcf\xd9\x22\x09\x0e\x9c(\x16D\xda\x91\x069\xb9\xf9\x5c\x13\xa1\xcek\x8f\x83%d\xa5\xe7\xb0\xc6u\x11Z\x96 Bjk\xb6\x99\xa0\xe38\xc9%\xb8\x13\x13\xa0\x9b\x097\x8d\xfe\x1c\x8ag\xadT\x0an\xb0]I\x0e<v\x91\x1beHs1\xc62+\xbf\xd7]\xdd\xf3\xd97-\xe2\x05\x96\xc8\xd6\x03\xb2H%\x5c\x1d}\x88\x5cM\xd4\xd8\xf2\x10\xf9\xdf\xdc\x13%\xd8\xa5)\x9b\x9a\xd3f\xe0XI\xf0\xebu~\x0b0\x00#rI;*KB\x98\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x00\xdc\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x10\x00\x00\x00\x10\x08\x06\x00\x00\x00\x1f\xf3\xffa\x00\x00\x00\x19tEXtSoftware\x00Adobe ImageReadyq\xc9e<\x00\x00\x00~IDATx\xdab\xfc\xff\xff?\x03%\x80\x05D022z\x00)V2\xf4\xfff\x812X\x81.\x91$U7\xd0\xe2\xe7L\x0c\x14\x82Q\x03F\x0d@6\xe07(Ua\xc3\x91Q\xd1\xbfn\xdd\xba\xcd\x8b\x9c\xfa`\x18\xa4\x8f\x81\x88\xccd`ie\x15u\xf2\xd4\xe9\x12\xa0\xda4 \xdf\x17E\x96\xc8\xdc(+**\x16\xb8k\xf7\x9eR\x10\x8d\x92\x1f@\x06\x00\x9dC\x8c!\x12 \xd7@\xd9;`\x82\x00\x01\x06\x00w\x1e,\xd8\xda@0\xb5\x00\x00\x00\x00IEND\xaeB`\x82\x00\x00\x03\x9c\x89PNG\x0d\x0a\x1a\x0a\x00\x00\x00\x0dIHDR\x00\x00\x00\x0e\x00\x00\x00\x0e\x08\x06\x00\x00\x00\x1fH-\xd1\x00\x00\x00\x06bKGD\x00\xff\x00\xff\x00\xff\xa0\xbd\xa7\x93\x00\x00\x00\x09pHYs\x00\x00\x0b\x13\x00\x00\x0b\x13\x01\x00\x9a\x9c\x18\x00\x00\x00\x07tIME\x07\xe0\x08\x1f\x0e*\x0c\x8e\xd9\x10#\x00\x00\x03)IDAT(\x15\x01\x1e\x03\xe1\xfc\x01\x00\x00\x00\x00\x14\x14\x143\x03\x03\x03M\x07\x07\x07\xef\xfd\xfd\xfd\xf6\x12\x12\x12+\xd3\xd3\xd3p\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00$$$\x89\x0e\x0e\x0e^\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x19\x19\x19K)))\x9f\x17\x17\x17\x12\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00,,,!ttt\xb6\xcd\xcd\xcd(&&&\x00\x0d\x0d\x0d\xc5\x83\x83\x83=\xdd\xdd\xdd\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x1c\x1c\x1c\x03\x14\x14\x142yyy\xb1+++&\xfe\xfe\xfe\x00\x1f\x1f\x1f\x00!!!\x14)))i\xd1\xd1\xd1\x92\xf0\xf0\xf0\xfe\x00\x00\x00\x00\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x18\x18\x18 |||\xbd)))\x1e\xc6\xc6\xc6\x00\xcd\xcd\xcd\x00\xff\xff\xff\x00\x1d\x1d\x1d\x05\x16\x16\x16\x08+++a\xc6\xc6\xc6\x94\xf5\xf5\xf5\xff\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\xff\xff\xff\x11\x00\x00\x00\x0d\xfe\xfe\xfe\x00\xfa\xfa\xfa\x00...\x00\x08\x08\x08\x00\xfe\xfe\xfe\x00\xfe\xfe\xfe\x06&&& \x8a\x8a\x8a\xd7(((\x02\x00\x00\x00\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x07\x07\x07\x1c\x00\x00\x00\x03\xfc\xfc\xfc\xf8777\x00111\x00111\x00\xfe\xfe\xfe\x00\xfc\xfc\xfc\x00\xfd\xfd\xfd\x00$$$%xxx\xd6\x00\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x00\xfb\xfb\xfb\xd7\x00\x00\x00\xde\xfb\xfb\xfb*\x02\x02\x02\x00\x00\x00\x00\x00\xfd\xfd\xfd\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00 
$\xa0\xa0\xa0\xc3\x03\x00\x00\x00\x00\x00\x00\x00\x00\xed\xed\xed\xee\xfe\xfe\xfe\xbb\x1a\x1a\x1aA\x0b\x0b\x0b\x16\xff\xff\xff\x00\xfe\xfe\xfe\x00\xfe\xfe\xfe\x00\xfe\xfe\xfe\x00\xfe\xfe\xfe\x00\xfe\xfe\xfe\x00\xfc\xfc\xfc\x02\xf3\xf3\xf3\x02\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xd9\xd9\xd9\xee\xe5\xe5\xe5\xcf\x22\x22\x22I\x0c\x0c\x0c\x0e\xff\xff\xff\x00\xfe\xfe\xfe\x00\xfe\xfe\xfe\x00\xfe\xfe\xfe\x00\xff\xff\xff\x00\xe7\xe7\xe7\xf4\x87\x87\x87\xa4\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xf0\xf0\xf0\xf8\xf1\xf1\xf1\xef###.\x09\x09\x09\x0e\xff\xff\xff\x00\xff\xff\xff\x00\x00\x00\x00\x00\xe9\xe9\xe9\xfa\x96\x96\x96\xb8\xdb\xdb\xdbc\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfb\xfb\xfb\xf6\xe3\xe3\xe3\xee\xdc\xdc\xdc\xe7///4\x09\x09\x09\x0a\x02\x02\x02\x01\xea\xea\xea\x00\x88\x88\x88\x9e\xd2\xd2\xd2^\xfb\xfb\xfb\xff\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xfe\xfe\xfe\xf1\xf6\xf6\xf6\xfe\xe9\xe9\xe9\xe0&&&+\xf3\xf3\xf3\xfc\x98\x98\x98z\xd1\xd1\xd1|\x00\x00\x00\xff\x00\x00\x00\x00\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\xff\xfa\xfa\xfa\xe6\xec\xec\xec\xea\x08\x08\x08\xff\xc4\xc4\xc4\x87\xc5\xc5\xc5\xab\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00`\x02\xfe\x80\x1cp?j\x00\x00\x00\x00IEND\xaeB`\x82"
qt_resource_name = "\x00\x19\x0dU\x1e\xd3\x00t\x00k\x00-\x00f\x00r\x00a\x00m\x00e\x00w\x00o\x00r\x00k\x00-\x00s\x00h\x00o\x00t\x00g\x00u\x00n\x00u\x00t\x00i\x00l\x00s\x00\x1b\x00\x8d\xb4\xc7\x00i\x00c\x00o\x00n\x00_\x00P\x00u\x00b\x00l\x00i\x00s\x00h\x00e\x00d\x00F\x00i\x00l\x00e\x00_\x00d\x00a\x00r\x00k\x00.\x00p\x00n\x00g\x00\x13\x04\xcf\x90\xc7\x00i\x00c\x00o\x00n\x00_\x00G\x00r\x00o\x00u\x00p\x00_\x00d\x00a\x00r\x00k\x00.\x00p\x00n\x00g\x00\x16\x01\x11\xcd\xc7\x00i\x00c\x00o\x00n\x00_\x00S\x00e\x00q\x00u\x00e\x00n\x00c\x00e\x00_\x00d\x00a\x00r\x00k\x00.\x00p\x00n\x00g\x00\x1f\x05\xcb\x09\x07\x00i\x00c\x00o\x00n\x00_\x00T\x00a\x00n\x00k\x00P\x00u\x00b\x00l\x00i\x00s\x00h\x00e\x00d\x00F\x00i\x00l\x00e\x00_\x00d\x00a\x00r\x00k\x00.\x00p\x00n\x00g\x00\x18\x0e\xddk\xc7\x00i\x00c\x00o\x00n\x00_\x00C\x00l\x00i\x00e\x00n\x00t\x00U\x00s\x00e\x00r\x00_\x00d\x00a\x00r\x00k\x00.\x00p\x00n\x00g\x00\x1b\x07\xacsG\x00i\x00c\x00o\x00n\x00_\x00E\x00v\x00e\x00n\x00t\x00L\x00o\x00g\x00E\x00n\x00t\x00r\x00y\x00_\x00d\x00a\x00r\x00k\x00.\x00p\x00n\x00g\x00\x0f\x06CM\x87\x00i\x00c\x00o\x00n\x00_\x00F\x00o\x00l\x00d\x00e\x00r\x00.\x00p\x00n\x00g\x00\x12\x00\x8a\x8d\x07\x00i\x00c\x00o\x00n\x00_\x00S\x00h\x00o\x00t\x00_\x00d\x00a\x00r\x00k\x00.\x00p\x00n\x00g\x00\x15\x00\xed\x84\xa7\x00i\x00c\x00o\x00n\x00_\x00P\x00r\x00o\x00j\x00e\x00c\x00t\x00_\x00d\x00a\x00r\x00k\x00.\x00p\x00n\x00g\x00\x17\x0d+.\xe7\x00i\x00c\x00o\x00n\x00_\x00H\x00u\x00m\x00a\x00n\x00U\x00s\x00e\x00r\x00_\x00d\x00a\x00r\x00k\x00.\x00p\x00n\x00g\x00\x14\x04F\x1fG\x00i\x00c\x00o\x00n\x00_\x00F\x00o\x00l\x00d\x00e\x00r\x00_\x00d\x00a\x00r\x00k\x00.\x00p\x00n\x00g\x00\x13\x03.\x11G\x00i\x00c\x00o\x00n\x00_\x00A\x00s\x00s\x00e\x00t\x00_\x00d\x00a\x00r\x00k\x00.\x00p\x00n\x00g\x00\x12\x06s\xed\x07\x00i\x00c\x00o\x00n\x00_\x00T\x00a\x00s\x00k\x00_\x00d\x00a\x00r\x00k\x00.\x00p\x00n\x00g\x00\x12\x09.\xad'\x00i\x00c\x00o\x00n\x00_\x00N\x00o\x00n\x00e\x00_\x00d\x00a\x00r\x00k\x00.\x00p\x00n\x00g\x00\x14\x05&\x00'\x00i\x00c\x00o\x00n\x00_\x00T\x00i\x00c\x00k\x00e\x00t\x00_\x00d\x00a\x00r\x00k\x00.\x00p\x00n\x00g\x00\x16\x0f\xb4\xed'\x00i\x00c\x00o\x00n\x00_\x00P\x00l\x00a\x00y\x00l\x00i\x00s\x00t\x00_\x00d\x00a\x00r\x00k\x00.\x00p\x00n\x00g\x00\x15\x03\x92\x13G\x00i\x00c\x00o\x00n\x00_\x00V\x00e\x00r\x00s\x00i\x00o\x00n\x00_\x00d\x00a\x00r\x00k\x00.\x00p\x00n\x00g\x00\x12\x09\xb2\xad'\x00i\x00c\x00o\x00n\x00_\x00N\x00o\x00t\x00e\x00_\x00d\x00a\x00r\x00k\x00.\x00p\x00n\x00g\x00\x11\x00\x7fe\x07\x00i\x00c\x00o\x00n\x00_\x00T\x00a\x00g\x00_\x00d\x00a\x00r\x00k\x00.\x00p\x00n\x00g"
qt_resource_struct = "\x00\x00\x00\x00\x00\x02\x00\x00\x00\x01\x00\x00\x00\x01\x00\x00\x00\x00\x00\x02\x00\x00\x00\x13\x00\x00\x00\x02\x00\x00\x03\xa2\x00\x00\x00\x00\x00\x01\x00\x00bQ\x00\x00\x01\xac\x00\x00\x00\x00\x00\x01\x00\x00\x18\x01\x00\x00\x008\x00\x00\x00\x00\x00\x01\x00\x00\x00\x00\x00\x00\x01\xd6\x00\x00\x00\x00\x00\x01\x00\x00\x19\x9f\x00\x00\x00\xa0\x00\x00\x00\x00\x00\x01\x00\x00\x04\xd2\x00\x00\x02h\x00\x00\x00\x00\x00\x01\x00\x00V\x01\x00\x00\x03H\x00\x00\x00\x00\x00\x01\x00\x00`\x16\x00\x00\x02:\x00\x00\x00\x00\x00\x01\x00\x00\x1d\x0b\x00\x00\x00t\x00\x00\x00\x00\x00\x01\x00\x00\x03\xa0\x00\x00\x02\xe8\x00\x00\x00\x00\x00\x01\x00\x00]7\x00\x00\x00\xd2\x00\x00\x00\x00\x00\x01\x00\x00\x06}\x00\x00\x01\x88\x00\x00\x00\x00\x00\x01\x00\x00\x10\xeb\x00\x00\x02\x94\x00\x00\x00\x00\x00\x01\x00\x00W\x9b\x00\x00\x01L\x00\x00\x00\x00\x00\x01\x00\x00\x0c\x84\x00\x00\x02\xbe\x00\x00\x00\x00\x00\x01\x00\x00X\xa5\x00\x00\x03x\x00\x00\x00\x00\x00\x01\x00\x00aq\x00\x00\x02\x06\x00\x00\x00\x00\x00\x01\x00\x00\x1b\x13\x00\x00\x01\x16\x00\x00\x00\x00\x00\x01\x00\x00\x0a\x1d\x00\x00\x03\x16\x00\x00\x00\x00\x00\x01\x00\x00^\xd1"
def qInitResources():
    # Register the embedded icon data (qt_resource_data) with Qt's resource
    # system so the images become reachable through ":/" resource paths.
    QtCore.qRegisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)

def qCleanupResources():
    # Mirror of qInitResources(): detach the embedded data from Qt again.
    QtCore.qUnregisterResourceData(0x01, qt_resource_struct, qt_resource_name, qt_resource_data)

qInitResources()
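# Illustrative usage sketch, not part of the generated module above: once
# qInitResources() has run, the embedded PNGs can be read through Qt's ":/"
# resource scheme. The binding import below is an assumption (the module's
# own QtCore import is defined earlier in the file), and the resource path is
# a hypothetical decoding of the "tk-framework-shotgunutils" tree encoded in
# qt_resource_name.
from PySide import QtGui  # assumed Qt binding

icon = QtGui.QPixmap(":/tk-framework-shotgunutils/icon_Shot_dark.png")
assert not icon.isNull()  # served from qt_resource_data, not from disk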
| 2,548
| 49,341
| 0.578474
| 9,227
| 53,508
| 3.345508
| 0.128644
| 0.252098
| 0.275811
| 0.267453
| 0.492986
| 0.481486
| 0.467913
| 0.456089
| 0.422204
| 0.403803
| 0
| 0.292553
| 0.234825
| 53,508
| 20
| 49,342
| 2,675.4
| 0.461398
| 0.002785
| 0
| 0
| 0
| 0.333333
| 0.993046
| 0.725502
| 0
| 0
| 0.00015
| 0
| 0
| 1
| 0.222222
| false
| 0
| 0.111111
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
43b35c934cd5374986b7125430169a92f432ee0c
| 2,388
|
gyp
|
Python
|
chrome/browser/resources/history/compiled_resources2.gyp
|
google-ar/chromium
|
2441c86a5fd975f09a6c30cddb57dfb7fc239699
|
[
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 777
|
2017-08-29T15:15:32.000Z
|
2022-03-21T05:29:41.000Z
|
chrome/browser/resources/history/compiled_resources2.gyp
|
harrymarkovskiy/WebARonARCore
|
2441c86a5fd975f09a6c30cddb57dfb7fc239699
|
[
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 66
|
2017-08-30T18:31:18.000Z
|
2021-08-02T10:59:35.000Z
|
chrome/browser/resources/history/compiled_resources2.gyp
|
harrymarkovskiy/WebARonARCore
|
2441c86a5fd975f09a6c30cddb57dfb7fc239699
|
[
"Apache-2.0",
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 123
|
2017-08-30T01:19:34.000Z
|
2022-03-17T22:55:31.000Z
|
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
{
  'targets': [
    {
      'target_name': 'externs',
      'includes': ['../../../../third_party/closure_compiler/include_js.gypi'],
    },
    {
      'target_name': 'history_focus_manager',
      'dependencies': [
        '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:cr',
        '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:util',
        '<(DEPTH)/ui/webui/resources/js/cr/ui/compiled_resources2.gyp:focus_manager',
      ],
      'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
    },
    {
      'target_name': 'history',
      'dependencies': [
        '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:action_link',
        '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:assert',
        '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:cr',
        '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:event_tracker',
        '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:icon',
        '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:load_time_data',
        '<(DEPTH)/ui/webui/resources/js/compiled_resources2.gyp:util',
        '<(DEPTH)/ui/webui/resources/js/cr/compiled_resources2.gyp:ui',
        '<(DEPTH)/ui/webui/resources/js/cr/ui/compiled_resources2.gyp:alert_overlay',
        '<(DEPTH)/ui/webui/resources/js/cr/ui/compiled_resources2.gyp:command',
        '<(DEPTH)/ui/webui/resources/js/cr/ui/compiled_resources2.gyp:focus_grid',
        '<(DEPTH)/ui/webui/resources/js/cr/ui/compiled_resources2.gyp:focus_outline_manager',
        '<(DEPTH)/ui/webui/resources/js/cr/ui/compiled_resources2.gyp:focus_row',
        '<(DEPTH)/ui/webui/resources/js/cr/ui/compiled_resources2.gyp:menu',
        '<(DEPTH)/ui/webui/resources/js/cr/ui/compiled_resources2.gyp:menu_button',
        '<(DEPTH)/ui/webui/resources/js/cr/ui/compiled_resources2.gyp:menu_item',
        '<(DEPTH)/ui/webui/resources/js/cr/ui/compiled_resources2.gyp:overlay',
        '<(DEPTH)/ui/webui/resources/js/cr/ui/compiled_resources2.gyp:position_util',
        'history_focus_manager',
        '<(EXTERNS_GYP):chrome_send',
        'externs',
      ],
      'includes': ['../../../../third_party/closure_compiler/compile_js2.gypi'],
    },
  ],
}
| 49.75
| 93
| 0.667085
| 299
| 2,388
| 5.153846
| 0.247492
| 0.095393
| 0.16353
| 0.286178
| 0.760545
| 0.760545
| 0.730045
| 0.730045
| 0.583387
| 0.53926
| 0
| 0.013333
| 0.15201
| 2,388
| 47
| 94
| 50.808511
| 0.747654
| 0.064908
| 0
| 0.25
| 0
| 0
| 0.785105
| 0.736205
| 0
| 0
| 0
| 0
| 0.022727
| 1
| 0
| true
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
43d2ceb07bc08a2e918db87d2f47a0f6b9722a75
| 4,363
|
py
|
Python
|
test/ResultsAndPrizes/6x49/test_6x49_results_for_several_draws.py
|
FearFactor1/SPA
|
a05aaa924c5bebb52cd508ebdf7fd3b81c49fac7
|
[
"Apache-2.0"
] | 1
|
2019-12-05T06:50:54.000Z
|
2019-12-05T06:50:54.000Z
|
test/ResultsAndPrizes/6x49/test_6x49_results_for_several_draws.py
|
FearFactor1/SPA
|
a05aaa924c5bebb52cd508ebdf7fd3b81c49fac7
|
[
"Apache-2.0"
] | null | null | null |
test/ResultsAndPrizes/6x49/test_6x49_results_for_several_draws.py
|
FearFactor1/SPA
|
a05aaa924c5bebb52cd508ebdf7fd3b81c49fac7
|
[
"Apache-2.0"
] | null | null | null |
# 6 out of 49: results for several draws  (comment translated from Russian)
def test_6x49_results_for_several_draws(app):
    app.ResultAndPrizes.open_page_results_and_prizes()
    app.ResultAndPrizes.click_game_6x49()
    app.ResultAndPrizes.click_results_for_several_draws()
    app.ResultAndPrizes.click_ok_for_several_draws_modal_window()
    app.ResultAndPrizes.button_get_report_winners()
    # Parse the report once; the original re-parsed it before every assertion.
    report = app.ResultAndPrizes.parser_report_text_winners()
    assert "РЕЗУЛЬТАТЫ ТИРАЖА" in report  # "DRAW RESULTS" header
    # Each draw header and its local ("ЛОК") draw time must appear.
    draws = (
        ("35325", "18/01/2018, 17:17:00 ЛОК"),
        ("35324", "18/01/2018, 17:02:00 ЛОК"),
        ("35323", "18/01/2018, 16:47:00 ЛОК"),
        ("35322", "18/01/2018, 16:32:00 ЛОК"),
        ("35321", "18/01/2018, 16:17:00 ЛОК"),
    )
    for number, drawn_at in draws:
        assert "ЛОТО 6/49 - Тираж %s :" % number in report
        assert drawn_at in report
    # Table captions ("Category", "Prize RUB", "Count", "Total"). The original
    # chained these checks with `and`, which applied the `in` test only to the
    # last string; all() checks every one.
    assert all(s in report for s in ("Кат.", "Выигрыш руб.", "Кол-во", "Всего"))
    # Prize categories: draws 35325-35322 report zeros; draw 35321 has winners.
    for category in ("6/6", "5+1", "5/6", "4/6", "3/6"):
        assert category in report
    assert "0" in report
    assert all(s in report for s in ("4/6", "110908", "10", "1109080"))
    assert all(s in report for s in ("3/6", "375", "717", "268875"))
    app.ResultAndPrizes.comeback_main_page()
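# Side note (illustrative, plain Python, no app fixture needed): why the
# original chained-`and` asserts were a no-op for all but the last operand.
# A non-empty string literal is simply truthy, so only the final `in` counts.
report = "0 0 0"                                   # a report missing the "6/6" row
assert ("6/6" and "0" in report)                   # passes: only `"0" in report` is tested
assert not all(s in report for s in ("6/6", "0"))  # all() exposes the missing row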
| 82.320755
| 113
| 0.740316
| 692
| 4,363
| 4.447977
| 0.099711
| 0.089669
| 0.104613
| 0.119558
| 0.887264
| 0.87102
| 0.87102
| 0.84178
| 0.84178
| 0.84178
| 0
| 0.07031
| 0.136145
| 4,363
| 53
| 114
| 82.320755
| 0.746352
| 0.00848
| 0
| 0.571429
| 0
| 0
| 0.131329
| 0
| 0
| 0
| 0
| 0
| 0.836735
| 1
| 0.020408
| false
| 0
| 0
| 0
| 0.020408
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
43d859d3f83f1c93b101ddf6a4deacb0f3bb18c2
| 1,799
|
py
|
Python
|
tests/test_parse_project_name.py
|
RaitoBezarius/mathlib-tools
|
a3e432f33b8f9dbdc883041a143bcec4cda30200
|
[
"Apache-2.0"
] | null | null | null |
tests/test_parse_project_name.py
|
RaitoBezarius/mathlib-tools
|
a3e432f33b8f9dbdc883041a143bcec4cda30200
|
[
"Apache-2.0"
] | null | null | null |
tests/test_parse_project_name.py
|
RaitoBezarius/mathlib-tools
|
a3e432f33b8f9dbdc883041a143bcec4cda30200
|
[
"Apache-2.0"
] | null | null | null |
from mathlibtools.leanproject import parse_project_name as P


def test_name():
    name, url, branch = P('tutorials')
    assert name == 'tutorials'
    assert url == 'git@github.com:leanprover-community/tutorials.git'
    assert branch == ''


def test_org_name():
    name, url, branch = P('leanprover-community/tutorials')
    assert name == 'tutorials'
    assert url == 'git@github.com:leanprover-community/tutorials.git'
    assert branch == ''


def test_https():
    name, url, branch = P('https://github.com/leanprover-community/tutorials.git')
    assert name == 'tutorials'
    assert url == 'https://github.com/leanprover-community/tutorials.git'
    assert branch == ''


def test_ssh():
    name, url, branch = P('git@github.com:leanprover-community/tutorials.git')
    assert name == 'tutorials'
    assert url == 'git@github.com:leanprover-community/tutorials.git'
    assert branch == ''


def test_name_branch():
    name, url, branch = P('tutorials:foo')
    assert name == 'tutorials'
    assert url == 'git@github.com:leanprover-community/tutorials.git'
    assert branch == 'foo'


def test_org_name_branch():
    name, url, branch = P('leanprover-community/tutorials:foo')
    assert name == 'tutorials'
    assert url == 'git@github.com:leanprover-community/tutorials.git'
    assert branch == 'foo'


def test_https_branch():
    name, url, branch = P('https://github.com/leanprover-community/tutorials.git:foo')
    assert name == 'tutorials'
    assert url == 'https://github.com/leanprover-community/tutorials.git'
    assert branch == 'foo'


def test_ssh_branch():
    name, url, branch = P('git@github.com:leanprover-community/tutorials.git:foo')
    assert name == 'tutorials'
    assert url == 'git@github.com:leanprover-community/tutorials.git'
    assert branch == 'foo'
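# A table-driven equivalent (sketch; assumes pytest is the test runner, which
# the test_* naming suggests). One parametrized test covers the eight cases
# above with the same expectations.
import pytest

SSH = 'git@github.com:leanprover-community/tutorials.git'
HTTPS = 'https://github.com/leanprover-community/tutorials.git'

@pytest.mark.parametrize('spec, url, branch', [
    ('tutorials', SSH, ''),
    ('leanprover-community/tutorials', SSH, ''),
    (HTTPS, HTTPS, ''),
    (SSH, SSH, ''),
    ('tutorials:foo', SSH, 'foo'),
    ('leanprover-community/tutorials:foo', SSH, 'foo'),
    (HTTPS + ':foo', HTTPS, 'foo'),
    (SSH + ':foo', SSH, 'foo'),
])
def test_parse_project_name_parametrized(spec, url, branch):
    name, parsed_url, parsed_branch = P(spec)
    assert (name, parsed_url, parsed_branch) == ('tutorials', url, branch)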
| 35.98
| 86
| 0.689272
| 227
| 1,799
| 5.39207
| 0.105727
| 0.21732
| 0.320261
| 0.27451
| 0.933824
| 0.906046
| 0.878268
| 0.824346
| 0.824346
| 0.824346
| 0
| 0
| 0.163424
| 1,799
| 49
| 87
| 36.714286
| 0.813289
| 0
| 0
| 0.585366
| 0
| 0
| 0.434686
| 0.255698
| 0
| 0
| 0
| 0
| 0.585366
| 1
| 0.195122
| true
| 0
| 0.02439
| 0
| 0.219512
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
600be5c5ddbc7dab6ccda28471085ab10500466f
| 3,429
|
py
|
Python
|
Python-Programs/print_font.py
|
nirajdhalani/Hacktoberfest-2023
|
75e3ae60fc82c277bf057ed6401a72958604008c
|
[
"MIT"
] | null | null | null |
Python-Programs/print_font.py
|
nirajdhalani/Hacktoberfest-2023
|
75e3ae60fc82c277bf057ed6401a72958604008c
|
[
"MIT"
] | null | null | null |
Python-Programs/print_font.py
|
nirajdhalani/Hacktoberfest-2023
|
75e3ae60fc82c277bf057ed6401a72958604008c
|
[
"MIT"
] | null | null | null |
# Python3 code to print input in your own font
name = "GEEK"
# To take input from User
# name = input("Enter your name: \n\n")
length = len(name)
l = ""
for x in range(0, length):
    c = name[x]
    c = c.upper()
    if (c == "A"):
        print("..######..\n..#....#..\n..######..", end = " ")
        print("\n..#....#..\n..#....#..\n\n")
    elif (c == "B"):
        print("..######..\n..#....#..\n..#####...", end = " ")
        print("\n..#....#..\n..######..\n\n")
    elif (c == "C"):
        print("..######..\n..#.......\n..#.......", end = " ")
        print("\n..#.......\n..######..\n\n")
    elif (c == "D"):
        print("..#####...\n..#....#..\n..#....#..", end = " ")
        print("\n..#....#..\n..#####...\n\n")
    elif (c == "E"):
        print("..######..\n..#.......\n..#####...", end = " ")
        print("\n..#.......\n..######..\n\n")
    elif (c == "F"):
        print("..######..\n..#.......\n..#####...", end = " ")
        print("\n..#.......\n..#.......\n\n")
    elif (c == "G"):
        print("..######..\n..#.......\n..#.####..", end = " ")
        print("\n..#....#..\n..#####...\n\n")
    elif (c == "H"):
        print("..#....#..\n..#....#..\n..######..", end = " ")
        print("\n..#....#..\n..#....#..\n\n")
    elif (c == "I"):
        print("..######..\n....##....\n....##....", end = " ")
        print("\n....##....\n..######..\n\n")
    elif (c == "J"):
        print("..######..\n....##....\n....##....", end = " ")
        print("\n..#.##....\n..####....\n\n")
    elif (c == "K"):
        print("..#...#...\n..#..#....\n..##......", end = " ")
        print("\n..#..#....\n..#...#...\n\n")
    elif (c == "L"):
        print("..#.......\n..#.......\n..#.......", end = " ")
        print("\n..#.......\n..######..\n\n")
    elif (c == "M"):
        print("..#....#..\n..##..##..\n..#.##.#..", end = " ")
        print("\n..#....#..\n..#....#..\n\n")
    elif (c == "N"):
        print("..#....#..\n..##...#..\n..#.#..#..", end = " ")
        print("\n..#..#.#..\n..#...##..\n\n")
    elif (c == "O"):
        print("..######..\n..#....#..\n..#....#..", end = " ")
        print("\n..#....#..\n..######..\n\n")
    elif (c == "P"):
        print("..######..\n..#....#..\n..######..", end = " ")
        print("\n..#.......\n..#.......\n\n")
    elif (c == "Q"):
        print("..######..\n..#....#..\n..#.#..#..", end = " ")
        print("\n..#..#.#..\n..######..\n\n")
    elif (c == "R"):
        print("..######..\n..#....#..\n..#.##...", end = " ")
        print("\n..#...#...\n..#....#..\n\n")
    elif (c == "S"):
        print("..######..\n..#.......\n..######..", end = " ")
        print("\n.......#..\n..######..\n\n")
    elif (c == "T"):
        print("..######..\n....##....\n....##....", end = " ")
        print("\n....##....\n....##....\n\n")
    elif (c == "U"):
        print("..#....#..\n..#....#..\n..#....#..", end = " ")
        print("\n..#....#..\n..######..\n\n")
    elif (c == "V"):
        print("..#....#..\n..#....#..\n..#....#..", end = " ")
        print("\n...#..#...\n....##....\n\n")
    elif (c == "W"):
        print("..#....#..\n..#....#..\n..#.##.#..", end = " ")
        print("\n..##..##..\n..#....#..\n\n")
    elif (c == "X"):
        print("..#....#..\n...#..#...\n....##....", end = " ")
        print("\n...#..#...\n..#....#..\n\n")
    elif (c == "Y"):
        print("..#....#..\n...#..#...\n....##....", end = " ")
        print("\n....##....\n....##....\n\n")
    elif (c == "Z"):
        print("..######..\n......#...\n.....#....", end = " ")
        print("\n....#.....\n..######..\n\n")
    elif (c == " "):
        print("..........\n..........\n..........", end = " ")
        print("\n..........\n\n")
    elif (c == "."):
        print("----..----\n\n")
| 27.432
| 56
| 0.237679
| 366
| 3,429
| 2.226776
| 0.136612
| 0.267485
| 0.472393
| 0.331288
| 0.803681
| 0.803681
| 0.803681
| 0.803681
| 0.788957
| 0.765644
| 0
| 0.000694
| 0.15923
| 3,429
| 124
| 57
| 27.653226
| 0.281998
| 0.030913
| 0
| 0.292135
| 0
| 0
| 0.522447
| 0.495631
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.617978
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 12
|
601bef622263e4caf8883670fe3f43ce5950f1d7
| 4,088
|
py
|
Python
|
timeactive_algorithm_utils.py
|
gkovacs/tmi-browsing-behavior-prediction
|
0e7d44574003272ce457bf221bfa3e92002ad217
|
[
"MIT"
] | 1
|
2019-12-16T11:44:18.000Z
|
2019-12-16T11:44:18.000Z
|
timeactive_algorithm_utils.py
|
gkovacs/tmi-browsing-behavior-prediction
|
0e7d44574003272ce457bf221bfa3e92002ad217
|
[
"MIT"
] | null | null | null |
timeactive_algorithm_utils.py
|
gkovacs/tmi-browsing-behavior-prediction
|
0e7d44574003272ce457bf221bfa3e92002ad217
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
# md5: 534dac664bada9466dc70acfff63608e
# coding: utf-8

from tmilib import *
import csv
from itertools import izip

@jsonmemoized
def get_user_to_predicted_times_active_our_algorithm():
    predictions_csv = csv.reader(sdir_open('catdata_test_insession_second_evaluation_predictions_datav4_modelv6.csv'))
    predictions_header = next(predictions_csv)
    print predictions_header
    test_data_csv = csv.reader(sdir_open('catdata_test_insession_second_evaluation_v4.csv'))
    test_data_header = next(test_data_csv)
    print test_data_header
    assert test_data_header[0] == 'user'
    assert predictions_header[0] == 'predict'
    output = {}
    for predictions_line, test_line in izip(predictions_csv, test_data_csv):
        predict = predictions_line[0] == 'T'
        if predict:
            user = test_line[0]
            time_sec = int(test_line[1])
            if user not in output:
                output[user] = []
            output[user].append(time_sec)
    for k in output.keys():
        output[k].sort()
    return output

@jsonmemoized
def get_user_to_predicted_times_active_baseline_algorithm():
    predictions_csv = csv.reader(sdir_open('catdata_test_insession_second_evaluation_predictions_datav4_modelv6.csv'))
    predictions_header = next(predictions_csv)
    print predictions_header
    test_data_csv = csv.reader(sdir_open('catdata_test_insession_second_evaluation_v4.csv'))
    test_data_header = next(test_data_csv)
    print test_data_header
    assert test_data_header[0] == 'user'
    assert predictions_header[0] == 'predict'
    log_fivemin = log(5*60)
    output = {}
    for predictions_line, test_line in izip(predictions_csv, test_data_csv):
        sinceprev = float(test_line[3])
        predict = sinceprev < log_fivemin
        if predict:
            user = test_line[0]
            time_sec = int(test_line[1])
            if user not in output:
                output[user] = []
            output[user].append(time_sec)
    for k in output.keys():
        output[k].sort()
    return output

@jsonmemoized
def get_user_to_predicted_times_active_baseline3_algorithm():
    predictions_csv = csv.reader(sdir_open('catdata_test_insession_second_evaluation_predictions_datav4_modelv6.csv'))
    predictions_header = next(predictions_csv)
    print predictions_header
    test_data_csv = csv.reader(sdir_open('catdata_test_insession_second_evaluation_v4.csv'))
    test_data_header = next(test_data_csv)
    print test_data_header
    assert test_data_header[0] == 'user'
    assert predictions_header[0] == 'predict'
    log_onemin = log(1*60)
    output = {}
    for predictions_line, test_line in izip(predictions_csv, test_data_csv):
        sinceprev = float(test_line[3])
        predict = sinceprev < log_onemin
        if predict:
            user = test_line[0]
            time_sec = int(test_line[1])
            if user not in output:
                output[user] = []
            output[user].append(time_sec)
    for k in output.keys():
        output[k].sort()
    return output

@jsonmemoized
def get_user_to_predicted_times_active_baseline2_algorithm():
    predictions_csv = csv.reader(sdir_open('catdata_test_insession_second_evaluation_predictions_datav4_modelv6.csv'))
    predictions_header = next(predictions_csv)
    print predictions_header
    test_data_csv = csv.reader(sdir_open('catdata_test_insession_second_evaluation_v4.csv'))
    test_data_header = next(test_data_csv)
    print test_data_header
    assert test_data_header[0] == 'user'
    assert predictions_header[0] == 'predict'
    log_onemin = log(1*60)
    output = {}
    user_to_is_active_in_majority_of_sessions = get_username_to_is_active_in_majority_of_sessions()
    for predictions_line, test_line in izip(predictions_csv, test_data_csv):
        user = test_line[0]
        predict = user_to_is_active_in_majority_of_sessions[user]
        if predict:
            time_sec = int(test_line[1])
            if user not in output:
                output[user] = []
            output[user].append(time_sec)
    for k in output.keys():
        output[k].sort()
    return output

a = get_user_to_predicted_times_active_baseline_algorithm()
a = get_user_to_predicted_times_active_baseline3_algorithm()
a = get_user_to_predicted_times_active_baseline2_algorithm()
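# The four memoized loaders above differ only in how `predict` is computed per
# row. A factored sketch of that shared loop (written in Python 3 for clarity,
# while the file itself is Python 2; sdir_open and jsonmemoized are assumed to
# come from tmilib as above):
from math import log

def predicted_times_active(decide):
    predictions_csv = csv.reader(sdir_open(
        'catdata_test_insession_second_evaluation_predictions_datav4_modelv6.csv'))
    next(predictions_csv)  # skip header row
    test_data_csv = csv.reader(sdir_open('catdata_test_insession_second_evaluation_v4.csv'))
    next(test_data_csv)
    output = {}
    for predictions_line, test_line in zip(predictions_csv, test_data_csv):
        if decide(predictions_line, test_line):
            output.setdefault(test_line[0], []).append(int(test_line[1]))
    for times in output.values():
        times.sort()
    return output

# The model prediction and the five-minute baseline as one-line predicates:
ours = predicted_times_active(lambda p, t: p[0] == 'T')
baseline = predicted_times_active(lambda p, t: float(t[3]) < log(5 * 60))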
| 30.058824
| 116
| 0.752935
| 574
| 4,088
| 4.95993
| 0.134146
| 0.067439
| 0.046365
| 0.04496
| 0.928697
| 0.928697
| 0.928697
| 0.917808
| 0.819459
| 0.819459
| 0
| 0.018287
| 0.15729
| 4,088
| 135
| 117
| 30.281481
| 0.808128
| 0.017613
| 0
| 0.84
| 0
| 0
| 0.128863
| 0.117647
| 0
| 0
| 0
| 0
| 0.08
| 0
| null | null | 0
| 0.03
| null | null | 0.08
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 8
|
607614e52616e4e937413f9ebcfd09a30fc6822f
| 42,094
|
py
|
Python
|
importer/management/old_commands_not_required/block_parse_experiment_data.py
|
rkhleics/nhs-ei.website
|
9968916a5c442a2b33003f8a48b238df53ebded0
|
[
"MIT"
] | 1
|
2021-02-04T13:20:31.000Z
|
2021-02-04T13:20:31.000Z
|
importer/management/old_commands_not_required/block_parse_experiment_data.py
|
rkhleics/nhs-ei.website
|
9968916a5c442a2b33003f8a48b238df53ebded0
|
[
"MIT"
] | 77
|
2020-11-29T23:10:16.000Z
|
2022-03-23T11:47:51.000Z
|
importer/management/old_commands_not_required/block_parse_experiment_data.py
|
rkhleics/nhs-ei.website
|
9968916a5c442a2b33003f8a48b238df53ebded0
|
[
"MIT"
] | 3
|
2021-03-19T09:23:59.000Z
|
2021-08-31T21:49:36.000Z
|
"""
a_to_z_index_component # leave out
article_component
article_title
article_content
article_url
in_this_section_component
in_this_section_title
in_this_section_topics [
in_this_section_link_title,
in_this_section_link_url
]
priorities_component
priorities_section_title
our_priorities [
priority_title, priority_url
]
recent_posts_component
section_title
post_type
number_of_posts
show_see_all
select_category [
category ids
]
promos_component
promo_component [
promo_image: {
id
}
promo_title
promo_content
promo_url
]
topic_section_component
topic_section_title
in_this_section [
topic_title
topic_content
topic_url
]
"""
""" topic_section_component, priorities_component, in_this_section_component, recent_posts_component, promos_component"""
DATA_ALL = "[\
{'acf_fc_layout': 'a_to_z_index_component', \
'a_to_z_index_title': 'A to Z of topics', \
'a_to_z_index_content': '<p>Can’t find what you’re looking for? Our A to Z of topics helps you find information quickly:</p>\\n'},\
{'acf_fc_layout': 'topic_section_component', \
'topic_section_title': '', 'in_this_section': \
[\
{\
'topic_title': 'About the Involvement Hub', \
'topic_content': '<p>A source of information for people who want to get involved in our work or enable others to participate.</p>\\n', \
'topic_url': 'https://www.england.nhs.uk/participation/about/'\
}, {'topic_title': 'Information for commissioners', 'topic_content': '<p>Statutory guidance for Clinical Commissioning Groups on involving patients and the public.</p>\\n', 'topic_url': 'https://www.england.nhs.uk/participation/involvementguidance'}, {'topic_title': 'Surveys and consultations', 'topic_content': '<p>Have your say on our current consultations and surveys.</p>\\n', 'topic_url': 'https://www.engage.england.nhs.uk/'}, {'topic_title': 'Learning and development', 'topic_content': '<p>Workshops, webinars and elearning to improve understanding of the healthcare sector and participation. </p>\\n', 'topic_url': 'https://www.england.nhs.uk/participation/learning/'}, {'topic_title': 'Good practice and case studies', 'topic_content': '<p>Examples of good practice in involving people in healthcare services and service development.</p>\\n', 'topic_url': 'https://www.england.nhs.uk/participation/success/'}, {'topic_title': 'Resources and bite sized guides', 'topic_content': '<p>A variety of resources to support you in your involvement work, including bitesize guides to participation.</p>\\n', 'topic_url': 'https://www.england.nhs.uk/participation/resources/'}]}, \
\
{'acf_fc_layout': 'priorities_component', 'priorities_section_title': '', 'our_priorities': \
[\
{\
'nhsuk_highlight': False, \
'priority_title': 'How to get involved', \
'priority_url': 'https://www.england.nhs.uk/participation/get-involved/'}, \
{'nhsuk_highlight': False, 'priority_title': 'Why get involved', 'priority_url': 'https://www.england.nhs.uk/participation/why/'}, \
{'nhsuk_highlight': False, 'priority_title': 'Current opportunities', 'priority_url': 'https://www.england.nhs.uk/participation/get-involved/opportunities/'}]}, \
\
{'acf_fc_layout': 'in_this_section_component', \
'in_this_section_title': 'You may also be interested in', \
'in_this_section_topics': [\
{\
'type': 'link', \
'in_this_section_link_title': '1 An introduction to the NHS', \
'in_this_section_link_url': 'https://www.england.nhs.uk/participation/nhs/', \
'in_this_section_page': False\
},\
{\
'type': 'link', \
'in_this_section_link_title': '2 An introduction to the NHS', \
'in_this_section_link_url': 'https://www.england.nhs.uk/participation/nhs/', \
'in_this_section_page': False\
},\
{\
'type': 'link', \
'in_this_section_link_title': '3 An introduction to the NHS', \
'in_this_section_link_url': 'https://www.england.nhs.uk/participation/nhs/', \
'in_this_section_page': False\
}\
]\
}, \
{'acf_fc_layout': 'recent_posts_component', \
'section_title': 'News and blogs', \
'post_type': ['blog', 'post'], \
'number_of_posts': '3', \
'show_see_all': True, \
'select_category': [2687], \
'background': True, \
'background_colour': '#e8edee'}, \
{'acf_fc_layout': 'promos_component', 'promo_component': \
[\
{\
'nhsuk_highlight': False, \
'promo_image': {\
'ID': 129988, \
'id': 129988, \
'title': 'surgeons-400x267', \
'filename': 'surgeons-400x267.jpg', \
'filesize': 117610, \
'url': 'https://www.england.nhs.uk/wp-content/uploads/2019/01/surgeons-400x267.jpg', \
'link': 'https://www.england.nhs.uk/ourwork/surgeons-400x267/', \
'alt': 'Surgeons operate on a patient', \
'author': '1904', \
'description': '', \
'caption': '', \
'name': 'surgeons-400x267', \
'status': 'inherit', \
'uploaded_to': 15136, \
'date': '2019-01-09 10:43:53', \
'modified': '2019-01-09 10:45:30', \
'menu_order': 0, \
'mime_type': 'image/jpeg', \
'type': 'image', \
'subtype': 'jpeg', \
'icon': 'https://www.england.nhs.uk/wp-includes/images/media/default.png', \
'width': 400, \
'height': 267, \
'sizes': {\
'thumbnail': 'https://www.england.nhs.uk/wp-content/uploads/2019/01/surgeons-400x267-150x150.jpg', \
'thumbnail-width': 150, \
'thumbnail-height': 150, \
'medium': 'https://www.england.nhs.uk/wp-content/uploads/2019/01/surgeons-400x267-300x200.jpg', \
'medium-width': 300, \
'medium-height': 200, \
'medium_large': 'https://www.england.nhs.uk/wp-content/uploads/2019/01/surgeons-400x267.jpg', \
'medium_large-width': 400, \
'medium_large-height': 267, \
'large': 'https://www.england.nhs.uk/wp-content/uploads/2019/01/surgeons-400x267.jpg', \
'large-width': 400, \
'large-height': 267, \
'1536x1536': 'https://www.england.nhs.uk/wp-content/uploads/2019/01/surgeons-400x267.jpg', \
'1536x1536-width': 400, \
'1536x1536-height': 267, \
'2048x2048': 'https://www.england.nhs.uk/wp-content/uploads/2019/01/surgeons-400x267.jpg', \
'2048x2048-width': 400, '2048x2048-height': 267\
}\
}, \
'promo_title': 'NHS Long Term Plan', \
'promo_content': '', \
'promo_url': 'https://www.england.nhs.uk/long-term-plan/'}, \
{'nhsuk_highlight': False, 'promo_image': {'ID': 78273, 'id': 78273, 'title': '', 'filename': 'cancer_400x267.jpg', 'filesize': 149433, 'url': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/cancer_400x267.jpg', 'link': 'https://www.england.nhs.uk/?attachment_id=78273', 'alt': 'Two women taking part in a fundraising event', 'author': '1920', 'description': '', 'caption': '', 'name': 'muddy-fundraising', 'status': 'inherit', 'uploaded_to': 78255, 'date': '2017-02-08 13:49:56', 'modified': '2017-10-09 14:18:57', 'menu_order': 0, 'mime_type': 'image/jpeg', 'type': 'image', 'subtype': 'jpeg', 'icon': 'https://www.england.nhs.uk/wp-includes/images/media/default.png', 'width': 400, 'height': 267, 'sizes': {'thumbnail': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/cancer_400x267-150x150.jpg', 'thumbnail-width': 150, 'thumbnail-height': 150, 'medium': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/cancer_400x267-300x200.jpg', 'medium-width': 300, 'medium-height': 200, 'medium_large': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/cancer_400x267.jpg', 'medium_large-width': 400, 'medium_large-height': 267, 'large': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/cancer_400x267.jpg', 'large-width': 400, 'large-height': 267, '1536x1536': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/cancer_400x267.jpg', '1536x1536-width': 400, '1536x1536-height': 267, '2048x2048': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/cancer_400x267.jpg', '2048x2048-width': 400, '2048x2048-height': 267}}, 'promo_title': 'Cancer', 'promo_content': '', 'promo_url': 'https://www.england.nhs.uk/cancer/'}, \
{'nhsuk_highlight': False, 'promo_image': {'ID': 78275, 'id': 78275, 'title': '', 'filename': 'mental-health-and-dementia_400x267.jpg', 'filesize': 127258, 'url': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/mental-health-and-dementia_400x267.jpg', 'link': 'https://www.england.nhs.uk/?attachment_id=78275', 'alt': 'A father and son read a letter', 'author': '1920', 'description': '', 'caption': '', 'name': 'relation-between-father-and-son', 'status': 'inherit', 'uploaded_to': 78255, 'date': '2017-02-08 13:50:41', 'modified': '2017-10-09 14:19:14', 'menu_order': 0, 'mime_type': 'image/jpeg', 'type': 'image', 'subtype': 'jpeg', 'icon': 'https://www.england.nhs.uk/wp-includes/images/media/default.png', 'width': 400, 'height': 267, 'sizes': {'thumbnail': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/mental-health-and-dementia_400x267-150x150.jpg', 'thumbnail-width': 150, 'thumbnail-height': 150, 'medium': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/mental-health-and-dementia_400x267-300x200.jpg', 'medium-width': 300, 'medium-height': 200, 'medium_large': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/mental-health-and-dementia_400x267.jpg', 'medium_large-width': 400, 'medium_large-height': 267, 'large': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/mental-health-and-dementia_400x267.jpg', 'large-width': 400, 'large-height': 267, '1536x1536': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/mental-health-and-dementia_400x267.jpg', '1536x1536-width': 400, '1536x1536-height': 267, '2048x2048': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/mental-health-and-dementia_400x267.jpg', '2048x2048-width': 400, '2048x2048-height': 267}}, 'promo_title': 'Mental health', 'promo_content': '', 'promo_url': 'https://www.england.nhs.uk/mental-health/'}]}, \
{'acf_fc_layout': 'article_component', \
'article_image': {\
'ID': 163377, \
'id': 163377, \
'title': 'Clear on cancer', \
'filename': 'Clear-on-cancer.jpg', \
'filesize': 137636, \
'url': 'https://www.england.nhs.uk/wp-content/uploads/2020/10/Clear-on-cancer.jpg', \
'link': 'https://www.england.nhs.uk/homepage/clear-on-cancer/', \
'alt': 'Help Us, Help you', 'author': '2119', 'description': '', 'caption': '', \
'name': 'clear-on-cancer', 'status': 'inherit', 'uploaded_to': 98164, \
'date': '2020-10-09 16:07:59', 'modified': '2020-10-09 16:08:19', \
'menu_order': 0, 'mime_type': 'image/jpeg', 'type': 'image', 'subtype': 'jpeg', \
'icon': 'https://www.england.nhs.uk/wp-includes/images/media/default.png', 'width': 455, 'height': 487, \
'sizes': {\
'thumbnail': 'https://www.england.nhs.uk/wp-content/uploads/2020/10/Clear-on-cancer-150x150.jpg', \
'thumbnail-width': 150, 'thumbnail-height': 150, \
'medium': 'https://www.england.nhs.uk/wp-content/uploads/2020/10/Clear-on-cancer-280x300.jpg', \
'medium-width': 280, 'medium-height': 300, \
'medium_large': 'https://www.england.nhs.uk/wp-content/uploads/2020/10/Clear-on-cancer.jpg', \
'medium_large-width': 455, 'medium_large-height': 487, \
'large': 'https://www.england.nhs.uk/wp-content/uploads/2020/10/Clear-on-cancer.jpg', \
'large-width': 455, 'large-height': 487, \
'1536x1536': 'https://www.england.nhs.uk/wp-content/uploads/2020/10/Clear-on-cancer.jpg', \
'1536x1536-width': 455, '1536x1536-height': 487, \
'2048x2048': 'https://www.england.nhs.uk/wp-content/uploads/2020/10/Clear-on-cancer.jpg', \
'2048x2048-width': 455, '2048x2048-height': 487\
}, \
}, \
'article_image_alignment': 'has-left-aligned-image', \
'article_image_size': 'has-zero-width-image', \
'article_background': False, \
'article_background_colour': '', \
'article_title': '', \
'article_content': '<p>article 2NHS England and NHS Improvement leads the National Health Service (NHS) in England, find out more about what we do:</p>\\n', \
'article_url': ''}, \
{'acf_fc_layout': 'article_component', \
'article_image': False, \
'article_image_alignment': 'has-left-aligned-image', \
'article_image_size': 'has-zero-width-image', \
'article_background': False, \
'article_background_colour': '', \
'article_title': '', \
'article_content': '<p>article 3NHS England and NHS Improvement leads the National Health Service (NHS) in England, find out more about what we do:</p>\\n', \
'article_url': ''}, \
]"
DATA_1 = "[\
{'acf_fc_layout': 'topic_section_component', 'topic_section_title': '', 'in_this_section': \
[\
{\
'topic_title': 'About the Involvement Hub', \
'topic_content': '<p>A source of information for people who want to get involved in our work or enable others to participate.</p>\\n', \
'topic_url': 'https://www.england.nhs.uk/participation/about/'\
}, {'topic_title': 'Information for commissioners', 'topic_content': '<p>Statutory guidance for Clinical Commissioning Groups on involving patients and the public.</p>\\n', 'topic_url': 'https://www.england.nhs.uk/participation/involvementguidance'}, {'topic_title': 'Surveys and consultations', 'topic_content': '<p>Have your say on our current consultations and surveys.</p>\\n', 'topic_url': 'https://www.engage.england.nhs.uk/'}, {'topic_title': 'Learning and development', 'topic_content': '<p>Workshops, webinars and elearning to improve understanding of the healthcare sector and participation. </p>\\n', 'topic_url': 'https://www.england.nhs.uk/participation/learning/'}, {'topic_title': 'Good practice and case studies', 'topic_content': '<p>Examples of good practice in involving people in healthcare services and service development.</p>\\n', 'topic_url': 'https://www.england.nhs.uk/participation/success/'}, {'topic_title': 'Resources and bite sized guides', 'topic_content': '<p>A variety of resources to support you in your involvement work, including bitesize guides to participation.</p>\\n', 'topic_url': 'https://www.england.nhs.uk/participation/resources/'}]}, \
\
{'acf_fc_layout': 'priorities_component', 'priorities_section_title': '', 'our_priorities': \
[\
{\
'nhsuk_highlight': False, \
'priority_title': 'How to get involved', \
'priority_url': 'https://www.england.nhs.uk/participation/get-involved/'}, \
{'nhsuk_highlight': False, 'priority_title': 'Why get involved', 'priority_url': 'https://www.england.nhs.uk/participation/why/'}, \
{'nhsuk_highlight': False, 'priority_title': 'Current opportunities', 'priority_url': 'https://www.england.nhs.uk/participation/get-involved/opportunities/'}]}, \
\
{'acf_fc_layout': 'in_this_section_component', 'in_this_section_title': 'You may also be interested in', 'in_this_section_topics': [{'type': 'link', 'in_this_section_link_title': 'An introduction to the NHS', 'in_this_section_link_url': 'https://www.england.nhs.uk/participation/nhs/', 'in_this_section_page': False}]}, \
\
{'acf_fc_layout': 'recent_posts_component', 'section_title': 'News and blogs', 'post_type': ['blog', 'post'], 'number_of_posts': '3', 'show_see_all': True, 'select_category': [2687], 'background': True, 'background_colour': '#e8edee'}, \
\
{'acf_fc_layout': 'promos_component', 'promo_component': \
[\
{'nhsuk_highlight': False, 'promo_image': {'ID': 131123, 'id': 131123, 'title': 'intouch-banner-3', 'filename': 'intouch-banner-3.png', 'filesize': 315683, 'url': 'https://www.england.nhs.uk/wp-content/uploads/2019/01/intouch-banner-3.png', 'link': 'https://www.england.nhs.uk/homepage/intouch-banner-3/', 'alt': 'Signup to receive the In touch bulletin', 'author': '1849', 'description': '', 'caption': '', 'name': 'intouch-banner-3', 'status': 'inherit', 'uploaded_to': 98164, 'date': '2019-01-29 18:43:47', 'modified': '2019-01-29 18:44:19', 'menu_order': 0, 'mime_type': 'image/png', 'type': 'image', 'subtype': 'png', 'icon': 'https://www.england.nhs.uk/wp-includes/images/media/default.png', 'width': 1200, 'height': 263, 'sizes': {'thumbnail': 'https://www.england.nhs.uk/wp-content/uploads/2019/01/intouch-banner-3-150x150.png', 'thumbnail-width': 150, 'thumbnail-height': 150, 'medium': 'https://www.england.nhs.uk/wp-content/uploads/2019/01/intouch-banner-3-300x66.png', 'medium-width': 300, 'medium-height': 66, 'medium_large': 'https://www.england.nhs.uk/wp-content/uploads/2019/01/intouch-banner-3-768x168.png', 'medium_large-width': 768, 'medium_large-height': 168, 'large': 'https://www.england.nhs.uk/wp-content/uploads/2019/01/intouch-banner-3-1024x224.png', 'large-width': 1024, 'large-height': 224, '1536x1536': 'https://www.england.nhs.uk/wp-content/uploads/2019/01/intouch-banner-3.png', '1536x1536-width': 1200, '1536x1536-height': 263, '2048x2048': 'https://www.england.nhs.uk/wp-content/uploads/2019/01/intouch-banner-3.png', '2048x2048-width': 1200, '2048x2048-height': 263}}, 'promo_title': '', 'promo_content': '', 'promo_url': 'https://www.england.nhs.uk/email-bulletins/in-touch-bulletin/'}]}]"
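# Sketch (not in the original file): these fixtures are Python-literal strings,
# so a test could rebuild the component list with ast.literal_eval, e.g.
#   import ast
#   components = ast.literal_eval(DATA_1)
#   assert components[0]['acf_fc_layout'] == 'topic_section_component'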
"""article_component, promos_component, priorities_component, a_to_z_index_component"""
DATA_2 = "[\
{'acf_fc_layout': 'article_component', \
'article_image': {\
'ID': 163377, \
'id': 163377, \
'title': 'Clear on cancer', \
'filename': 'Clear-on-cancer.jpg', \
'filesize': 137636, \
'url': 'https://www.england.nhs.uk/wp-content/uploads/2020/10/Clear-on-cancer.jpg', \
'link': 'https://www.england.nhs.uk/homepage/clear-on-cancer/', \
'alt': 'Help Us, Help you', 'author': '2119', 'description': '', 'caption': '', \
'name': 'clear-on-cancer', 'status': 'inherit', 'uploaded_to': 98164, \
'date': '2020-10-09 16:07:59', 'modified': '2020-10-09 16:08:19', \
'menu_order': 0, 'mime_type': 'image/jpeg', 'type': 'image', 'subtype': 'jpeg', \
'icon': 'https://www.england.nhs.uk/wp-includes/images/media/default.png', 'width': 455, 'height': 487, \
'sizes': {\
'thumbnail': 'https://www.england.nhs.uk/wp-content/uploads/2020/10/Clear-on-cancer-150x150.jpg', \
'thumbnail-width': 150, 'thumbnail-height': 150, \
'medium': 'https://www.england.nhs.uk/wp-content/uploads/2020/10/Clear-on-cancer-280x300.jpg', \
'medium-width': 280, 'medium-height': 300, \
'medium_large': 'https://www.england.nhs.uk/wp-content/uploads/2020/10/Clear-on-cancer.jpg', \
'medium_large-width': 455, 'medium_large-height': 487, \
'large': 'https://www.england.nhs.uk/wp-content/uploads/2020/10/Clear-on-cancer.jpg', \
'large-width': 455, 'large-height': 487, \
'1536x1536': 'https://www.england.nhs.uk/wp-content/uploads/2020/10/Clear-on-cancer.jpg', \
'1536x1536-width': 455, '1536x1536-height': 487, \
'2048x2048': 'https://www.england.nhs.uk/wp-content/uploads/2020/10/Clear-on-cancer.jpg', \
'2048x2048-width': 455, '2048x2048-height': 487\
}, \
}, \
'article_image_alignment': 'has-left-aligned-image', \
'article_image_size': 'has-zero-width-image', \
'article_background': False, \
'article_background_colour': '', \
'article_title': '', \
'article_content': '<p>NHS England and NHS Improvement leads the National Health Service (NHS) in England, find out more about what we do:</p>\\n', \
'article_url': ''}, \
{'acf_fc_layout': 'promos_component', 'promo_component': \
[\
{\
'nhsuk_highlight': False, \
'promo_image': {\
'ID': 129988, \
'id': 129988, \
'title': 'surgeons-400x267', \
'filename': 'surgeons-400x267.jpg', \
'filesize': 117610, \
'url': 'https://www.england.nhs.uk/wp-content/uploads/2019/01/surgeons-400x267.jpg', \
'link': 'https://www.england.nhs.uk/ourwork/surgeons-400x267/', \
'alt': 'Surgeons operate on a patient', \
'author': '1904', \
'description': '', \
'caption': '', \
'name': 'surgeons-400x267', \
'status': 'inherit', \
'uploaded_to': 15136, \
'date': '2019-01-09 10:43:53', \
'modified': '2019-01-09 10:45:30', \
'menu_order': 0, \
'mime_type': 'image/jpeg', \
'type': 'image', \
'subtype': 'jpeg', \
'icon': 'https://www.england.nhs.uk/wp-includes/images/media/default.png', \
'width': 400, \
'height': 267, \
'sizes': {'thumbnail': 'https://www.england.nhs.uk/wp-content/uploads/2019/01/surgeons-400x267-150x150.jpg', 'thumbnail-width': 150, 'thumbnail-height': 150, 'medium': 'https://www.england.nhs.uk/wp-content/uploads/2019/01/surgeons-400x267-300x200.jpg', 'medium-width': 300, 'medium-height': 200, 'medium_large': 'https://www.england.nhs.uk/wp-content/uploads/2019/01/surgeons-400x267.jpg', 'medium_large-width': 400, 'medium_large-height': 267, 'large': 'https://www.england.nhs.uk/wp-content/uploads/2019/01/surgeons-400x267.jpg', 'large-width': 400, 'large-height': 267, '1536x1536': 'https://www.england.nhs.uk/wp-content/uploads/2019/01/surgeons-400x267.jpg', '1536x1536-width': 400, '1536x1536-height': 267, '2048x2048': 'https://www.england.nhs.uk/wp-content/uploads/2019/01/surgeons-400x267.jpg', '2048x2048-width': 400, '2048x2048-height': 267}}, 'promo_title': 'NHS Long Term Plan', 'promo_content': '', 'promo_url': 'https://www.england.nhs.uk/long-term-plan/'}, \
{'nhsuk_highlight': False, 'promo_image': {'ID': 78273, 'id': 78273, 'title': '', 'filename': 'cancer_400x267.jpg', 'filesize': 149433, 'url': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/cancer_400x267.jpg', 'link': 'https://www.england.nhs.uk/?attachment_id=78273', 'alt': 'Two women taking part in a fundraising event', 'author': '1920', 'description': '', 'caption': '', 'name': 'muddy-fundraising', 'status': 'inherit', 'uploaded_to': 78255, 'date': '2017-02-08 13:49:56', 'modified': '2017-10-09 14:18:57', 'menu_order': 0, 'mime_type': 'image/jpeg', 'type': 'image', 'subtype': 'jpeg', 'icon': 'https://www.england.nhs.uk/wp-includes/images/media/default.png', 'width': 400, 'height': 267, 'sizes': {'thumbnail': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/cancer_400x267-150x150.jpg', 'thumbnail-width': 150, 'thumbnail-height': 150, 'medium': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/cancer_400x267-300x200.jpg', 'medium-width': 300, 'medium-height': 200, 'medium_large': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/cancer_400x267.jpg', 'medium_large-width': 400, 'medium_large-height': 267, 'large': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/cancer_400x267.jpg', 'large-width': 400, 'large-height': 267, '1536x1536': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/cancer_400x267.jpg', '1536x1536-width': 400, '1536x1536-height': 267, '2048x2048': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/cancer_400x267.jpg', '2048x2048-width': 400, '2048x2048-height': 267}}, 'promo_title': 'Cancer', 'promo_content': '', 'promo_url': 'https://www.england.nhs.uk/cancer/'}, \
{'nhsuk_highlight': False, 'promo_image': {'ID': 78275, 'id': 78275, 'title': '', 'filename': 'mental-health-and-dementia_400x267.jpg', 'filesize': 127258, 'url': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/mental-health-and-dementia_400x267.jpg', 'link': 'https://www.england.nhs.uk/?attachment_id=78275', 'alt': 'A father and son read a letter', 'author': '1920', 'description': '', 'caption': '', 'name': 'relation-between-father-and-son', 'status': 'inherit', 'uploaded_to': 78255, 'date': '2017-02-08 13:50:41', 'modified': '2017-10-09 14:19:14', 'menu_order': 0, 'mime_type': 'image/jpeg', 'type': 'image', 'subtype': 'jpeg', 'icon': 'https://www.england.nhs.uk/wp-includes/images/media/default.png', 'width': 400, 'height': 267, 'sizes': {'thumbnail': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/mental-health-and-dementia_400x267-150x150.jpg', 'thumbnail-width': 150, 'thumbnail-height': 150, 'medium': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/mental-health-and-dementia_400x267-300x200.jpg', 'medium-width': 300, 'medium-height': 200, 'medium_large': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/mental-health-and-dementia_400x267.jpg', 'medium_large-width': 400, 'medium_large-height': 267, 'large': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/mental-health-and-dementia_400x267.jpg', 'large-width': 400, 'large-height': 267, '1536x1536': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/mental-health-and-dementia_400x267.jpg', '1536x1536-width': 400, '1536x1536-height': 267, '2048x2048': 'https://www.england.nhs.uk/wp-content/uploads/2017/02/mental-health-and-dementia_400x267.jpg', '2048x2048-width': 400, '2048x2048-height': 267}}, 'promo_title': 'Mental health', 'promo_content': '', 'promo_url': 'https://www.england.nhs.uk/mental-health/'}]}, \
{'acf_fc_layout': 'promos_component', 'promo_component': \
[\
{'nhsuk_highlight': False, 'promo_image': {'ID': 88622, 'id': 88622, 'title': '', 'filename': 'urgent-and-emergency-care_400x267.jpg', 'filesize': 103334, 'url': 'https://www.england.nhs.uk/wp-content/uploads/2017/03/urgent-and-emergency-care_400x267.jpg', 'link': 'https://www.england.nhs.uk/ourwork/ambulance-crew-pulling-stretcher/', 'alt': 'An ambulance crew pulling a stretcher', 'author': '1904', 'description': '', 'caption': '', 'name': 'ambulance-crew-pulling-stretcher', 'status': 'inherit', 'uploaded_to': 15136, 'date': '2017-03-29 13:06:38', 'modified': '2017-10-09 14:19:22', 'menu_order': 0, 'mime_type': 'image/jpeg', 'type': 'image', 'subtype': 'jpeg', 'icon': 'https://www.england.nhs.uk/wp-includes/images/media/default.png', 'width': 400, 'height': 267, 'sizes': {'thumbnail': 'https://www.england.nhs.uk/wp-content/uploads/2017/03/urgent-and-emergency-care_400x267-150x150.jpg', 'thumbnail-width': 150, 'thumbnail-height': 150, 'medium': 'https://www.england.nhs.uk/wp-content/uploads/2017/03/urgent-and-emergency-care_400x267-300x200.jpg', 'medium-width': 300, 'medium-height': 200, 'medium_large': 'https://www.england.nhs.uk/wp-content/uploads/2017/03/urgent-and-emergency-care_400x267.jpg', 'medium_large-width': 400, 'medium_large-height': 267, 'large': 'https://www.england.nhs.uk/wp-content/uploads/2017/03/urgent-and-emergency-care_400x267.jpg', 'large-width': 400, 'large-height': 267, '1536x1536': 'https://www.england.nhs.uk/wp-content/uploads/2017/03/urgent-and-emergency-care_400x267.jpg', '1536x1536-width': 400, '1536x1536-height': 267, '2048x2048': 'https://www.england.nhs.uk/wp-content/uploads/2017/03/urgent-and-emergency-care_400x267.jpg', '2048x2048-width': 400, '2048x2048-height': 267}}, 'promo_title': 'Urgent and emergency care', 'promo_content': '', 'promo_url': 'https://www.england.nhs.uk/urgent-emergency-care/'}, \
{'nhsuk_highlight': False, 'promo_image': {'ID': 88618, 'id': 88618, 'title': '', 'filename': 'primary-care_400x267.jpg', 'filesize': 104232, 'url': 'https://www.england.nhs.uk/wp-content/uploads/2017/03/primary-care_400x267.jpg', 'link': 'https://www.england.nhs.uk/five-year-forward-view/next-steps-on-the-nhs-five-year-forward-view/doctor-chatting-to-male-patient-2/', 'alt': 'A doctor in discussion with a patient', 'author': '1904', 'description': '', 'caption': '', 'name': 'doctor-chatting-to-male-patient-2', 'status': 'inherit', 'uploaded_to': 88790, 'date': '2017-03-29 13:06:19', 'modified': '2019-03-11 15:40:55', 'menu_order': 0, 'mime_type': 'image/jpeg', 'type': 'image', 'subtype': 'jpeg', 'icon': 'https://www.england.nhs.uk/wp-includes/images/media/default.png', 'width': 400, 'height': 267, 'sizes': {'thumbnail': 'https://www.england.nhs.uk/wp-content/uploads/2017/03/primary-care_400x267-150x150.jpg', 'thumbnail-width': 150, 'thumbnail-height': 150, 'medium': 'https://www.england.nhs.uk/wp-content/uploads/2017/03/primary-care_400x267-300x200.jpg', 'medium-width': 300, 'medium-height': 200, 'medium_large': 'https://www.england.nhs.uk/wp-content/uploads/2017/03/primary-care_400x267.jpg', 'medium_large-width': 400, 'medium_large-height': 267, 'large': 'https://www.england.nhs.uk/wp-content/uploads/2017/03/primary-care_400x267.jpg', 'large-width': 400, 'large-height': 267, '1536x1536': 'https://www.england.nhs.uk/wp-content/uploads/2017/03/primary-care_400x267.jpg', '1536x1536-width': 400, '1536x1536-height': 267, '2048x2048': 'https://www.england.nhs.uk/wp-content/uploads/2017/03/primary-care_400x267.jpg', '2048x2048-width': 400, '2048x2048-height': 267}}, 'promo_title': 'Primary care', 'promo_content': '', 'promo_url': 'https://www.england.nhs.uk/primary-care/'}, \
{'nhsuk_highlight': False, 'promo_image': {'ID': 88505, 'id': 88505, 'title': '', 'filename': 'patients-01_400x267.jpg', 'filesize': 121593, 'url': 'https://www.england.nhs.uk/wp-content/uploads/2012/10/patients-01_400x267.jpg', 'link': 'https://www.england.nhs.uk/ourwork/baby-clinic-visit-for-toddler-and-mum-3/', 'alt': 'A mother and child chat with a nurse', 'author': '1920', 'description': '', 'caption': '', 'name': 'baby-clinic-visit-for-toddler-and-mum-3', 'status': 'inherit', 'uploaded_to': 15136, 'date': '2017-03-28 14:34:08', 'modified': '2017-10-09 14:19:50', 'menu_order': 0, 'mime_type': 'image/jpeg', 'type': 'image', 'subtype': 'jpeg', 'icon': 'https://www.england.nhs.uk/wp-includes/images/media/default.png', 'width': 400, 'height': 267, 'sizes': {'thumbnail': 'https://www.england.nhs.uk/wp-content/uploads/2012/10/patients-01_400x267-150x150.jpg', 'thumbnail-width': 150, 'thumbnail-height': 150, 'medium': 'https://www.england.nhs.uk/wp-content/uploads/2012/10/patients-01_400x267-300x200.jpg', 'medium-width': 300, 'medium-height': 200, 'medium_large': 'https://www.england.nhs.uk/wp-content/uploads/2012/10/patients-01_400x267.jpg', 'medium_large-width': 400, 'medium_large-height': 267, 'large': 'https://www.england.nhs.uk/wp-content/uploads/2012/10/patients-01_400x267.jpg', 'large-width': 400, 'large-height': 267, '1536x1536': 'https://www.england.nhs.uk/wp-content/uploads/2012/10/patients-01_400x267.jpg', '1536x1536-width': 400, '1536x1536-height': 267, '2048x2048': 'https://www.england.nhs.uk/wp-content/uploads/2012/10/patients-01_400x267.jpg', '2048x2048-width': 400, '2048x2048-height': 267}}, 'promo_title': 'Integrated care', 'promo_content': '', 'promo_url': 'https://www.england.nhs.uk/integratedcare/'}]}, \
{'acf_fc_layout': 'priorities_component', 'priorities_section_title': '', 'our_priorities': \
[\
{'nhsuk_highlight': False, 'priority_title': 'NHS Diabetes Prevention Programme (NHS DPP)', 'priority_url': 'https://www.england.nhs.uk/diabetes/'}, \
{'nhsuk_highlight': False, 'priority_title': 'Nursing, midwifery and care staff', 'priority_url': 'https://www.england.nhs.uk/nursingmidwifery/'}, \
{'nhsuk_highlight': False, 'priority_title': 'Clinical review of NHS access standards', 'priority_url': 'https://www.england.nhs.uk/clinically-led-review-nhs-access-standards/'}, \
{'nhsuk_highlight': False, 'priority_title': 'NHS Standard Contract ', 'priority_url': 'https://www.england.nhs.uk/nhs-standard-contract/'}, \
{'nhsuk_highlight': False, 'priority_title': 'NHS RightCare', 'priority_url': 'https://www.england.nhs.uk/rightcare/'}, \
{'nhsuk_highlight': False, 'priority_title': 'Learning disability and autism', 'priority_url': 'https://www.england.nhs.uk/learning-disabilities/'}]}, \
{'acf_fc_layout': 'a_to_z_index_component', 'a_to_z_index_title': 'A to Z of topics', 'a_to_z_index_content': '<p>Can’t find what you’re looking for? Our A to Z of topics helps you find information quickly:</p>\\n'}]"
"""promos_component, topic_section_component, in_this_section_component"""
DATA_3 = "[\
{'acf_fc_layout': 'promos_component', 'promo_component': \
[{'nhsuk_highlight': False, 'promo_image': {'ID': 102119, 'id': 102119, 'title': '', 'filename': 'about-us_1200x300.jpg', 'filesize': 265121, 'url': 'https://www.england.nhs.uk/wp-content/uploads/2017/08/about-us_1200x300.jpg', 'link': 'https://www.england.nhs.uk/about/medical-staff-having-meeting-at-nurse-station/', 'alt': '', 'author': '1920', 'description': '', 'caption': '', 'name': 'medical-staff-having-meeting-at-nurse-station', 'status': 'inherit', 'uploaded_to': 991, 'date': '2017-08-23 10:43:55', 'modified': '2018-09-13 14:41:24', 'menu_order': 0, 'mime_type': 'image/jpeg', 'type': 'image', 'subtype': 'jpeg', 'icon': 'https://www.england.nhs.uk/wp-includes/images/media/default.png', 'width': 1200, 'height': 300, 'sizes': {'thumbnail': 'https://www.england.nhs.uk/wp-content/uploads/2017/08/about-us_1200x300-150x150.jpg', 'thumbnail-width': 150, 'thumbnail-height': 150, 'medium': 'https://www.england.nhs.uk/wp-content/uploads/2017/08/about-us_1200x300-300x75.jpg', 'medium-width': 300, 'medium-height': 75, 'medium_large': 'https://www.england.nhs.uk/wp-content/uploads/2017/08/about-us_1200x300-768x192.jpg', 'medium_large-width': 768, 'medium_large-height': 192, 'large': 'https://www.england.nhs.uk/wp-content/uploads/2017/08/about-us_1200x300-1024x256.jpg', 'large-width': 1024, 'large-height': 256, '1536x1536': 'https://www.england.nhs.uk/wp-content/uploads/2017/08/about-us_1200x300.jpg', '1536x1536-width': 1200, '1536x1536-height': 300, '2048x2048': 'https://www.england.nhs.uk/wp-content/uploads/2017/08/about-us_1200x300.jpg', '2048x2048-width': 1200, '2048x2048-height': 300}}, 'promo_title': '', 'promo_content': '', 'promo_url': ''}]}, \
{'acf_fc_layout': 'topic_section_component', 'topic_section_title': '', 'in_this_section': \
[{'topic_title': 'What do we do?', 'topic_content': '<p>NHS England and NHS Improvement leads the National Health Service (NHS) in England, find out more about what we do.</p>\\n', 'topic_url': 'https://www.england.nhs.uk/about/about-nhs-england'}, {'topic_title': 'Our Board', 'topic_content': '<p>Find out about our Board, its members, its roles and responsibilities and dates of future meetings.</p>\\n', 'topic_url': 'https://www.england.nhs.uk/about/board/'}, {'topic_title': 'Corporate publications', 'topic_content': '<p>Read our annual report, business plan, financial performance reports and the Five Year Forward View.</p>\\n', 'topic_url': 'https://www.england.nhs.uk/publications'}, {'topic_title': 'Regional teams', 'topic_content': '<p>There are seven regional teams that support the commissioning of healthcare services for different parts of the country.</p>\\n', 'topic_url': 'https://www.england.nhs.uk/about/regional-area-teams/'}, {'topic_title': 'Our work', 'topic_content': \"<p>Learn about what we're doing in cancer, primary care, mental health, urgent and emergency care and other key areas.</p>\\n\", 'topic_url': 'https://www.england.nhs.uk/ourwork'}]}, \
{'acf_fc_layout': 'in_this_section_component', 'in_this_section_title': 'You may also be interested in', 'in_this_section_topics': [{'type': 'link', 'in_this_section_link_title': 'Sustainable development', 'in_this_section_link_url': 'https://www.england.nhs.uk/about/sustainable-development/', 'in_this_section_page': False}, {'type': 'link', 'in_this_section_link_title': 'Working for us', 'in_this_section_link_url': 'https://www.england.nhs.uk/about/working-for/', 'in_this_section_page': False}, {'type': 'link', 'in_this_section_link_title': 'Contact us', 'in_this_section_link_url': 'https://www.england.nhs.uk/contact-us/', 'in_this_section_page': False}, {'type': 'link', 'in_this_section_link_title': 'Equality, diversity and health inequalities', 'in_this_section_link_url': 'https://www.england.nhs.uk/about/equality/', 'in_this_section_page': False}]}]"
"""
#DATA_HOME_PAGE = '[{'acf_fc_layout': 'article_component', 'article_image': {'ID': 163377, 'id': 163377, 'title': 'Clear on cancer', 'filename': 'Clear-on-cancer.jpg', 'filesize': 137636, 'url': 'https://www.england.nhs.uk/wp-content/uploads/2020/10/Clear-on-cancer.jpg', 'link': 'https://www.england.nhs.uk/homepage/clear-on-cancer/', 'alt': 'Help Us, Help you', 'author': '2119', 'description': '', 'caption': '', 'name': 'clear-on-cancer', 'status': 'inherit', 'uploaded_to': 98164, 'date': '2020-10-09 16:07:59', 'modified': '2020-10-09 16:08:19', 'menu_order': 0, 'mime_type': 'image/jpeg', 'type': 'image', 'subtype': 'jpeg', 'icon': 'https://www.england.nhs.uk/wp-includes/images/media/default.png', 'width': 455, 'height': 487, 'sizes': {'thumbnail': 'https://www.england.nhs.uk/wp-content/uploads/2020/10/Clear-on-cancer-150x150.jpg', 'thumbnail-width': 150, 'thumbnail-height': 150, 'medium': 'https://www.england.nhs.uk/wp-content/uploads/2020/10/Clear-on-cancer-280x300.jpg', 'medium-width': 280, 'medium-height': 300, 'medium_large': 'https://www.england.nhs.uk/wp-content/uploads/2020/10/Clear-on-cancer.jpg', 'medium_large-width': 455, 'medium_large-height': 487, 'large': 'https://www.england.nhs.uk/wp-content/uploads/2020/10/Clear-on-cancer.jpg', 'large-width': 455, 'large-height': 487, '1536x1536': 'https://www.england.nhs.uk/wp-content/uploads/2020/10/Clear-on-cancer.jpg', '1536x1536-width': 455, '1536x1536-height': 487, '2048x2048': 'https://www.england.nhs.uk/wp-content/uploads/2020/10/Clear-on-cancer.jpg', '2048x2048-width': 455, '2048x2048-height': 487}}, 'article_image_alignment': 'has-left-aligned-image', 'article_image_size': 'has-half-width-image', 'article_background': False, 'article_background_colour': '#e8edee', 'article_title': 'Help Us, Help You - Accessing NHS Services campaign', 'article_content': '<p>The NHS has introduced a range of measures to ensure the safety of patients and the ‘Help Us, Help You’ campaign will help to reassure them they can receive medical care safely. This new campaign launched in October, to address the barriers deterring people from accessing NHS services during the pandemic.</p>\n<p>The first phase of the campaign encourages people to contact their GP if they are worried about a <a href="https://www.nhs.uk/conditions/cancer/symptoms/">symptom that could be cancer</a>.\xa0Further phases will remind pregnant women to attend check-ups and seek advice if they are worried about their baby, ask patients to keep their routine elective appointments, and encourage those with mental health issues to access NHS support.</p>\n<p><strong>Your NHS is here to see you, safely. </strong></p>\n<p>Stakeholders, partners and regional teams can access resources to help them support the campaign via the <a href="https://campaignresources.phe.gov.uk/resources/campaigns/113-help-us">Campaign Resource Centre</a>.</p>\n', 'article_url': 'https://www.england.nhs.uk/2020/10/celebs-and-lockdown-heroes-urge-public-to-get-cancer-symptoms-checked-and-attend-routine-appointments/'}, {'acf_fc_layout': 'article_component', 'article_image': False, 'article_image_alignment': 'has-left-aligned-image', 'article_image_size': 'has-zero-width-image', 'article_background': True, 'article_background_colour': '#e8edee', 'article_title': 'How could this website work better for you?', 'article_content': '<p>We are building a new single website for NHS England and NHS Improvement. 
In the meantime, you can find information on both providers and commissioned healthcare services on this website.</p>\n<p>We want to understand how you’re using our website so we can make the new one better. Please\xa0<a href="https://www.england.nhs.uk/contact-us/feedback/">fill out this short form</a>\xa0if you’d like to improve the site.</p>\n', 'article_url': ''}]'
"""
| 127.945289
| 3,811
| 0.64824
| 5,521
| 42,094
| 4.818873
| 0.092193
| 0.052922
| 0.078933
| 0.117046
| 0.867544
| 0.839579
| 0.82274
| 0.799474
| 0.790641
| 0.783537
| 0
| 0.096922
| 0.160997
| 42,094
| 329
| 3,812
| 127.945289
| 0.656398
| 0.019599
| 0
| 0.728302
| 0
| 0.279245
| 0.698232
| 0.039928
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| 1
| 0
| 1
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 11
|
607cbebbd14f1940da85fb97519bb16e374eb92d
| 186
|
py
|
Python
|
davis/models/utils/crypto.py
|
deanwang539/davis_v0.1
|
36416b4495fbe8ae15994c4d0e009b8955bef7ba
|
[
"MIT"
] | null | null | null |
davis/models/utils/crypto.py
|
deanwang539/davis_v0.1
|
36416b4495fbe8ae15994c4d0e009b8955bef7ba
|
[
"MIT"
] | null | null | null |
davis/models/utils/crypto.py
|
deanwang539/davis_v0.1
|
36416b4495fbe8ae15994c4d0e009b8955bef7ba
|
[
"MIT"
] | null | null | null |
from passlib.hash import pbkdf2_sha256
def encrypt_psw(psw):
    # Hash a plaintext password; passlib generates and embeds a random salt.
    return pbkdf2_sha256.hash(psw)
def compare_psw(current, saved):
    # Verify a plaintext candidate against a stored pbkdf2_sha256 hash.
    return pbkdf2_sha256.verify(current, saved)
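# Usage sketch (illustrative, not part of the original module): the salt is
# embedded in the hash string, so verification needs only the stored value.
#   stored = encrypt_psw("s3cret")        # -> '$pbkdf2-sha256$29000$...'
#   assert compare_psw("s3cret", stored)
#   assert not compare_psw("wrong", stored)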
| 23.25
| 48
| 0.752688
| 26
| 186
| 5.192308
| 0.538462
| 0.266667
| 0.266667
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.077419
| 0.166667
| 186
| 7
| 49
| 26.571429
| 0.793548
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.4
| false
| 0.2
| 0.2
| 0.4
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 1
| 0
|
0
| 8
|
6089a095480d0ba40127815463929619d7671f35
| 1,850
|
py
|
Python
|
test/test_elastic_retriever.py
|
mrusic/haystack
|
66a1893f79eed4de0198ec73d9d5dbbaa3495723
|
[
"Apache-2.0"
] | 2
|
2020-09-11T10:39:23.000Z
|
2020-10-29T09:56:48.000Z
|
test/test_elastic_retriever.py
|
adbmd/haystack
|
8e044dc16fe23d31c17d59bed8bb40e43a5bbb3e
|
[
"Apache-2.0"
] | null | null | null |
test/test_elastic_retriever.py
|
adbmd/haystack
|
8e044dc16fe23d31c17d59bed8bb40e43a5bbb3e
|
[
"Apache-2.0"
] | 1
|
2021-05-07T06:08:29.000Z
|
2021-05-07T06:08:29.000Z
|
from haystack.retriever.sparse import ElasticsearchRetriever
import pytest
@pytest.mark.parametrize("document_store_with_docs", [("elasticsearch")], indirect=True)
def test_elasticsearch_retrieval(document_store_with_docs):
    retriever = ElasticsearchRetriever(document_store=document_store_with_docs)
    res = retriever.retrieve(query="Who lives in Berlin?")
    assert res[0].text == "My name is Carla and I live in Berlin"
    assert len(res) == 3
    assert res[0].meta["name"] == "filename1"
@pytest.mark.parametrize("document_store_with_docs", [("elasticsearch")], indirect=True)
def test_elasticsearch_retrieval_filters(document_store_with_docs):
    retriever = ElasticsearchRetriever(document_store=document_store_with_docs)
    res = retriever.retrieve(query="Who lives in Berlin?", filters={"name": ["filename1"]})
    assert res[0].text == "My name is Carla and I live in Berlin"
    assert len(res) == 1
    assert res[0].meta["name"] == "filename1"
    res = retriever.retrieve(query="Who lives in Berlin?", filters={"name": ["filename1"], "meta_field": ["not_existing_value"]})
    assert len(res) == 0
    res = retriever.retrieve(query="Who lives in Berlin?", filters={"name": ["filename1"], "not_existing_field": ["not_existing_value"]})
    assert len(res) == 0
    retriever = ElasticsearchRetriever(document_store=document_store_with_docs)
    res = retriever.retrieve(query="Who lives in Berlin?", filters={"name": ["filename1"], "meta_field": ["test1", "test2"]})
    assert res[0].text == "My name is Carla and I live in Berlin"
    assert len(res) == 1
    assert res[0].meta["name"] == "filename1"
    retriever = ElasticsearchRetriever(document_store=document_store_with_docs)
    res = retriever.retrieve(query="Who lives in Berlin?", filters={"name": ["filename1"], "meta_field": ["test2"]})
    assert len(res) == 0
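# Hypothetical sketch (not in the original file) of the document_store_with_docs
# fixture these tests rely on; it normally lives in conftest.py, and the import
# path varies across haystack 0.x releases.
from haystack.document_store.elasticsearch import ElasticsearchDocumentStore
@pytest.fixture
def document_store_with_docs(request):
    # request.param is "elasticsearch", supplied via indirect=True above
    store = ElasticsearchDocumentStore(host="localhost", index="test_docs")
    store.write_documents([
        {"text": "My name is Carla and I live in Berlin",
         "meta": {"name": "filename1", "meta_field": "test1"}},
        {"text": "My name is Paul and I live in New York",
         "meta": {"name": "filename2", "meta_field": "test2"}},
        {"text": "My name is Christelle and I live in Paris",
         "meta": {"name": "filename3", "meta_field": "test3"}},
    ])
    return store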
| 51.388889
| 136
| 0.721081
| 239
| 1,850
| 5.405858
| 0.200837
| 0.120743
| 0.105263
| 0.130031
| 0.911765
| 0.911765
| 0.890867
| 0.890867
| 0.842105
| 0.842105
| 0
| 0.014321
| 0.131892
| 1,850
| 35
| 137
| 52.857143
| 0.790162
| 0
| 0
| 0.607143
| 0
| 0
| 0.274595
| 0.025946
| 0
| 0
| 0
| 0
| 0.428571
| 1
| 0.071429
| false
| 0
| 0.071429
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
60a0af2207a213350baa25eb245c3addce5dd501
| 8,800
|
py
|
Python
|
django_openpay/admin.py
|
letops/django-openpay
|
4e9d050327a0e99b99031b771d8f3b670a91ddae
|
[
"MIT"
] | 3
|
2016-12-06T01:30:15.000Z
|
2020-09-18T19:40:49.000Z
|
django_openpay/admin.py
|
letops/django-openpay
|
4e9d050327a0e99b99031b771d8f3b670a91ddae
|
[
"MIT"
] | 5
|
2016-12-12T14:53:24.000Z
|
2022-01-06T20:58:54.000Z
|
django_openpay/admin.py
|
letops/django-openpay
|
4e9d050327a0e99b99031b771d8f3b670a91ddae
|
[
"MIT"
] | 6
|
2017-01-04T11:52:24.000Z
|
2020-07-22T20:18:33.000Z
|
from django.contrib import admin
from . import models, ugettext
from .utils import get_customer_model
CustomerModel = get_customer_model()
# Register your models here.
@admin.register(models.Address)
class AddressAdmin(admin.ModelAdmin):
model = models.Address
list_display = ('line1', 'line2', 'line3', 'city', 'state',
'country_code', 'postal_code')
def get_readonly_fields(self, request, obj=None):
return models.Address.get_readonly_fields(obj)
@admin.register(models.Card)
class CardAdmin(admin.ModelAdmin):
model = models.Card
actions = ['refresh', 'dismiss', ]
list_display = ('pk', 'openpay_id', 'alias', 'holder', 'customer',
'creation_date', 'deleted')
def refresh(self, request, queryset):
refreshed = 0
for instance in queryset:
instance.skip_signal = True
instance.op_refresh(save=True)
refreshed = refreshed + 1
if refreshed == 1:
message_bit = "1 instance was"
else:
message_bit = "%s instances were" % refreshed
self.message_user(
request,
"%s successfully refreshed." % message_bit
)
refresh.short_description = ugettext('Refresh selected instances')
def dismiss(self, request, queryset):
dismissed = 0
for instance in queryset:
instance.skip_signal = True
instance.op_dismiss(save=True)
dismissed = dismissed + 1
if dismissed == 1:
message_bit = "1 instance was"
else:
message_bit = "%s instances were" % dismissed
self.message_user(
request,
"%s successfully dismissed." % message_bit
)
dismiss.short_description = ugettext('Dismiss selected instances')
def get_readonly_fields(self, request, obj=None):
return models.Card.get_readonly_fields(obj)
@admin.register(models.Plan)
class PlanAdmin(admin.ModelAdmin):
model = models.Plan
actions = ['refresh', 'dismiss', ]
list_display = ('name', 'openpay_id', 'status', 'amount', 'repeat_every',
'repeat_unit', 'creation_date', 'deleted')
def refresh(self, request, queryset):
refreshed = 0
for instance in queryset:
instance.skip_signal = True
instance.op_refresh(save=True)
refreshed = refreshed + 1
if refreshed == 1:
message_bit = "1 instance was"
else:
message_bit = "%s instances were" % refreshed
self.message_user(
request,
"%s successfully refreshed." % message_bit
)
refresh.short_description = ugettext('Refresh selected instances')
def dismiss(self, request, queryset):
dismissed = 0
for instance in queryset:
instance.skip_signal = True
instance.op_dismiss(save=True)
dismissed = dismissed + 1
if dismissed == 1:
message_bit = "1 instance was"
else:
message_bit = "%s instances were" % dismissed
self.message_user(
request,
"%s successfully dismissed." % message_bit
)
dismiss.short_description = ugettext('Dismiss selected instances')
def get_readonly_fields(self, request, obj=None):
return models.Plan.get_readonly_fields(obj)
@admin.register(models.Subscription)
class SubscriptionAdmin(admin.ModelAdmin):
model = models.Subscription
actions = ['refresh', 'dismiss', ]
list_display = ('pk', 'openpay_id', 'customer', 'plan', 'card', 'status',
'creation_date', 'deleted')
def refresh(self, request, queryset):
refreshed = 0
for instance in queryset:
instance.skip_signal = True
instance.op_refresh(save=True)
refreshed = refreshed + 1
if refreshed == 1:
message_bit = "1 instance was"
else:
message_bit = "%s instances were" % refreshed
self.message_user(
request,
"%s successfully refreshed." % message_bit
)
refresh.short_description = ugettext('Refresh selected instances')
def dismiss(self, request, queryset):
dismissed = 0
for instance in queryset:
instance.skip_signal = True
instance.op_dismiss(save=True)
dismissed = dismissed + 1
if dismissed == 1:
message_bit = "1 instance was"
else:
message_bit = "%s instances were" % dismissed
self.message_user(
request,
"%s successfully dismissed." % message_bit
)
dismiss.short_description = ugettext('Dismiss selected instances')
def get_readonly_fields(self, request, obj=None):
return models.Subscription.get_readonly_fields(obj)
@admin.register(models.Refund)
class RefundAdmin(admin.ModelAdmin):
model = models.Refund
actions = ['refresh', 'dismiss', ]
list_display = ('pk', 'openpay_id', 'customer', 'charge', 'amount',
'creation_date', 'deleted')
def refresh(self, request, queryset):
refreshed = 0
for instance in queryset:
instance.skip_signal = True
instance.op_refresh(save=True)
refreshed = refreshed + 1
if refreshed == 1:
message_bit = "1 instance was"
else:
message_bit = "%s instances were" % refreshed
self.message_user(
request,
"%s successfully refreshed." % message_bit
)
refresh.short_description = ugettext('Refresh selected instances')
def dismiss(self, request, queryset):
dismissed = 0
for instance in queryset:
instance.skip_signal = True
instance.op_dismiss(save=True)
dismissed = dismissed + 1
if dismissed == 1:
message_bit = "1 instance was"
else:
message_bit = "%s instances were" % dismissed
self.message_user(
request,
"%s successfully dismissed." % message_bit
)
dismiss.short_description = ugettext('Dismiss selected instances')
def get_readonly_fields(self, request, obj=None):
return models.Refund.get_readonly_fields(obj)
@admin.register(models.Charge)
class ChargeAdmin(admin.ModelAdmin):
model = models.Charge
actions = ['refresh', 'capture', 'refund', 'dismiss', ]
list_display = ('pk', 'openpay_id', 'customer', 'card', 'amount',
'creation_date', 'deleted')
def refresh(self, request, queryset):
refreshed = 0
for instance in queryset:
instance.skip_signal = True
instance.op_refresh(save=True)
refreshed = refreshed + 1
if refreshed == 1:
message_bit = "1 instance was"
else:
message_bit = "%s instances were" % refreshed
self.message_user(
request,
"%s successfully refreshed." % message_bit
)
refresh.short_description = ugettext('Refresh selected instances')
def capture(self, request, queryset):
captured = 0
for charge in queryset:
charge.op_capture()
captured = captured + 1
if captured == 1:
message_bit = "1 charge was"
else:
message_bit = "%s charges were" % captured
self.message_user(
request,
"%s successfully captured." % message_bit
)
capture.short_description = ugettext('Capture selected charges')
def refund(self, request, queryset):
refunded = 0
for charge in queryset:
charge.op_refund()
refunded = refunded + 1
if refunded == 1:
message_bit = "1 charge was"
else:
message_bit = "%s charges were" % refunded
self.message_user(
request,
"%s successfully refunded." % message_bit
)
refund.short_description = ugettext('Refund selected charges')
def dismiss(self, request, queryset):
dismissed = 0
for instance in queryset:
instance.skip_signal = True
instance.op_dismiss(save=True)
dismissed = dismissed + 1
if dismissed == 1:
message_bit = "1 instance was"
else:
message_bit = "%s instances were" % dismissed
self.message_user(
request,
"%s successfully dismissed." % message_bit
)
dismiss.short_description = ugettext('Dismiss selected instances')
def get_readonly_fields(self, request, obj=None):
return models.Charge.get_readonly_fields(obj)
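# Design sketch (not part of the original file): the refresh/dismiss actions are
# repeated verbatim in every ModelAdmin above; a hypothetical mixin like this
# would express the same behaviour once.
class OpenpayActionsMixin:
    actions = ['refresh', 'dismiss', ]
    def _bulk(self, request, queryset, op, verb):
        # Apply op ('op_refresh' or 'op_dismiss') to each selected instance.
        count = 0
        for instance in queryset:
            instance.skip_signal = True
            getattr(instance, op)(save=True)
            count += 1
        message_bit = "1 instance was" if count == 1 else "%s instances were" % count
        self.message_user(request, "%s successfully %s." % (message_bit, verb))
    def refresh(self, request, queryset):
        self._bulk(request, queryset, 'op_refresh', 'refreshed')
    refresh.short_description = ugettext('Refresh selected instances')
    def dismiss(self, request, queryset):
        self._bulk(request, queryset, 'op_dismiss', 'dismissed')
    dismiss.short_description = ugettext('Dismiss selected instances')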
| 33.587786
| 77
| 0.595341
| 902
| 8,800
| 5.66408
| 0.104213
| 0.070464
| 0.03993
| 0.028186
| 0.80231
| 0.796046
| 0.782345
| 0.725974
| 0.717557
| 0.697592
| 0
| 0.008406
| 0.310568
| 8,800
| 261
| 78
| 33.716475
| 0.83369
| 0.002955
| 0
| 0.716157
| 0
| 0
| 0.158117
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.078603
| false
| 0
| 0.0131
| 0.026201
| 0.218341
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
|
8804fe0e06348442721d3ba939cf99603279ecd5
| 85,380
|
py
|
Python
|
huaweicloud-sdk-as/huaweicloudsdkas/v1/as_client.py
|
Adek06/huaweicloud-sdk-python-v3
|
3d13b27d089e04a1ae567cd649b3c5509e0391d2
|
[
"Apache-2.0"
] | null | null | null |
huaweicloud-sdk-as/huaweicloudsdkas/v1/as_client.py
|
Adek06/huaweicloud-sdk-python-v3
|
3d13b27d089e04a1ae567cd649b3c5509e0391d2
|
[
"Apache-2.0"
] | null | null | null |
huaweicloud-sdk-as/huaweicloudsdkas/v1/as_client.py
|
Adek06/huaweicloud-sdk-python-v3
|
3d13b27d089e04a1ae567cd649b3c5509e0391d2
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
from __future__ import absolute_import
import datetime
import re
import importlib
import six
from huaweicloudsdkcore.client import Client, ClientBuilder
from huaweicloudsdkcore.exceptions import exceptions
from huaweicloudsdkcore.utils import http_utils
from huaweicloudsdkcore.sdk_stream_request import SdkStreamRequest
class AsClient(Client):
"""
:param configuration: Configuration object for this client
:param pool_threads: The number of threads to use for async requests
to the API. More threads means more concurrent API requests.
"""
PRIMITIVE_TYPES = (float, bool, bytes, six.text_type) + six.integer_types
NATIVE_TYPES_MAPPING = {
'int': int,
'long': int if six.PY3 else long,
'float': float,
'str': str,
'bool': bool,
'date': datetime.date,
'datetime': datetime.datetime,
'object': object,
}
def __init__(self):
super(AsClient, self).__init__()
self.model_package = importlib.import_module("huaweicloudsdkas.v1.model")
self.preset_headers = {'User-Agent': 'HuaweiCloud-SDK-Python'}
@staticmethod
def new_builder(clazz):
return ClientBuilder(clazz)
def batch_delete_scaling_configs(self, request):
"""批量删除弹性伸缩配置
批量删除指定弹性伸缩配置。被伸缩组使用的伸缩配置不能被删除。单次最多删除伸缩配置个数为50。
:param BatchDeleteScalingConfigsRequest request
:return: BatchDeleteScalingConfigsResponse
"""
return self.batch_delete_scaling_configs_with_http_info(request)
def batch_delete_scaling_configs_with_http_info(self, request):
"""批量删除弹性伸缩配置
批量删除指定弹性伸缩配置。被伸缩组使用的伸缩配置不能被删除。单次最多删除伸缩配置个数为50。
:param BatchDeleteScalingConfigsRequest request
:return: BatchDeleteScalingConfigsResponse
"""
all_params = ['bodyparam']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_configurations',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='BatchDeleteScalingConfigsResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
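# Usage sketch (hypothetical values; the builder and credential helpers follow
# huaweicloudsdkcore conventions and are not defined in this file):
#   from huaweicloudsdkcore.auth.credentials import BasicCredentials
#   client = AsClient.new_builder(AsClient) \
#       .with_credentials(BasicCredentials(ak, sk, project_id)) \
#       .with_endpoint("https://as.cn-north-4.myhuaweicloud.com") \
#       .build()
#   response = client.batch_delete_scaling_configs(request)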
def complete_lifecycle_action(self, request):
"""伸缩实例生命周期回调
通过生命周期操作令牌或者通过实例ID和生命周期挂钩名称对伸缩实例指定的挂钩进行回调操作。如果在超时时间结束前已完成自定义操作,选择终止或继续完成生命周期操作。如果需要更多时间完成自定义操作,选择延长超时时间,实例保持等待状态的时间将增加1小时。只有实例的生命周期挂钩状态为 HANGING 时才可以进行回调操作。
:param CompleteLifecycleActionRequest request
:return: CompleteLifecycleActionResponse
"""
return self.complete_lifecycle_action_with_http_info(request)
def complete_lifecycle_action_with_http_info(self, request):
"""伸缩实例生命周期回调
通过生命周期操作令牌或者通过实例ID和生命周期挂钩名称对伸缩实例指定的挂钩进行回调操作。如果在超时时间结束前已完成自定义操作,选择终止或继续完成生命周期操作。如果需要更多时间完成自定义操作,选择延长超时时间,实例保持等待状态的时间将增加1小时。只有实例的生命周期挂钩状态为 HANGING 时才可以进行回调操作。
:param CompleteLifecycleActionRequest request
:return: CompleteLifecycleActionResponse
"""
all_params = ['scaling_group_id', 'bodyparam']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_group_id' in local_var_params:
path_params['scaling_group_id'] = local_var_params['scaling_group_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_instance_hook/{scaling_group_id}/callback',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CompleteLifecycleActionResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def create_lify_cycle_hook(self, request):
"""创建生命周期挂钩
创建生命周期挂钩,可为伸缩组添加一个或多个生命周期挂钩,最多添加5个。添加生命周期挂钩后,当伸缩组进行伸缩活动时,实例将被生命周期挂钩挂起并置于等待状态(正在加入伸缩组或正在移出伸缩组),实例将保持此状态直至超时时间结束或者用户手动回调。用户能够在实例保持等待状态的时间段内执行自定义操作,例如,用户可以在新启动的实例上安装或配置软件,也可以在实例终止前从实例中下载日志文件。
:param CreateLifyCycleHookRequest request
:return: CreateLifyCycleHookResponse
"""
return self.create_lify_cycle_hook_with_http_info(request)
def create_lify_cycle_hook_with_http_info(self, request):
"""创建生命周期挂钩
创建生命周期挂钩,可为伸缩组添加一个或多个生命周期挂钩,最多添加5个。添加生命周期挂钩后,当伸缩组进行伸缩活动时,实例将被生命周期挂钩挂起并置于等待状态(正在加入伸缩组或正在移出伸缩组),实例将保持此状态直至超时时间结束或者用户手动回调。用户能够在实例保持等待状态的时间段内执行自定义操作,例如,用户可以在新启动的实例上安装或配置软件,也可以在实例终止前从实例中下载日志文件。
:param CreateLifyCycleHookRequest request
:return: CreateLifyCycleHookResponse
"""
all_params = ['scaling_group_id', 'bodyparam']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_group_id' in local_var_params:
path_params['scaling_group_id'] = local_var_params['scaling_group_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_lifecycle_hook/{scaling_group_id}',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CreateLifyCycleHookResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def create_scaling_config(self, request):
"""创建弹性伸缩配置
创建弹性伸缩配置。伸缩配置是伸缩组内实例(弹性云服务器云主机)的模板,定义了伸缩组内待添加的实例的规格数据。伸缩配置与伸缩组是解耦的,同一伸缩配置可以被多个伸缩组使用。默认最多可以创建100个伸缩配置。
:param CreateScalingConfigRequest request
:return: CreateScalingConfigResponse
"""
return self.create_scaling_config_with_http_info(request)
def create_scaling_config_with_http_info(self, request):
"""创建弹性伸缩配置
创建弹性伸缩配置。伸缩配置是伸缩组内实例(弹性云服务器云主机)的模板,定义了伸缩组内待添加的实例的规格数据。伸缩配置与伸缩组是解耦的,同一伸缩配置可以被多个伸缩组使用。默认最多可以创建100个伸缩配置。
:param CreateScalingConfigRequest request
:return: CreateScalingConfigResponse
"""
all_params = ['bodyparam']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_configuration',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CreateScalingConfigResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def create_scaling_group(self, request):
"""创建弹性伸缩组
伸缩组是具有相同应用场景的实例的集合,是启停伸缩策略和进行伸缩活动的基本单位。伸缩组内定义了最大实例数、期望实例数、最小实例数、虚拟私有云、子网、负载均衡等信息。默认最多可以创建10个伸缩组。如果伸缩组配置了负载均衡,在添加或移除实例时,会自动为实例绑定或解绑负载均衡监听器。如果伸缩组使用负载均衡健康检查方式,伸缩组中的实例需要启用负载均衡器的监听端口才能通过健康检查。端口启用可在安全组中进行配置,可参考添加安全组规则进行操作。
:param CreateScalingGroupRequest request
:return: CreateScalingGroupResponse
"""
return self.create_scaling_group_with_http_info(request)
def create_scaling_group_with_http_info(self, request):
"""创建弹性伸缩组
伸缩组是具有相同应用场景的实例的集合,是启停伸缩策略和进行伸缩活动的基本单位。伸缩组内定义了最大实例数、期望实例数、最小实例数、虚拟私有云、子网、负载均衡等信息。默认最多可以创建10个伸缩组。如果伸缩组配置了负载均衡,在添加或移除实例时,会自动为实例绑定或解绑负载均衡监听器。如果伸缩组使用负载均衡健康检查方式,伸缩组中的实例需要启用负载均衡器的监听端口才能通过健康检查。端口启用可在安全组中进行配置,可参考添加安全组规则进行操作。
:param CreateScalingGroupRequest request
:return: CreateScalingGroupResponse
"""
all_params = ['bodyparam']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_group',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CreateScalingGroupResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def create_scaling_notification(self, request):
"""配置伸缩组通知
给弹性伸缩组配置通知功能。每调用一次该接口,伸缩组即配置一个通知主题及其通知场景,每个伸缩组最多可以增加5个主题。通知主题由用户事先在SMN创建并进行订阅,当通知主题对应的通知场景出现时,伸缩组会向用户的订阅终端发送通知。
:param CreateScalingNotificationRequest request
:return: CreateScalingNotificationResponse
"""
return self.create_scaling_notification_with_http_info(request)
def create_scaling_notification_with_http_info(self, request):
"""配置伸缩组通知
给弹性伸缩组配置通知功能。每调用一次该接口,伸缩组即配置一个通知主题及其通知场景,每个伸缩组最多可以增加5个主题。通知主题由用户事先在SMN创建并进行订阅,当通知主题对应的通知场景出现时,伸缩组会向用户的订阅终端发送通知。
:param CreateScalingNotificationRequest request
:return: CreateScalingNotificationResponse
"""
all_params = ['scaling_group_id', 'bodyparam']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_group_id' in local_var_params:
path_params['scaling_group_id'] = local_var_params['scaling_group_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_notification/{scaling_group_id}',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CreateScalingNotificationResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def create_scaling_policy(self, request):
"""创建弹性伸缩策略
创建弹性伸缩策略。伸缩策略定义了伸缩组内实例的扩张和收缩操作。如果执行伸缩策略造成伸缩组期望实例数与伸缩组内实例数不符,弹性伸缩会自动调整实例资源,以匹配期望实例数。当前伸缩策略支持告警触发策略,周期触发策略,定时触发策略。在策略执行具体动作中,可设置实例变化的个数,或根据当前实例的百分比数进行伸缩。
:param CreateScalingPolicyRequest request
:return: CreateScalingPolicyResponse
"""
return self.create_scaling_policy_with_http_info(request)
def create_scaling_policy_with_http_info(self, request):
"""创建弹性伸缩策略
创建弹性伸缩策略。伸缩策略定义了伸缩组内实例的扩张和收缩操作。如果执行伸缩策略造成伸缩组期望实例数与伸缩组内实例数不符,弹性伸缩会自动调整实例资源,以匹配期望实例数。当前伸缩策略支持告警触发策略,周期触发策略,定时触发策略。在策略执行具体动作中,可设置实例变化的个数,或根据当前实例的百分比数进行伸缩。
:param CreateScalingPolicyRequest request
:return: CreateScalingPolicyResponse
"""
all_params = ['bodyparam']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_policy',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CreateScalingPolicyResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def create_scaling_tags(self, request):
"""创建标签
创建或删除指定资源的标签。每个伸缩组最多添加10个标签。
:param CreateScalingTagsRequest request
:return: CreateScalingTagsResponse
"""
return self.create_scaling_tags_with_http_info(request)
def create_scaling_tags_with_http_info(self, request):
"""创建标签
创建或删除指定资源的标签。每个伸缩组最多添加10个标签。
:param CreateScalingTagsRequest request
:return: CreateScalingTagsResponse
"""
all_params = ['resource_type', 'resource_id', 'bodyparam']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'resource_type' in local_var_params:
path_params['resource_type'] = local_var_params['resource_type']
if 'resource_id' in local_var_params:
path_params['resource_id'] = local_var_params['resource_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/{resource_type}/{resource_id}/tags/action',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='CreateScalingTagsResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def delete_lifecycle_hook(self, request):
"""删除生命周期挂钩
删除一个指定生命周期挂钩。伸缩组进行伸缩活动时,不允许删除该伸缩组内的生命周期挂钩。
:param DeleteLifecycleHookRequest request
:return: DeleteLifecycleHookResponse
"""
return self.delete_lifecycle_hook_with_http_info(request)
def delete_lifecycle_hook_with_http_info(self, request):
"""删除生命周期挂钩
删除一个指定生命周期挂钩。伸缩组进行伸缩活动时,不允许删除该伸缩组内的生命周期挂钩。
:param DeleteLifecycleHookRequest request
:return: DeleteLifecycleHookResponse
"""
all_params = ['scaling_group_id', 'lifecycle_hook_name']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_group_id' in local_var_params:
path_params['scaling_group_id'] = local_var_params['scaling_group_id']
if 'lifecycle_hook_name' in local_var_params:
path_params['lifecycle_hook_name'] = local_var_params['lifecycle_hook_name']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_lifecycle_hook/{scaling_group_id}/{lifecycle_hook_name}',
method='DELETE',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='DeleteLifecycleHookResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def delete_scaling_config(self, request):
"""删除弹性伸缩配置
删除一个指定弹性伸缩配置。
:param DeleteScalingConfigRequest request
:return: DeleteScalingConfigResponse
"""
return self.delete_scaling_config_with_http_info(request)
def delete_scaling_config_with_http_info(self, request):
"""删除弹性伸缩配置
删除一个指定弹性伸缩配置。
:param DeleteScalingConfigRequest request
:return: DeleteScalingConfigResponse
"""
all_params = ['scaling_configuration_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_configuration_id' in local_var_params:
path_params['scaling_configuration_id'] = local_var_params['scaling_configuration_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_configuration/{scaling_configuration_id}',
method='DELETE',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='DeleteScalingConfigResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def delete_scaling_group(self, request):
"""删除弹性伸缩组
删除一个指定弹性伸缩组。force_delete属性表示如果伸缩组存在ECS实例或正在进行伸缩活动,是否强制删除伸缩组并移出和释放ECS实例。默认值为no,表示不强制删除伸缩组。如果force_delete的值为no,必须满足以下两个条件,才能删除伸缩组:条件一:伸缩组没有正在进行的伸缩活动。条件二:伸缩组当前的ECS实例数量(current_instance_number)为0。如果force_delete的值为yes,伸缩组会被置于DELETING状态,拒绝接收新的伸缩活动请求,然后等待已有的伸缩活动完成,最后将伸缩组内所有ECS实例移出伸缩组(用户手动添加的ECS实例会被移出伸缩组,弹性伸缩自动创建的ECS实例会被自动删除)并删除伸缩组。
:param DeleteScalingGroupRequest request
:return: DeleteScalingGroupResponse
"""
return self.delete_scaling_group_with_http_info(request)
def delete_scaling_group_with_http_info(self, request):
"""删除弹性伸缩组
删除一个指定弹性伸缩组。force_delete属性表示如果伸缩组存在ECS实例或正在进行伸缩活动,是否强制删除伸缩组并移出和释放ECS实例。默认值为no,表示不强制删除伸缩组。如果force_delete的值为no,必须满足以下两个条件,才能删除伸缩组:条件一:伸缩组没有正在进行的伸缩活动。条件二:伸缩组当前的ECS实例数量(current_instance_number)为0。如果force_delete的值为yes,伸缩组会被置于DELETING状态,拒绝接收新的伸缩活动请求,然后等待已有的伸缩活动完成,最后将伸缩组内所有ECS实例移出伸缩组(用户手动添加的ECS实例会被移出伸缩组,弹性伸缩自动创建的ECS实例会被自动删除)并删除伸缩组。
:param DeleteScalingGroupRequest request
:return: DeleteScalingGroupResponse
"""
all_params = ['scaling_group_id', 'force_delete']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_group_id' in local_var_params:
path_params['scaling_group_id'] = local_var_params['scaling_group_id']
query_params = []
if 'force_delete' in local_var_params:
query_params.append(('force_delete', local_var_params['force_delete']))
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_group/{scaling_group_id}',
method='DELETE',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='DeleteScalingGroupResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
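# Hedged sketch: force_delete rides along as a query parameter, as the code above
# shows, so a forced deletion looks like:
# request = DeleteScalingGroupRequest()
# request.scaling_group_id = group_id
# request.force_delete = 'yes'   # remove/release instances, then delete the group
# client.delete_scaling_group(request)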
def delete_scaling_instance(self, request):
"""移出弹性伸缩组实例
从弹性伸缩组中移出一个指定实例。实例处于INSERVICE且移出后实例数不能小于伸缩组的最小实例数时才可以移出。当伸缩组没有伸缩活动时,才能移出实例。
:param DeleteScalingInstanceRequest request
:return: DeleteScalingInstanceResponse
"""
return self.delete_scaling_instance_with_http_info(request)
def delete_scaling_instance_with_http_info(self, request):
"""移出弹性伸缩组实例
从弹性伸缩组中移出一个指定实例。实例处于INSERVICE且移出后实例数不能小于伸缩组的最小实例数时才可以移出。当伸缩组没有伸缩活动时,才能移出实例。
:param DeleteScalingInstanceRequest request
:return: DeleteScalingInstanceResponse
"""
all_params = ['instance_id', 'instance_delete']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'instance_id' in local_var_params:
path_params['instance_id'] = local_var_params['instance_id']
query_params = []
if 'instance_delete' in local_var_params:
query_params.append(('instance_delete', local_var_params['instance_delete']))
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_group_instance/{instance_id}',
method='DELETE',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='DeleteScalingInstanceResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def delete_scaling_notification(self, request):
"""删除伸缩组通知
删除指定的弹性伸缩组中指定的通知。
:param DeleteScalingNotificationRequest request
:return: DeleteScalingNotificationResponse
"""
return self.delete_scaling_notification_with_http_info(request)
def delete_scaling_notification_with_http_info(self, request):
"""删除伸缩组通知
删除指定的弹性伸缩组中指定的通知。
:param DeleteScalingNotificationRequest request
:return: DeleteScalingNotificationResponse
"""
all_params = ['scaling_group_id', 'topic_urn']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_group_id' in local_var_params:
path_params['scaling_group_id'] = local_var_params['scaling_group_id']
if 'topic_urn' in local_var_params:
path_params['topic_urn'] = local_var_params['topic_urn']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_notification/{scaling_group_id}/{topic_urn}',
method='DELETE',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='DeleteScalingNotificationResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def delete_scaling_policy(self, request):
"""删除弹性伸缩策略
删除一个指定弹性伸缩策略。
:param DeleteScalingPolicyRequest request
:return: DeleteScalingPolicyResponse
"""
return self.delete_scaling_policy_with_http_info(request)
def delete_scaling_policy_with_http_info(self, request):
"""删除弹性伸缩策略
删除一个指定弹性伸缩策略。
:param DeleteScalingPolicyRequest request
:return: DeleteScalingPolicyResponse
"""
all_params = ['scaling_policy_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_policy_id' in local_var_params:
path_params['scaling_policy_id'] = local_var_params['scaling_policy_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_policy/{scaling_policy_id}',
method='DELETE',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='DeleteScalingPolicyResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def delete_scaling_tags(self, request):
"""删除标签
创建或删除指定资源的标签。每个伸缩组最多添加10个标签。
:param DeleteScalingTagsRequest request
:return: DeleteScalingTagsResponse
"""
return self.delete_scaling_tags_with_http_info(request)
def delete_scaling_tags_with_http_info(self, request):
"""删除标签
创建或删除指定资源的标签。每个伸缩组最多添加10个标签。
:param DeleteScalingTagsRequest request
:return: DeleteScalingTagsResponse
"""
all_params = ['resource_type', 'resource_id', 'bodyparam']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'resource_type' in local_var_params:
path_params['resource_type'] = local_var_params['resource_type']
if 'resource_id' in local_var_params:
path_params['resource_id'] = local_var_params['resource_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/{resource_type}/{resource_id}/tags/action',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='DeleteScalingTagsResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def enable_or_disable_scaling_group(self, request):
"""启用或停止弹性伸缩组
启用或停止一个指定弹性伸缩组。已停用状态的伸缩组,不会自动触发任何伸缩活动。当伸缩组正在进行伸缩活动,即使停用,正在进行的伸缩活动也不会立即停止。
:param EnableOrDisableScalingGroupRequest request
:return: EnableOrDisableScalingGroupResponse
"""
return self.enable_or_disable_scaling_group_with_http_info(request)
def enable_or_disable_scaling_group_with_http_info(self, request):
"""启用或停止弹性伸缩组
启用或停止一个指定弹性伸缩组。已停用状态的伸缩组,不会自动触发任何伸缩活动。当伸缩组正在进行伸缩活动,即使停用,正在进行的伸缩活动也不会立即停止。
:param EnableOrDisableScalingGroupRequest request
:return: EnableOrDisableScalingGroupResponse
"""
all_params = ['scaling_group_id', 'body_param']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_group_id' in local_var_params:
path_params['scaling_group_id'] = local_var_params['scaling_group_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_group/{scaling_group_id}/action',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='EnableOrDisableScalingGroupResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def execute_scaling_policy(self, request):
"""执行或启用或停止弹性伸缩策略。
立即执行或启用或停止一个指定弹性伸缩策略。当伸缩组、伸缩策略状态处于INSERVICE时,伸缩策略才能被正确执行,否则会执行失败。
:param ExecuteScalingPolicyRequest request
:return: ExecuteScalingPolicyResponse
"""
return self.execute_scaling_policy_with_http_info(request)
def execute_scaling_policy_with_http_info(self, request):
"""执行或启用或停止弹性伸缩策略。
立即执行或启用或停止一个指定弹性伸缩策略。当伸缩组、伸缩策略状态处于INSERVICE时,伸缩策略才能被正确执行,否则会执行失败。
:param ExecuteScalingPolicyRequest request
:return: ExecuteScalingPolicyResponse
"""
all_params = ['scaling_policy_id', 'bodyparam']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_policy_id' in local_var_params:
path_params['scaling_policy_id'] = local_var_params['scaling_policy_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_policy/{scaling_policy_id}/action',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ExecuteScalingPolicyResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
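# Hedged sketch: the action (execute/enable/disable) travels in the request body;
# the exact body model is not shown in this file.
# request = ExecuteScalingPolicyRequest()
# request.scaling_policy_id = policy_id
# request.body = action_body   # e.g. an 'execute' action
# client.execute_scaling_policy(request)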
def list_hook_instances(self, request):
"""查询伸缩实例挂起信息
添加生命周期挂钩后,当伸缩组进行伸缩活动时,实例将被挂钩挂起并置于等待状态,根据输入条件过滤查询弹性伸缩组中伸缩实例的挂起信息。可根据实例ID进行条件过滤查询。若不加过滤条件默认查询指定伸缩组内所有实例挂起信息。
:param ListHookInstancesRequest request
:return: ListHookInstancesResponse
"""
return self.list_hook_instances_with_http_info(request)
def list_hook_instances_with_http_info(self, request):
"""查询伸缩实例挂起信息
添加生命周期挂钩后,当伸缩组进行伸缩活动时,实例将被挂钩挂起并置于等待状态,根据输入条件过滤查询弹性伸缩组中伸缩实例的挂起信息。可根据实例ID进行条件过滤查询。若不加过滤条件默认查询指定伸缩组内所有实例挂起信息。
:param ListHookInstancesRequest request
:return: ListHookInstancesResponse
"""
all_params = ['scaling_group_id', 'instance_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_group_id' in local_var_params:
path_params['scaling_group_id'] = local_var_params['scaling_group_id']
query_params = []
if 'instance_id' in local_var_params:
query_params.append(('instance_id', local_var_params['instance_id']))
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_instance_hook/{scaling_group_id}/list',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListHookInstancesResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_life_cycle_hooks(self, request):
"""查询生命周期挂钩列表
根据伸缩组ID查询生命周期挂钩列表。
:param ListLifeCycleHooksRequest request
:return: ListLifeCycleHooksResponse
"""
return self.list_life_cycle_hooks_with_http_info(request)
def list_life_cycle_hooks_with_http_info(self, request):
"""查询生命周期挂钩列表
根据伸缩组ID查询生命周期挂钩列表。
:param ListLifeCycleHooksRequest request
:return: ListLifeCycleHooksResponse
"""
all_params = ['scaling_group_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_group_id' in local_var_params:
path_params['scaling_group_id'] = local_var_params['scaling_group_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_lifecycle_hook/{scaling_group_id}/list',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListLifeCycleHooksResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_resource_instances(self, request):
"""查询资源实例
根据项目ID查询指定资源类型的资源实例。资源、资源tag默认按照创建时间倒序。
:param ListResourceInstancesRequest request
:return: ListResourceInstancesResponse
"""
return self.list_resource_instances_with_http_info(request)
def list_resource_instances_with_http_info(self, request):
"""查询资源实例
根据项目ID查询指定资源类型的资源实例。资源、资源tag默认按照创建时间倒序。
:param ListResourceInstancesRequest request
:return: ListResourceInstancesResponse
"""
all_params = ['resource_type', 'bodyparam']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'resource_type' in local_var_params:
path_params['resource_type'] = local_var_params['resource_type']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/{resource_type}/resource_instances/action',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListResourceInstancesResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_scaling_activity_logs(self, request):
"""查询伸缩活动日志
根据输入条件过滤查询伸缩活动日志。查询结果分页显示。可根据起始时间,截止时间,起始行号,记录数进行条件过滤查询。若不加过滤条件默认查询最多20条伸缩活动日志信息。
:param ListScalingActivityLogsRequest request
:return: ListScalingActivityLogsResponse
"""
return self.list_scaling_activity_logs_with_http_info(request)
def list_scaling_activity_logs_with_http_info(self, request):
"""查询伸缩活动日志
根据输入条件过滤查询伸缩活动日志。查询结果分页显示。可根据起始时间,截止时间,起始行号,记录数进行条件过滤查询。若不加过滤条件默认查询最多20条伸缩活动日志信息。
:param ListScalingActivityLogsRequest request
:return: ListScalingActivityLogsResponse
"""
all_params = ['scaling_group_id', 'start_time', 'end_time', 'start_number', 'limit']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_group_id' in local_var_params:
path_params['scaling_group_id'] = local_var_params['scaling_group_id']
query_params = []
if 'start_time' in local_var_params:
query_params.append(('start_time', local_var_params['start_time']))
if 'end_time' in local_var_params:
query_params.append(('end_time', local_var_params['end_time']))
if 'start_number' in local_var_params:
query_params.append(('start_number', local_var_params['start_number']))
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit']))
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_activity_log/{scaling_group_id}',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListScalingActivityLogsResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
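# Hedged pagination sketch using the start_number/limit filters listed in
# all_params above (attribute-assignment style is illustrative):
# start = 0
# while True:
#     req = ListScalingActivityLogsRequest()
#     req.scaling_group_id = group_id
#     req.start_number = start
#     req.limit = 20
#     page = client.list_scaling_activity_logs(req)
#     # consume page; stop once a page returns fewer than 20 entries
#     start += 20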
def list_scaling_configs(self, request):
"""查询弹性伸缩配置列表
根据输入条件过滤查询弹性伸缩配置。查询结果分页显示。可以根据伸缩配置名称,镜像ID,起始行号,记录条数进行条件过滤查询。若不加过滤条件默认最多查询租户下20条伸缩配置信息。
:param ListScalingConfigsRequest request
:return: ListScalingConfigsResponse
"""
return self.list_scaling_configs_with_http_info(request)
def list_scaling_configs_with_http_info(self, request):
"""查询弹性伸缩配置列表
根据输入条件过滤查询弹性伸缩配置。查询结果分页显示。可以根据伸缩配置名称,镜像ID,起始行号,记录条数进行条件过滤查询。若不加过滤条件默认最多查询租户下20条伸缩配置信息。
:param ListScalingConfigsRequest request
:return: ListScalingConfigsResponse
"""
all_params = ['scaling_configuration_name', 'image_id', 'start_number', 'limit']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
if 'scaling_configuration_name' in local_var_params:
query_params.append(('scaling_configuration_name', local_var_params['scaling_configuration_name']))
if 'image_id' in local_var_params:
query_params.append(('image_id', local_var_params['image_id']))
if 'start_number' in local_var_params:
query_params.append(('start_number', local_var_params['start_number']))
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit']))
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_configuration',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListScalingConfigsResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_scaling_groups(self, request):
"""查询弹性伸缩组列表
根据输入条件过滤查询弹性伸缩组列表。查询结果分页显示。可根据伸缩组名称,伸缩配置ID,伸缩组状态,企业项目ID,起始行号,记录条数进行条件过滤查询。若不加过滤条件默认最多查询租户下20条伸缩组信息。
:param ListScalingGroupsRequest request
:return: ListScalingGroupsResponse
"""
return self.list_scaling_groups_with_http_info(request)
def list_scaling_groups_with_http_info(self, request):
"""查询弹性伸缩组列表
根据输入条件过滤查询弹性伸缩组列表。查询结果分页显示。可根据伸缩组名称,伸缩配置ID,伸缩组状态,企业项目ID,起始行号,记录条数进行条件过滤查询。若不加过滤条件默认最多查询租户下20条伸缩组信息。
:param ListScalingGroupsRequest request
:return: ListScalingGroupsResponse
"""
all_params = ['scaling_group_name', 'scaling_configuration_id', 'scaling_group_status', 'start_number', 'limit']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
if 'scaling_group_name' in local_var_params:
query_params.append(('scaling_group_name', local_var_params['scaling_group_name']))
if 'scaling_configuration_id' in local_var_params:
query_params.append(('scaling_configuration_id', local_var_params['scaling_configuration_id']))
if 'scaling_group_status' in local_var_params:
query_params.append(('scaling_group_status', local_var_params['scaling_group_status']))
if 'start_number' in local_var_params:
query_params.append(('start_number', local_var_params['start_number']))
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit']))
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_group',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListScalingGroupsResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
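# Hedged filter sketch (values illustrative; the query parameters mirror
# all_params above):
# req = ListScalingGroupsRequest()
# req.scaling_group_status = 'INSERVICE'
# req.limit = 20
# groups = client.list_scaling_groups(req)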
def list_scaling_instances(self, request):
"""查询弹性伸缩组中的实例列表
根据输入条件过滤查询弹性伸缩组中实例信息。查询结果分页显示。可根据实例在伸缩组中的生命周期状态,实例健康状态,实例保护状态,起始行号,记录条数进行条件过滤查询。若不加过滤条件默认查询组内最多20条实例信息
:param ListScalingInstancesRequest request
:return: ListScalingInstancesResponse
"""
return self.list_scaling_instances_with_http_info(request)
def list_scaling_instances_with_http_info(self, request):
"""查询弹性伸缩组中的实例列表
根据输入条件过滤查询弹性伸缩组中实例信息。查询结果分页显示。可根据实例在伸缩组中的生命周期状态,实例健康状态,实例保护状态,起始行号,记录条数进行条件过滤查询。若不加过滤条件默认查询组内最多20条实例信息
:param ListScalingInstancesRequest request
:return: ListScalingInstancesResponse
"""
all_params = ['scaling_group_id', 'life_cycle_state', 'health_status', 'protect_from_scaling_down', 'start_number', 'limit']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_group_id' in local_var_params:
path_params['scaling_group_id'] = local_var_params['scaling_group_id']
query_params = []
if 'life_cycle_state' in local_var_params:
query_params.append(('life_cycle_state', local_var_params['life_cycle_state']))
if 'health_status' in local_var_params:
query_params.append(('health_status', local_var_params['health_status']))
if 'protect_from_scaling_down' in local_var_params:
query_params.append(('protect_from_scaling_down', local_var_params['protect_from_scaling_down']))
if 'start_number' in local_var_params:
query_params.append(('start_number', local_var_params['start_number']))
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit']))
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_group_instance/{scaling_group_id}/list',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListScalingInstancesResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_scaling_notifications(self, request):
"""查询伸缩组通知列表
根据伸缩组ID查询指定弹性伸缩组的通知列表。
:param ListScalingNotificationsRequest request
:return: ListScalingNotificationsResponse
"""
return self.list_scaling_notifications_with_http_info(request)
def list_scaling_notifications_with_http_info(self, request):
"""查询伸缩组通知列表
根据伸缩组ID查询指定弹性伸缩组的通知列表。
:param ListScalingNotificationsRequest request
:return: ListScalingNotificationsResponse
"""
all_params = ['scaling_group_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_group_id' in local_var_params:
path_params['scaling_group_id'] = local_var_params['scaling_group_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_notification/{scaling_group_id}',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListScalingNotificationsResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_scaling_policies(self, request):
"""查询弹性伸缩策略列表
根据输入条件过滤查询弹性伸缩策略。查询结果分页显示。可根据伸缩策略名称,策略类型,伸缩策略ID,起始行号,记录数进行条件过滤查询。若不加过滤条件默认查询租户下指定伸缩组内最多20条伸缩策略信息。
:param ListScalingPoliciesRequest request
:return: ListScalingPoliciesResponse
"""
return self.list_scaling_policies_with_http_info(request)
def list_scaling_policies_with_http_info(self, request):
"""查询弹性伸缩策略列表
根据输入条件过滤查询弹性伸缩策略。查询结果分页显示。可根据伸缩策略名称,策略类型,伸缩策略ID,起始行号,记录数进行条件过滤查询。若不加过滤条件默认查询租户下指定伸缩组内最多20条伸缩策略信息。
:param ListScalingPoliciesRequest request
:return: ListScalingPoliciesResponse
"""
all_params = ['scaling_group_id', 'scaling_policy_name', 'scaling_policy_type', 'scaling_policy_id', 'start_number', 'limit']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_group_id' in local_var_params:
path_params['scaling_group_id'] = local_var_params['scaling_group_id']
query_params = []
if 'scaling_policy_name' in local_var_params:
query_params.append(('scaling_policy_name', local_var_params['scaling_policy_name']))
if 'scaling_policy_type' in local_var_params:
query_params.append(('scaling_policy_type', local_var_params['scaling_policy_type']))
if 'scaling_policy_id' in local_var_params:
query_params.append(('scaling_policy_id', local_var_params['scaling_policy_id']))
if 'start_number' in local_var_params:
query_params.append(('start_number', local_var_params['start_number']))
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit']))
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_policy/{scaling_group_id}/list',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListScalingPoliciesResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_scaling_policy_execute_logs(self, request):
"""查询策略执行日志
根据输入条件过滤查询策略执行的历史记录。查询结果分页显示。可根据日志ID,伸缩资源类型,伸缩资源ID,策略执行类型,查询额起始,查询截止时间,查询起始行号,查询记录数进行条件过滤查询。若不加过滤条件默认查询最多20条策略执行日志信息。
:param ListScalingPolicyExecuteLogsRequest request
:return: ListScalingPolicyExecuteLogsResponse
"""
return self.list_scaling_policy_execute_logs_with_http_info(request)
def list_scaling_policy_execute_logs_with_http_info(self, request):
"""查询策略执行日志
根据输入条件过滤查询策略执行的历史记录。查询结果分页显示。可根据日志ID,伸缩资源类型,伸缩资源ID,策略执行类型,查询额起始,查询截止时间,查询起始行号,查询记录数进行条件过滤查询。若不加过滤条件默认查询最多20条策略执行日志信息。
:param ListScalingPolicyExecuteLogsRequest request
:return: ListScalingPolicyExecuteLogsResponse
"""
all_params = ['scaling_policy_id', 'log_id', 'scaling_resource_type', 'scaling_resource_id', 'execute_type', 'start_time', 'end_time', 'start_number', 'limit']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_policy_id' in local_var_params:
path_params['scaling_policy_id'] = local_var_params['scaling_policy_id']
query_params = []
if 'log_id' in local_var_params:
query_params.append(('log_id', local_var_params['log_id']))
if 'scaling_resource_type' in local_var_params:
query_params.append(('scaling_resource_type', local_var_params['scaling_resource_type']))
if 'scaling_resource_id' in local_var_params:
query_params.append(('scaling_resource_id', local_var_params['scaling_resource_id']))
if 'execute_type' in local_var_params:
query_params.append(('execute_type', local_var_params['execute_type']))
if 'start_time' in local_var_params:
query_params.append(('start_time', local_var_params['start_time']))
if 'end_time' in local_var_params:
query_params.append(('end_time', local_var_params['end_time']))
if 'start_number' in local_var_params:
query_params.append(('start_number', local_var_params['start_number']))
if 'limit' in local_var_params:
query_params.append(('limit', local_var_params['limit']))
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_policy_execute_log/{scaling_policy_id}',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListScalingPolicyExecuteLogsResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_scaling_tag_infos_by_resource_id(self, request):
"""查询资源标签
根据项目ID和资源ID查询指定资源类型的资源标签列表。
:param ListScalingTagInfosByResourceIdRequest request
:return: ListScalingTagInfosByResourceIdResponse
"""
return self.list_scaling_tag_infos_by_resource_id_with_http_info(request)
def list_scaling_tag_infos_by_resource_id_with_http_info(self, request):
"""查询资源标签
根据项目ID和资源ID查询指定资源类型的资源标签列表。
:param ListScalingTagInfosByResourceIdRequest request
:return: ListScalingTagInfosByResourceIdResponse
"""
all_params = ['resource_type', 'resource_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'resource_type' in local_var_params:
path_params['resource_type'] = local_var_params['resource_type']
if 'resource_id' in local_var_params:
path_params['resource_id'] = local_var_params['resource_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/{resource_type}/{resource_id}/tags',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListScalingTagInfosByResourceIdResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def list_scaling_tag_infos_by_tenant_id(self, request):
"""查询标签
根据项目ID查询指定资源类型的标签列表。
:param ListScalingTagInfosByTenantIdRequest request
:return: ListScalingTagInfosByTenantIdResponse
"""
return self.list_scaling_tag_infos_by_tenant_id_with_http_info(request)
def list_scaling_tag_infos_by_tenant_id_with_http_info(self, request):
"""查询标签
根据项目ID查询指定资源类型的标签列表。
:param ListScalingTagInfosByTenantIdRequest request
:return: ListScalingTagInfosByTenantIdResponse
"""
all_params = ['resource_type']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'resource_type' in local_var_params:
path_params['resource_type'] = local_var_params['resource_type']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/{resource_type}/tags',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ListScalingTagInfosByTenantIdResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_life_cycle_hook(self, request):
"""查询生命周期挂钩详情
根据伸缩组ID及生命周期挂钩名称查询指定的生命周期挂钩详情。
:param ShowLifeCycleHookRequest request
:return: ShowLifeCycleHookResponse
"""
return self.show_life_cycle_hook_with_http_info(request)
def show_life_cycle_hook_with_http_info(self, request):
"""查询生命周期挂钩详情
根据伸缩组ID及生命周期挂钩名称查询指定的生命周期挂钩详情。
:param ShowLifeCycleHookRequest request
:return: ShowLifeCycleHookResponse
"""
all_params = ['scaling_group_id', 'lifecycle_hook_name']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_group_id' in local_var_params:
path_params['scaling_group_id'] = local_var_params['scaling_group_id']
if 'lifecycle_hook_name' in local_var_params:
path_params['lifecycle_hook_name'] = local_var_params['lifecycle_hook_name']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_lifecycle_hook/{scaling_group_id}/{lifecycle_hook_name}',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowLifeCycleHookResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_policy_and_instance_quota(self, request):
"""查询弹性伸缩策略和伸缩实例配额
根据伸缩组ID查询指定弹性伸缩组下的伸缩策略和伸缩实例的配额总数及已使用配额数。
:param ShowPolicyAndInstanceQuotaRequest request
:return: ShowPolicyAndInstanceQuotaResponse
"""
return self.show_policy_and_instance_quota_with_http_info(request)
def show_policy_and_instance_quota_with_http_info(self, request):
"""查询弹性伸缩策略和伸缩实例配额
根据伸缩组ID查询指定弹性伸缩组下的伸缩策略和伸缩实例的配额总数及已使用配额数。
:param ShowPolicyAndInstanceQuotaRequest request
:return: ShowPolicyAndInstanceQuotaResponse
"""
all_params = ['scaling_group_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_group_id' in local_var_params:
path_params['scaling_group_id'] = local_var_params['scaling_group_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/quotas/{scaling_group_id}',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowPolicyAndInstanceQuotaResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_resource_quota(self, request):
"""查询配额
查询指定租户下的弹性伸缩组、伸缩配置、伸缩带宽策略、伸缩策略和伸缩实例的配额总数及已使用配额数。
:param ShowResourceQuotaRequest request
:return: ShowResourceQuotaResponse
"""
return self.show_resource_quota_with_http_info(request)
def show_resource_quota_with_http_info(self, request):
"""查询配额
查询指定租户下的弹性伸缩组、伸缩配置、伸缩带宽策略、伸缩策略和伸缩实例的配额总数及已使用配额数。
:param ShowResourceQuotaRequest request
:return: ShowResourceQuotaResponse
"""
all_params = []
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/quotas',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowResourceQuotaResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_scaling_config(self, request):
"""查询弹性伸缩配置详情
根据伸缩配置ID查询一个弹性伸缩配置的详细信息。
:param ShowScalingConfigRequest request
:return: ShowScalingConfigResponse
"""
return self.show_scaling_config_with_http_info(request)
def show_scaling_config_with_http_info(self, request):
"""查询弹性伸缩配置详情
根据伸缩配置ID查询一个弹性伸缩配置的详细信息。
:param ShowScalingConfigRequest request
:return: ShowScalingConfigResponse
"""
all_params = ['scaling_configuration_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_configuration_id' in local_var_params:
path_params['scaling_configuration_id'] = local_var_params['scaling_configuration_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_configuration/{scaling_configuration_id}',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowScalingConfigResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_scaling_group(self, request):
"""查询弹性伸缩组详情
查询一个指定弹性伸缩组详情。
:param ShowScalingGroupRequest request
:return: ShowScalingGroupResponse
"""
return self.show_scaling_group_with_http_info(request)
def show_scaling_group_with_http_info(self, request):
"""查询弹性伸缩组详情
查询一个指定弹性伸缩组详情。
:param ShowScalingGroupRequest request
:return: ShowScalingGroupResponse
"""
all_params = ['scaling_group_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_group_id' in local_var_params:
path_params['scaling_group_id'] = local_var_params['scaling_group_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_group/{scaling_group_id}',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowScalingGroupResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def show_scaling_policy(self, request):
"""查询弹性伸缩策略详情
查询指定弹性伸缩策略信息。
:param ShowScalingPolicyRequest request
:return: ShowScalingPolicyResponse
"""
return self.show_scaling_policy_with_http_info(request)
def show_scaling_policy_with_http_info(self, request):
"""查询弹性伸缩策略详情
查询指定弹性伸缩策略信息。
:param ShowScalingPolicyRequest request
:return: ShowScalingPolicyResponse
"""
all_params = ['scaling_policy_id']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_policy_id' in local_var_params:
path_params['scaling_policy_id'] = local_var_params['scaling_policy_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_policy/{scaling_policy_id}',
method='GET',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='ShowScalingPolicyResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def update_life_cycle_hook(self, request):
"""修改生命周期挂钩
修改一个指定生命周期挂钩中的信息。
:param UpdateLifeCycleHookRequest request
:return: UpdateLifeCycleHookResponse
"""
return self.update_life_cycle_hook_with_http_info(request)
def update_life_cycle_hook_with_http_info(self, request):
"""修改生命周期挂钩
修改一个指定生命周期挂钩中的信息。
:param UpdateLifeCycleHookRequest request
:return: UpdateLifeCycleHookResponse
"""
all_params = ['scaling_group_id', 'lifecycle_hook_name', 'bodyparam']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_group_id' in local_var_params:
path_params['scaling_group_id'] = local_var_params['scaling_group_id']
if 'lifecycle_hook_name' in local_var_params:
path_params['lifecycle_hook_name'] = local_var_params['lifecycle_hook_name']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_lifecycle_hook/{scaling_group_id}/{lifecycle_hook_name}',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdateLifeCycleHookResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def update_scaling_group(self, request):
"""修改弹性伸缩组
修改一个指定弹性伸缩组中的信息。更换伸缩组的伸缩配置,伸缩组中已经存在的使用之前伸缩配置创建的云服务器云主机不受影响。伸缩组为没有正在进行的伸缩活动时,可以修改伸缩组的子网、可用区和负载均衡配置。当伸缩组的期望实例数改变时,会触发伸缩活动加入或移出实例。期望实例数必须大于或等于最小实例数,必须小于或等于最大实例数。
:param UpdateScalingGroupRequest request
:return: UpdateScalingGroupResponse
"""
return self.update_scaling_group_with_http_info(request)
def update_scaling_group_with_http_info(self, request):
"""修改弹性伸缩组
修改一个指定弹性伸缩组中的信息。更换伸缩组的伸缩配置,伸缩组中已经存在的使用之前伸缩配置创建的云服务器云主机不受影响。伸缩组为没有正在进行的伸缩活动时,可以修改伸缩组的子网、可用区和负载均衡配置。当伸缩组的期望实例数改变时,会触发伸缩活动加入或移出实例。期望实例数必须大于或等于最小实例数,必须小于或等于最大实例数。
:param UpdateScalingGroupRequest request
:return: UpdateScalingGroupResponse
"""
all_params = ['scaling_group_id', 'bodyparam']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_group_id' in local_var_params:
path_params['scaling_group_id'] = local_var_params['scaling_group_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_group/{scaling_group_id}',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdateScalingGroupResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def update_scaling_group_instance(self, request):
"""批量操作实例
批量移出伸缩组中的实例或批量添加伸缩组外的实例。批量对伸缩组中的实例设置或取消其实例保护属性。批量将伸缩组中的实例转入或移出备用状态。
:param UpdateScalingGroupInstanceRequest request
:return: UpdateScalingGroupInstanceResponse
"""
return self.update_scaling_group_instance_with_http_info(request)
def update_scaling_group_instance_with_http_info(self, request):
"""批量操作实例
批量移出伸缩组中的实例或批量添加伸缩组外的实例。批量对伸缩组中的实例设置或取消其实例保护属性。批量将伸缩组中的实例转入或移出备用状态。
:param UpdateScalingGroupInstanceRequest request
:return: UpdateScalingGroupInstanceResponse
"""
all_params = ['scaling_group_id', 'bodyparam']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_group_id' in local_var_params:
path_params['scaling_group_id'] = local_var_params['scaling_group_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_group_instance/{scaling_group_id}/action',
method='POST',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdateScalingGroupInstanceResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def update_scaling_policy(self, request):
"""修改弹性伸缩策略
修改指定弹性伸缩策略。
:param UpdateScalingPolicyRequest request
:return: UpdateScalingPolicyResponse
"""
return self.update_scaling_policy_with_http_info(request)
def update_scaling_policy_with_http_info(self, request):
"""修改弹性伸缩策略
修改指定弹性伸缩策略。
:param UpdateScalingPolicyRequest request
:return: UpdateScalingPolicyResponse
"""
all_params = ['scaling_policy_id', 'bodyparam']
local_var_params = {}
for attr in request.attribute_map:
if hasattr(request, attr):
local_var_params[attr] = getattr(request, attr)
collection_formats = {}
path_params = {}
if 'scaling_policy_id' in local_var_params:
path_params['scaling_policy_id'] = local_var_params['scaling_policy_id']
query_params = []
header_params = {}
form_params = {}
body_params = None
if 'body' in local_var_params:
body_params = local_var_params['body']
if isinstance(request, SdkStreamRequest):
body_params = request.get_file_stream()
response_headers = []
header_params['Content-Type'] = http_utils.select_header_content_type(
['application/json;charset=UTF-8'])
auth_settings = []
return self.call_api(
resource_path='/autoscaling-api/v1/{project_id}/scaling_policy/{scaling_policy_id}',
method='PUT',
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body_params,
post_params=form_params,
response_type='UpdateScalingPolicyResponse',
response_headers=response_headers,
auth_settings=auth_settings,
collection_formats=collection_formats,
request_type=request.__class__.__name__)
def call_api(self, resource_path, method, path_params=None, query_params=None, header_params=None, body=None,
post_params=None, response_type=None, response_headers=None, auth_settings=None,
collection_formats=None, request_type=None):
"""Makes the HTTP request and returns deserialized data.
:param resource_path: Path to method endpoint.
:param method: Method to call.
:param path_params: Path parameters in the url.
:param query_params: Query parameters in the url.
:param header_params: Header parameters to be placed in the request header.
:param body: Request body.
:param post_params dict: Request post form parameters,
for `application/x-www-form-urlencoded`, `multipart/form-data`.
:param auth_settings list: Auth Settings names for the request.
:param response_type: Response data type.
:param response_headers: Header should be added to response data.
:param collection_formats: dict of collection formats for path, query,
header, and post parameters.
:param request_type: Request data type.
:return:
Return the response directly.
"""
return self.do_http_request(
method=method,
resource_path=resource_path,
path_params=path_params,
query_params=query_params,
header_params=header_params,
body=body,
post_params=post_params,
response_type=response_type,
response_headers=response_headers,
collection_formats=collection_formats,
request_type=request_type)
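# The per-API methods above all follow one template: copy the request's declared
# attributes via attribute_map, split them into path/query/header/body parameters,
# and delegate to call_api. A self-contained sketch of that copying idiom follows
# (the _Toy* names are hypothetical and not part of this SDK):
class _ToyRequest:
    attribute_map = {'scaling_group_id': 'scaling_group_id', 'limit': 'limit'}

    def __init__(self, scaling_group_id=None, limit=None):
        self.scaling_group_id = scaling_group_id
        self.limit = limit

def _collect_params(request):
    # Mirrors the "for attr in request.attribute_map" loop used by every method above.
    return {attr: getattr(request, attr)
            for attr in request.attribute_map
            if hasattr(request, attr)}

# _collect_params(_ToyRequest('group-1', 20)) == {'scaling_group_id': 'group-1', 'limit': 20}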
| 32.965251
| 334
| 0.65465
| 8,298
| 85,380
| 6.321885
| 0.059412
| 0.03904
| 0.06832
| 0.027145
| 0.918889
| 0.905412
| 0.891763
| 0.855392
| 0.838407
| 0.767628
| 0
| 0.001782
| 0.263926
| 85,380
| 2,589
| 335
| 32.977984
| 0.832941
| 0.166175
| 0
| 0.818057
| 0
| 0
| 0.135656
| 0.072684
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055404
| false
| 0
| 0.00684
| 0.000684
| 0.119015
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 7163eb38139e50afb89bdd7489ca16425eb6fa3a
| 239
| py
| Python
| tests/parser/recursive_aggregates.5.test.py
| veltri/DLV2
| 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
| ["Apache-2.0"] | null | null | null
| tests/parser/recursive_aggregates.5.test.py
| veltri/DLV2
| 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
| ["Apache-2.0"] | null | null | null
| tests/parser/recursive_aggregates.5.test.py
| veltri/DLV2
| 944aaef803aa75e7ec51d7e0c2b0d964687fdd0e
| ["Apache-2.0"] | null | null | null |
input = """
d(1).
d(2).
d(3).
p(5).
p(6).
d(4) :- #max{V : d(V)} > 3.
p(7) :- #count{X : p(X)} > 2.
"""
output = """
d(1).
d(2).
d(3).
p(5).
p(6).
d(4) :- #max{V : d(V)} > 3.
p(7) :- #count{X : p(X)} > 2.
"""
| 8.851852
| 30
| 0.297071
| 50
| 239
| 1.42
| 0.3
| 0.112676
| 0.084507
| 0.112676
| 0.84507
| 0.84507
| 0.84507
| 0.84507
| 0.84507
| 0.84507
| 0
| 0.110429
| 0.317992
| 239
| 26
| 31
| 9.192308
| 0.325153
| 0
| 0
| 0.888889
| 0
| 0
| 0.857143
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 10
| 719972fb81d5c7c9ca79507dd0490152adb2a6b1
| 155
| py
| Python
| loldib/getratings/models/NA/na_trundle/__init__.py
| koliupy/loldib
| c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
| ["Apache-2.0"] | null | null | null
| loldib/getratings/models/NA/na_trundle/__init__.py
| koliupy/loldib
| c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
| ["Apache-2.0"] | null | null | null
| loldib/getratings/models/NA/na_trundle/__init__.py
| koliupy/loldib
| c9ab94deb07213cdc42b5a7c26467cdafaf81b7f
| ["Apache-2.0"] | null | null | null |
from .na_trundle_top import *
from .na_trundle_jng import *
from .na_trundle_mid import *
from .na_trundle_bot import *
from .na_trundle_sup import *
| 25.833333
| 30
| 0.774194
| 25
| 155
| 4.4
| 0.36
| 0.272727
| 0.590909
| 0.690909
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.16129
| 155
| 5
| 31
| 31
| 0.846154
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| null | 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
| 0
| 8
| 71b99a26472581183ef381df6a9f540dca1747d2
| 1,060
| py
| Python
| Python_Basics/05_Nested_Conditional_Statements_Exercise/09_Operations_Between_Numbers.py
| Dochko0/Python
| e9612c4e842cfd3d9a733526cc7485765ef2238f
| ["MIT"] | null | null | null
| Python_Basics/05_Nested_Conditional_Statements_Exercise/09_Operations_Between_Numbers.py
| Dochko0/Python
| e9612c4e842cfd3d9a733526cc7485765ef2238f
| ["MIT"] | null | null | null
| Python_Basics/05_Nested_Conditional_Statements_Exercise/09_Operations_Between_Numbers.py
| Dochko0/Python
| e9612c4e842cfd3d9a733526cc7485765ef2238f
| ["MIT"] | null | null | null |
num1 = float(input())
num2 = float(input())
operator = input()
if operator == '+':
result = num1 + num2
if result % 2 == 0:
print(f'{num1:.0f} + {num2:.0f} = {result:.0f} - even')
else:
print(f'{num1:.0f} + {num2:.0f} = {result:.0f} - odd')
elif operator == '-':
result = num1 - num2
if result % 2 == 0:
print(f'{num1:.0f} - {num2:.0f} = {result:.0f} - even')
else:
print(f'{num1:.0f} - {num2:.0f} = {result:.0f} - odd')
elif operator == '*':
result = num1 * num2
if result % 2 == 0:
print(f'{num1:.0f} * {num2:.0f} = {result:.0f} - even')
else:
print(f'{num1:.0f} * {num2:.0f} = {result:.0f} - odd')
elif operator == '/':
if num2 == 0:
print(f'Cannot divide {num1:.0f} by zero')
else:
result = num1 / num2
print(f'{num1:.0f} / {num2:.0f} = {result:.2f}')
elif operator == '%':
if num2 == 0:
print(f'Cannot divide {num1:.0f} by zero')
else:
result = num1 % num2
print(f'{num1:.0f} % {num2:.0f} = {result:.0f}')
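# Example sessions (stdin lines, then the printed result; behavior follows the
# branches above):
#   inputs: 10, 12, +   ->  prints "10 + 12 = 22 - even"
#   inputs: 7, 2, *     ->  prints "7 * 2 = 14 - even"
#   inputs: 10, 0, /    ->  prints "Cannot divide 10 by zero"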
| 30.285714
| 63
| 0.492453
| 142
| 1,060
| 3.676056
| 0.147887
| 0.114943
| 0.153257
| 0.183908
| 0.913793
| 0.913793
| 0.913793
| 0.913793
| 0.909962
| 0.909962
| 0
| 0.086387
| 0.279245
| 1,060
| 34
| 64
| 31.176471
| 0.596859
| 0
| 0
| 0.363636
| 0
| 0
| 0.388679
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.30303
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| e07db7c6bf60325bcaf41834fa3d46aae2709da6
| 617
| py
| Python
| rastervision/v2/rv/data/__init__.py
| carderne/raster-vision
| 915fbcd3263d8f2193e65c2cd0eb53e050a47a01
| ["Apache-2.0"] | 1
| 2019-11-07T10:02:23.000Z
| 2019-11-07T10:02:23.000Z
| rastervision/v2/rv/data/__init__.py
| carderne/raster-vision
| 915fbcd3263d8f2193e65c2cd0eb53e050a47a01
| ["Apache-2.0"] | null | null | null
| rastervision/v2/rv/data/__init__.py
| carderne/raster-vision
| 915fbcd3263d8f2193e65c2cd0eb53e050a47a01
| ["Apache-2.0"] | null | null | null |
# flake8: noqa
from rastervision.v2.rv.data.activate_mixin import *
from rastervision.v2.rv.data.raster_source import *
from rastervision.v2.rv.data.crs_transformer import *
from rastervision.v2.rv.data.label import *
from rastervision.v2.rv.data.vector_source import *
from rastervision.v2.rv.data.label_source import *
from rastervision.v2.rv.data.label_store import *
from rastervision.v2.rv.data.scene import *
from rastervision.v2.rv.data.scene_config import *
from rastervision.v2.rv.data.dataset import *
from rastervision.v2.rv.data.dataset_config import *
from rastervision.v2.rv.data.class_config import *
| 41.133333
| 53
| 0.815235
| 95
| 617
| 5.2
| 0.221053
| 0.388664
| 0.437247
| 0.48583
| 0.856275
| 0.807692
| 0.686235
| 0.165992
| 0
| 0
| 0
| 0.023009
| 0.084279
| 617
| 14
| 54
| 44.071429
| 0.851327
| 0.019449
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| true
| 0
| 1
| 0
| 1
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 1
| 0
| 1
| 0
|
0
| 8
|
e0a9004228826775b216c52a54f1ba312fff5ef9
| 21,867
|
py
|
Python
|
DEPPformat.py
|
jack9950/reports
|
5b113ddfb57914936438b8e6e594be02956e26d6
|
[
"MIT"
] | null | null | null |
DEPPformat.py
|
jack9950/reports
|
5b113ddfb57914936438b8e6e594be02956e26d6
|
[
"MIT"
] | null | null | null |
DEPPformat.py
|
jack9950/reports
|
5b113ddfb57914936438b8e6e594be02956e26d6
|
[
"MIT"
] | null | null | null |
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# This contains data for the salesemail.py and breakdownemail.py scripts
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# ------------------------------------------------------------------------------
# This is data for the salesemail.py script
# ------------------------------------------------------------------------------
agentRowStart = "<tr style='height:15.0pt'>"
agentRowEnd = "</tr>"
agentIDStart = "<td width=63 nowrap valign=bottom style='width:47.55pt;border:none;border-left:solid windowtext 1.0pt;padding:0in 5.4pt 0in 5.4pt;height:15.0pt'><p class=MsoNormal><span style='color:black'>"
agentIDEnd = "<o:p></o:p></span></p></td>"
agentNameStart = "<td width=191 nowrap valign=bottom style='width:143.0pt;border:none;border-right:solid windowtext 1.0pt;padding:0in 5.4pt 0in 5.4pt;height:15.0pt'><p class=MsoNormal><span style='color:black'>"
agentNameEnd = "<o:p></o:p></span></p></td>"
callsHandledStart = "<td width=80 nowrap valign=bottom style='width:60.3pt;padding:0in 5.4pt 0in 5.4pt;height:15.0pt'><p class=MsoNormal align=center style='text-align:center'><span style='color:black'>"
callsHandledEnd = "<o:p></o:p></span></p></td>"
salesCallsHandledStart = "<td width=80 nowrap valign=bottom style='width:60.3pt;border:none;border-right:solid windowtext 1.0pt;padding:0in 5.4pt 0in 5.4pt;height:15.0pt'><p class=MsoNormal align=center style='text-align:center'><span style='color:black'>"
salesCallsHandledEnd = "<o:p></o:p></span></p></td>"
bounceSalesStart = "<td width=73 nowrap valign=bottom style='width:55.05pt;padding:0in 5.4pt 0in 5.4pt;height:15.0pt'><p class=MsoNormal align=center style='text-align:center'><span style='color:black'>"
bounceSalesEnd = "<o:p></o:p></span></p></td>"
closeRateStartRed = "<td width=57 nowrap valign=bottom style='width:43.05pt;background:#FFC7CE;padding:0in 5.4pt 0in 5.4pt;height:15.0pt'><p class=MsoNormal align=center style='text-align:center'><b><span style='color:#9C0006'>"
closeRateStartYellow = "<td width=57 nowrap valign=bottom style='width:43.05pt;background:#FFEB9C;padding:0in 5.4pt 0in 5.4pt;height:15.0pt'><p class=MsoNormal align=center style='text-align:center'><b><span style='color:#9C6500'>"
closeRateStartGreen = "<td width=57 nowrap valign=bottom style='width:43.05pt;background:#C6EFCE;padding:0in 5.4pt 0in 5.4pt;height:15.0pt'><p class=MsoNormal align=center style='text-align:center'><b><span style='color:#006100'>"
closeRateEnd = "<o:p></o:p></span></b></p></td>"
FCPSalesStart = "<td width=55 nowrap valign=bottom style='width:41.55pt;border:none;border-left:solid windowtext 1.0pt;padding:0in 5.4pt 0in 5.4pt;height:15.0pt'><p class=MsoNormal align=center style='text-align:center'><span style='color:black'>"
FCPSalesEnd = "<o:p></o:p></span></p></td>"
closeRateStartNoColor = "<td width=57 nowrap valign=bottom style='width:43.05pt;padding:0in 5.4pt 0in 5.4pt;height:15.0pt'>"
DEPPSalesStartNoColor = "<td width=55 nowrap valign=bottom style='width:41.55pt; border:none; border-right:solid windowtext 1.0pt; padding:0in 5.4pt 0in 5.4pt;height:15.0pt'><p class=MsoNormal align=center style='text-align:center'><span style='color:black'>"
# DEPPSalesStartNoColor = "<td width=55 nowrap valign=bottom style='width:41.55pt; border:none; border-right:solid windowtext 1.0pt; padding:0in 5.4pt 0in 5.4pt;height:15.0pt'><p class=MsoNormal align=center style='text-align:center'><span style='color:black'>"
DEPPSalesStart = "<td width=55 nowrap valign=bottom style='width:41.55pt;border:none;border-right:solid windowtext 1.0pt;padding:0in 5.4pt 0in 5.4pt;height:15.0pt'><p class=MsoNormal align=center style='text-align:center'><span style='color:black'>"
# Note: this re-definition of closeRateStartGreen is byte-identical to the assignment above and is redundant.
closeRateStartGreen = "<td width=57 nowrap valign=bottom style='width:43.05pt;background:#C6EFCE;padding:0in 5.4pt 0in 5.4pt;height:15.0pt'><p class=MsoNormal align=center style='text-align:center'><b><span style='color:#006100'>"
DEPPSalesStartGreen = "<td width=55 nowrap valign=bottom style='width:41.55pt;background:#C6EFCE;border:none;border-right:solid windowtext 1.0pt;padding:0in 5.4pt 0in 5.4pt;height:15.0pt'><p class=MsoNormal align=center style='text-align:center'><b><span style='color:#006100'>"
# DEPPSalesStartGreen = "<td width=57 nowrap valign=bottom style='width:43.05pt;background:#C6EFCE;padding:0in 5.4pt 0in 5.4pt;height:15.0pt'><p class=MsoNormal align=center style='text-align:center'><b><span style='color:#006100'>"
DEPPSalesEnd = "<o:p></o:p></b></span></p></td>"
supRowStart = "<tr style = 'height:.25in'>"
supRowEnd = "</tr>"
supIDStart = "<td width=63 nowrap valign=bottom style='width:47.55pt;border:solid windowtext 1.0pt;border-right:none;background:#9BC2E6;padding:0in 5.4pt 0in 5.4pt;height:.25in'><p class=MsoNormal><b><span style='font-size:13.0pt;color:black'>"
supNameStart = "<td width=191 nowrap valign=bottom style='width:143.0pt;border:solid windowtext 1.0pt;border-left:none;background:#9BC2E6;padding:0in 5.4pt 0in 5.4pt;height:.25in'><p class=MsoNormal><b><span style='font-size:13.0pt;color:black'>"
supCallsHandledStart = "<td width=80 nowrap valign=bottom style='width:60.3pt;border-top:solid windowtext 1.0pt;border-left:none;border-bottom:solid windowtext 1.0pt;border-right:none;background:#9BC2E6;padding:0in 5.4pt 0in 5.4pt;height:.25in'><p class=MsoNormal align=center style='text-align:center'><b><span style='font-size:13.0pt;color:black'>"
supSalesCallsHandledStart = "<td width=80 nowrap valign=bottom style='width:60.3pt;border:solid windowtext 1.0pt;border-left:none;background:#9BC2E6;padding:0in 5.4pt 0in 5.4pt;height:.25in'><p class=MsoNormal align=center style='text-align:center'><b><span style='font-size:13.0pt;color:black'>"
supBounceSalesStart = "<td width=73 nowrap valign=bottom style='width:55.05pt;border-top:solid windowtext 1.0pt;border-left:none;border-bottom:solid windowtext 1.0pt;border-right:none;background:#9BC2E6;padding:0in 5.4pt 0in 5.4pt;height:.25in'><p class=MsoNormal align=center style='text-align:center'><b><span style='font-size:13.0pt;color:black'>"
supCloseRateStartRed = "<td width=57 nowrap valign=bottom style='width:43.05pt;border-top:solid windowtext 1.0pt;border-left:none;border-bottom:solid windowtext 1.0pt;border-right:none;background:#FFC7CE;padding:0in 5.4pt 0in 5.4pt;height:.25in'><p class=MsoNormal align=center style='text-align:center'><b><span style='font-size:13.0pt;color:#9C0006'>"
supCloseRateStartYellow = "<td width=57 nowrap valign=bottom style='width:43.05pt;border-top:solid windowtext 1.0pt;border-left:none;border-bottom:solid windowtext 1.0pt;border-right:none;background:#FFEB9C;padding:0in 5.4pt 0in 5.4pt;height:.25in'><p class=MsoNormal align=center style='text-align:center'><b><span style='font-size:13.0pt;color:#9C6500'>"
supCloseRateStartGreen = "<td width=57 nowrap valign=bottom style='width:43.05pt;border-top:solid windowtext 1.0pt;border-left:none;border-bottom:solid windowtext 1.0pt;border-right:none;background:#C6EFCE;padding:0in 5.4pt 0in 5.4pt;height:.25in'><p class=MsoNormal align=center style='text-align:center'><b><span style='font-size:13.0pt;color:#006100'>"
supCloseRateStartNoColor = "<td width=57 nowrap valign=bottom style='width:43.05pt;border-top:solid windowtext 1.0pt;border-left:none;border-bottom:solid windowtext 1.0pt;border-right:none;background:#9BC2E6;padding:0in 5.4pt 0in 5.4pt;height:.25in'><p class=MsoNormal align=center style='text-align:center'><b><span style='font-size:13.0pt;color:black'>"
supFCPSalesStart = "<td width=55 nowrap valign=bottom style='width:41.55pt;border:solid windowtext 1.0pt;border-right:none;background:#9BC2E6;padding:0in 5.4pt 0in 5.4pt;height:.25in'><p class=MsoNormal align=center style='text-align:center'><b><span style='font-size:13.0pt;color:black'>"
supDEPPSalesStart = "<td width=55 nowrap valign=bottom style='width:41.55pt;border:solid windowtext 1.0pt;border-left:none;background:#9BC2E6;padding:0in 5.4pt 0in 5.4pt;height:.25in'><p class=MsoNormal align=center style='text-align:center'><b><span style='font-size:13.0pt;color:black'>"
grandTotalRowStart = "<tr style='height:.25in'>"
grandTotalRowEnd = "</tr>"
gTotalIDStart = "<td width=63 nowrap valign=bottom style='width:47.55pt;border-top:none;border-left:solid windowtext 1.0pt;border-bottom:solid windowtext 1.0pt;border-right:none;background:#DBDBDB;padding:0in 5.4pt 0in 5.4pt;height:.25in'><p class=MsoNormal><b><span style='font-size:13.0pt;color:black'>"
gTotalNameStart = "<td width=191 nowrap valign=bottom style='width:143.0pt;border-top:none;border-left:none;border-bottom:solid windowtext 1.0pt;border-right:solid windowtext 1.0pt;background:#DBDBDB;padding:0in 5.4pt 0in 5.4pt;height:.25in'><p class=MsoNormal><b><span style='font-size:13.0pt;color:black'>"
gTotalCallsHandledStart = "<td width=80 nowrap valign=bottom style='width:60.3pt;border:none;border-bottom:solid windowtext 1.0pt;background:#DBDBDB;padding:0in 5.4pt 0in 5.4pt;height:.25in'><p class=MsoNormal align=center style='text-align:center'><b><span style='font-size:13.0pt;color:black'>"
gTotalSalesCallsHandledStart = "<td width=80 nowrap valign=bottom style='width:60.3pt;border-top:none;border-left:none;border-bottom:solid windowtext 1.0pt;border-right:solid windowtext 1.0pt;background:#DBDBDB;padding:0in 5.4pt 0in 5.4pt;height:.25in'><p class=MsoNormal align=center style='text-align:center'><b><span style='font-size:13.0pt;color:black'>"
gTotalBounceSalesStart = "<td width=73 nowrap valign=bottom style='width:55.05pt;border:none;border-bottom:solid windowtext 1.0pt;background:#DBDBDB;padding:0in 5.4pt 0in 5.4pt;height:.25in'><p class=MsoNormal align=center style='text-align:center'><b><span style='font-size:13.0pt;color:black'>"
gTotalCloseRateStartRed = "<td width=57 nowrap valign=bottom style='width:43.05pt;border:none;border-bottom:solid windowtext 1.0pt;background:#FFC7CE;padding:0in 5.4pt 0in 5.4pt;height:.25in'><p class=MsoNormal align=center style='text-align:center'><b><span style='font-size:13.0pt;color:#9C0006'>"
gTotalCloseRateStartYellow = "<td width=57 nowrap valign=bottom style='width:43.05pt;border:none;border-bottom:solid windowtext 1.0pt;background:#FFEB9C;padding:0in 5.4pt 0in 5.4pt;height:.25in'><p class=MsoNormal align=center style='text-align:center'><b><span style='font-size:13.0pt;color:#9C6500'>"
gTotalCloseRateStartGreen = "<td width=57 nowrap valign=bottom style='width:43.05pt;border:none;border-bottom:solid windowtext 1.0pt;background:#C6EFCE;padding:0in 5.4pt 0in 5.4pt;height:.25in'><p class=MsoNormal align=center style='text-align:center'><b><span style='font-size:13.0pt;color:#006100'>"
gTotalFCPSalesStart = "<td width=55 nowrap valign=bottom style='width:41.55pt;border-top:none;border-left:solid windowtext 1.0pt;border-bottom:solid windowtext 1.0pt;border-right:none;background:#DBDBDB;padding:0in 5.4pt 0in 5.4pt;height:.25in'><p class=MsoNormal align=center style='text-align:center'><b><span style='font-size:13.0pt;color:black'>"
gTotalDEPPSalesStart = "<td width=55 nowrap valign=bottom style='width:41.55pt;border-top:none;border-left:none;border-bottom:solid windowtext 1.0pt;border-right:solid windowtext 1.0pt;background:#DBDBDB;padding:0in 5.4pt 0in 5.4pt;height:.25in'><p class=MsoNormal align=center style='text-align:center'><b><span style='font-size:13.0pt;color:black'>"
topOfTable = """
<html xmlns:v="urn:schemas-microsoft-com:vml" xmlns:o="urn:schemas-microsoft-com:office:office" xmlns:w="urn:schemas-microsoft-com:office:word" xmlns:x="urn:schemas-microsoft-com:office:excel" xmlns:m="http://schemas.microsoft.com/office/2004/12/omml" xmlns="http://www.w3.org/TR/REC-html40">
<head>
<META HTTP-EQUIV="Content-Type" CONTENT="text/html; charset=us-ascii">
<meta name=Generator content="Microsoft Word 15 (filtered medium)"><style><!--
/* Font Definitions */
@font-face
{font-family:"Cambria Math";
panose-1:2 4 5 3 5 4 6 3 2 4;}
@font-face
{font-family:Calibri;
panose-1:2 15 5 2 2 2 4 3 2 4;}
/* Style Definitions */
p.MsoNormal, li.MsoNormal, div.MsoNormal
{margin:0in;
margin-bottom:.0001pt;
font-size:11.0pt;
font-family:"Calibri",sans-serif;}
a:link, span.MsoHyperlink
{mso-style-priority:99;
color:#0563C1;
text-decoration:underline;}
a:visited, span.MsoHyperlinkFollowed
{mso-style-priority:99;
color:#954F72;
text-decoration:underline;}
span.EmailStyle17
{mso-style-type:personal;
font-family:"Calibri",sans-serif;
color:windowtext;}
span.EmailStyle18
{mso-style-type:personal-reply;
font-family:"Calibri",sans-serif;
color:#1F497D;}
.MsoChpDefault
{mso-style-type:export-only;
font-size:10.0pt;}
@page WordSection1
{size:8.5in 11.0in;
margin:1.0in 1.0in 1.0in 1.0in;}
div.WordSection1
{page:WordSection1;}
--></style><!--[if gte mso 9]><xml>
<o:shapedefaults v:ext="edit" spidmax="1026" />
</xml><![endif]--><!--[if gte mso 9]><xml>
<o:shapelayout v:ext="edit">
<o:idmap v:ext="edit" data="1" />
</o:shapelayout></xml><![endif]-->
</head>
<body lang=EN-US link="#0563C1" vlink="#954F72">
<br>
<div class=WordSection1>
<table class=MsoNormalTable border=0 cellspacing=0 cellpadding=0 width=0 style='width:364.1pt;border-collapse:collapse'>
<tr style='height:54.75pt'>
<td width=63 valign=bottom style='width:47.55pt;border:solid windowtext 1.0pt;border-right:none;background:#9BC2E6;padding:0in 5.4pt 0in 5.4pt;height:54.75pt'><p class=MsoNormal><b><span style='font-size:14.0pt;color:black'>Agent ID<o:p></o:p></span></b></p></td>
<td width=191 valign=bottom style='width:143.0pt;border:solid windowtext 1.0pt;border-left:none;background:#9BC2E6;padding:0in 5.4pt 0in 5.4pt;height:54.75pt'><p class=MsoNormal><b><span style='font-size:14.0pt;color:black'>Agent Name<o:p></o:p></span></b></p></td>
<td width=55 valign=bottom style='width:41.55pt;border-top:solid windowtext 1.0pt;border-left:none;border-bottom:solid windowtext 1.0pt;border-right:solid windowtext 1.0pt;background:#9BC2E6;padding:0in 5.4pt 0in 5.4pt;height:54.75pt'><p class=MsoNormal><b><span style='font-size:14.0pt;color:black'>DEPP Sales<o:p></o:p></span></b></p></td>
</tr>
"""
# ------------------------------------------------------------------------------
# This is data for the breakdown email
# ------------------------------------------------------------------------------
topOfBreakdownTable = """
<html xmlns:v="urn:schemas-microsoft-com:vml" xmlns:o="urn:schemas-microsoft-com:office:office" xmlns:w="urn:schemas-microsoft-com:office:word" xmlns:x="urn:schemas-microsoft-com:office:excel" xmlns:m="http://schemas.microsoft.com/office/2004/12/omml" xmlns="http://www.w3.org/TR/REC-html40">
<head>
<meta http-equiv=Content-Type content="text/html; charset=us-ascii">
<meta name=Generator content="Microsoft Word 15 (filtered medium)">
<style><!--
/* Font Definitions */
@font-face
{font-family:"Cambria Math";
panose-1:2 4 5 3 5 4 6 3 2 4;}
@font-face
{font-family:Calibri;
panose-1:2 15 5 2 2 2 4 3 2 4;}
/* Style Definitions */
p.MsoNormal, li.MsoNormal, div.MsoNormal
{margin:0in;
margin-bottom:.0001pt;
font-size:11.0pt;
font-family:"Calibri",sans-serif;}
a:link, span.MsoHyperlink
{mso-style-priority:99;
color:#0563C1;
text-decoration:underline;}
a:visited, span.MsoHyperlinkFollowed
{mso-style-priority:99;
color:#954F72;
text-decoration:underline;}
span.EmailStyle17
{mso-style-type:personal-compose;
font-family:"Calibri",sans-serif;
color:windowtext;}
.MsoChpDefault
{mso-style-type:export-only;
font-family:"Calibri",sans-serif;}
@page WordSection1
{size:8.5in 11.0in;
margin:1.0in 1.0in 1.0in 1.0in;}
div.WordSection1
{page:WordSection1;}
--></style><!--[if gte mso 9]><xml>
<o:shapedefaults v:ext="edit" spidmax="1026" />
</xml><![endif]--><!--[if gte mso 9]><xml>
<o:shapelayout v:ext="edit">
<o:idmap v:ext="edit" data="1" />
</o:shapelayout></xml><![endif]-->
</head>
<body lang=EN-US link="#0563C1" vlink="#954F72">
<div class=WordSection1>
<table class=MsoNormalTable border=0 cellspacing=0 cellpadding=0 width=1273 style='width:955.0pt;border-collapse:collapse'>
<tr style='height:33.75pt'>
<td width=489 colspan=4 valign=bottom style='width:367.0pt;padding:0in 5.4pt 0in 5.4pt;height:33.75pt'><p class=MsoNormal align=center style='text-align:center'><b><span style='font-size:26.0pt;font-family:"Arial",sans-serif;color:black'>Electricity Sales<o:p></o:p></span></b></p></td>
<td width=19 valign=bottom style='width:14.0pt;padding:0in 5.4pt 0in 5.4pt;height:33.75pt'></td>
<td width=765 colspan=5 valign=bottom style='width:574.0pt;padding:0in 5.4pt 0in 5.4pt;height:33.75pt'><p class=MsoNormal align=center style='text-align:center'><b><span style='font-size:26.0pt;font-family:"Arial",sans-serif;color:black'>DEPP<o:p></o:p></span></b></p></td>
</tr>
<tr style='height:56.25pt'>
<td width=167 valign=bottom style='width:125.0pt;background:#BDD7EE;padding:0in 5.4pt 0in 5.4pt;height:56.25pt'><p class=MsoNormal><b><span style='font-size:14.0pt;color:black'>Agent Name<o:p></o:p></span></b></p></td>
<td width=75 valign=bottom style='width:56.0pt;background:#BDD7EE;padding:0in 5.4pt 0in 5.4pt;height:56.25pt'><p class=MsoNormal><b><span style='font-size:14.0pt;color:black'>POGO Account Number<o:p></o:p></span></b></p></td>
<td width=75 valign=bottom style='width:56.0pt;background:#BDD7EE;padding:0in 5.4pt 0in 5.4pt;height:56.25pt'><p class=MsoNormal><b><span style='font-size:14.0pt;color:black'>POGO Order Number<o:p></o:p></span></b></p></td>
<td width=173 valign=bottom style='width:130.0pt;background:#BDD7EE;padding:0in 5.4pt 0in 5.4pt;height:56.25pt'><p class=MsoNormal><b><span style='font-size:14.0pt;color:black'>POGO Order Status<o:p></o:p></span></b></p></td>
<td width=19 valign=bottom style='width:14.0pt;padding:0in 5.4pt 0in 5.4pt;height:56.25pt'></td>
<td width=167 valign=bottom style='width:125.0pt;background:#BDD7EE;padding:0in 5.4pt 0in 5.4pt;height:56.25pt'><p class=MsoNormal><b><span style='font-size:14.0pt;color:black'>Agent Name<o:p></o:p></span></b></p></td>
<td width=75 valign=bottom style='width:56.0pt;background:#BDD7EE;padding:0in 5.4pt 0in 5.4pt;height:56.25pt'><p class=MsoNormal><b><span style='font-size:14.0pt;color:black'>POGO Account Number<o:p></o:p></span></b></p></td>
<td width=75 valign=bottom style='width:56.0pt;background:#BDD7EE;padding:0in 5.4pt 0in 5.4pt;height:56.25pt'><p class=MsoNormal><b><span style='font-size:14.0pt;color:black'>POGO Order Number<o:p></o:p></span></b></p></td>
<td width=231 valign=bottom style='width:173.0pt;background:#BDD7EE;padding:0in 5.4pt 0in 5.4pt;height:56.25pt'><p class=MsoNormal><b><span style='font-size:14.0pt;color:black'>DEPP Name<o:p></o:p></span></b></p></td>
<td width=173 valign=bottom style='width:130.0pt;background:#BDD7EE;padding:0in 5.4pt 0in 5.4pt;height:56.25pt'><p class=MsoNormal><b><span style='font-size:14.0pt;color:black'>POGO Order Status<o:p></o:p></span></b></p></td>
</tr>
</table>
<p class=MsoNormal><o:p> </o:p></p>
<p class=MsoNormal><o:p> </o:p></p>
<table class=MsoNormalTable border=0 cellspacing=0 cellpadding=0 width=851 style='width:638.0pt;border-collapse:collapse'>
<tr style='height:33.75pt'>
<td width=325 nowrap colspan=2 valign=bottom style='width:244.0pt;padding:0in 5.4pt 0in 5.4pt;height:33.75pt'><p class=MsoNormal align=center style='text-align:center'><b><span style='font-size:26.0pt;font-family:"Arial",sans-serif;color:black'>FCP Sales<o:p></o:p></span></b></p></td>
<td width=80 nowrap valign=bottom style='width:60.0pt;padding:0in 5.4pt 0in 5.4pt;height:33.75pt'></td>
<td width=445 nowrap colspan=3 valign=bottom style='width:334.0pt;padding:0in 5.4pt 0in 5.4pt;height:33.75pt'><p class=MsoNormal align=center style='text-align:center'><b><span style='font-size:26.0pt;font-family:"Arial",sans-serif;color:black'>FCP Opportunities<o:p></o:p></span></b></p></td>
</tr>
<tr style='height:56.25pt'>
<td width=167 valign=bottom style='width:125.0pt;background:#BDD7EE;padding:0in 5.4pt 0in 5.4pt;height:56.25pt'><p class=MsoNormal><b><span style='font-size:14.0pt;color:black'>Agent Name<o:p></o:p></span></b></p></td>
<td width=159 valign=bottom style='width:119.0pt;background:#BDD7EE;padding:0in 5.4pt 0in 5.4pt;height:56.25pt'><p class=MsoNormal><b><span style='font-size:14.0pt;color:black'>First Choice Power Account Number<o:p></o:p></span></b></p></td>
<td width=80 nowrap valign=bottom style='width:60.0pt;padding:0in 5.4pt 0in 5.4pt;height:56.25pt'></td>
<td width=167 valign=bottom style='width:125.0pt;background:#BDD7EE;padding:0in 5.4pt 0in 5.4pt;height:56.25pt'><p class=MsoNormal><b><span style='font-size:14.0pt;color:black'>Agent Name<o:p></o:p></span></b></p></td>
<td width=128 valign=bottom style='width:96.0pt;background:#BDD7EE;padding:0in 5.4pt 0in 5.4pt;height:56.25pt'><p class=MsoNormal><b><span style='font-size:14.0pt;color:black'>POGO Account Number<o:p></o:p></span></b></p></td>
<td width=151 valign=bottom style='width:113.0pt;background:#BDD7EE;padding:0in 5.4pt 0in 5.4pt;height:56.25pt'><p class=MsoNormal><b><span style='font-size:14.0pt;color:black'>Pogo Status<o:p></o:p></span></b></p></td>
</tr>
</table>
</div>
</body>
</html>
"""
| 100.307339
| 358
| 0.701331
| 3,507
| 21,867
| 4.372968
| 0.07927
| 0.032342
| 0.056599
| 0.088941
| 0.907864
| 0.902126
| 0.897822
| 0.883086
| 0.880477
| 0.876109
| 0
| 0.076742
| 0.087666
| 21,867
| 217
| 359
| 100.769585
| 0.69198
| 0.057895
| 0
| 0.576531
| 0
| 0.341837
| 0.941025
| 0.571484
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 1
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
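The Start/End string pairs in DEPPformat.py above are evidently concatenated around cell values when salesemail.py renders the report. A hedged sketch of how one agent row might be assembled (build_agent_row and its arguments are hypothetical; the real rendering code lives in salesemail.py and is not shown in this record):
def build_agent_row(agent_id, agent_name, calls_handled):
    # Hypothetical helper: wraps each value in its matching Start/End HTML fragment.
    return (
        agentRowStart
        + agentIDStart + str(agent_id) + agentIDEnd
        + agentNameStart + agent_name + agentNameEnd
        + callsHandledStart + str(calls_handled) + callsHandledEnd
        + agentRowEnd
    )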
|
e0b49385ea18e0bbdb6e507c6af15df867ac7de6
| 3,481
|
py
|
Python
|
pyaz/policy/exemption/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/policy/exemption/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | null | null | null |
pyaz/policy/exemption/__init__.py
|
py-az-cli/py-az-cli
|
9a7dc44e360c096a5a2f15595353e9dad88a9792
|
[
"MIT"
] | 1
|
2022-02-03T09:12:01.000Z
|
2022-02-03T09:12:01.000Z
|
'''
Manage resource policy exemptions.
'''
from ... pyaz_utils import _call_az
def create(name, description=None, display_name=None, exemption_category=None, expires_on=None, metadata=None, policy_assignment=None, policy_definition_reference_ids=None, resource_group=None, scope=None):
'''
Create a policy exemption.
Required Parameters:
- name -- Name of the new policy exemption.
Optional Parameters:
- description -- Description of policy exemption.
- display_name -- Display name of the policy exemption.
- exemption_category -- The policy exemption category of the policy exemption
- expires_on -- The expiration date and time (in UTC ISO 8601 format yyyy-MM-ddTHH:mm:ssZ) of the policy exemption.
- metadata -- Metadata in space-separated key=value pairs.
- policy_assignment -- The referenced policy assignment Id for the policy exemption.
- policy_definition_reference_ids -- The policy definition reference ids to exempt in the initiative (policy set).
- resource_group -- the resource group where the policy will be applied
- scope -- Scope to which this policy exemption applies.
'''
return _call_az("az policy exemption create", locals())
def delete(name, resource_group=None, scope=None):
'''
Delete a policy exemption.
Required Parameters:
- name -- Name of the policy exemption.
Optional Parameters:
- resource_group -- the resource group where the policy will be applied
- scope -- Scope to which this policy exemption applies.
'''
return _call_az("az policy exemption delete", locals())
def list(disable_scope_strict_match=None, resource_group=None, scope=None):
'''
List policy exemptions.
Optional Parameters:
- disable_scope_strict_match -- Include policy exemptions either inherited from parent scope or at child scope.
- resource_group -- the resource group where the policy will be applied
- scope -- Scope to which this policy exemption applies.
'''
return _call_az("az policy exemption list", locals())
def show(name, resource_group=None, scope=None):
'''
Show a policy exemption.
Required Parameters:
- name -- Name of the policy exemption.
Optional Parameters:
- resource_group -- the resource group where the policy will be applied
- scope -- Scope to which this policy exemption applies.
'''
return _call_az("az policy exemption show", locals())
def update(name, description=None, display_name=None, exemption_category=None, expires_on=None, metadata=None, policy_definition_reference_ids=None, resource_group=None, scope=None):
'''
Update a policy exemption.
Required Parameters:
- name -- Name of the policy exemption.
Optional Parameters:
- description -- Description of policy exemption.
- display_name -- Display name of the policy exemption.
- exemption_category -- The policy exemption category of the policy exemption
- expires_on -- The expiration date and time (in UTC ISO 8601 format yyyy-MM-ddTHH:mm:ssZ) of the policy exemption.
- metadata -- Metadata in space-separated key=value pairs.
- policy_definition_reference_ids -- The policy definition reference ids to exempt in the initiative (policy set).
- resource_group -- the resource group where the policy will be applied
- scope -- Scope to which this policy exemption applies.
'''
return _call_az("az policy exemption update", locals())
| 40.476744
| 206
| 0.72709
| 453
| 3,481
| 5.465784
| 0.181015
| 0.175687
| 0.087237
| 0.072698
| 0.838449
| 0.838449
| 0.8021
| 0.8021
| 0.8021
| 0.783118
| 0
| 0.00285
| 0.193623
| 3,481
| 85
| 207
| 40.952941
| 0.879231
| 0.665326
| 0
| 0
| 0
| 0
| 0.135048
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.454545
| false
| 0
| 0.090909
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 7
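Each wrapper above forwards its own locals() to _call_az, which presumably maps them onto az CLI flags. A hypothetical usage sketch (every name, ID, and scope below is invented; running it requires an installed, logged-in Azure CLI):
from pyaz.policy import exemption

# All values here are placeholders for illustration only.
exemption.create(
    name='exempt-dev-rg',
    exemption_category='Waiver',
    policy_assignment='<policy-assignment-id>',
    resource_group='dev-rg',
)
print(exemption.list(resource_group='dev-rg'))
exemption.delete(name='exempt-dev-rg', resource_group='dev-rg')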
|
e0b9e206eaf66510a1ba338463260de7ec1099eb
| 590
|
py
|
Python
|
Python/hollow_diamond.py
|
Mario263/Hacktoberfest_2021
|
57965f48d3b19d25d2c0b75525eab4c4dce0157a
|
[
"MIT"
] | 16
|
2021-10-15T08:41:52.000Z
|
2022-01-02T11:14:30.000Z
|
Python/hollow_diamond.py
|
Mario263/Hacktoberfest_2021
|
57965f48d3b19d25d2c0b75525eab4c4dce0157a
|
[
"MIT"
] | 5
|
2021-10-17T06:04:41.000Z
|
2021-10-30T16:45:40.000Z
|
Python/hollow_diamond.py
|
Mario263/Hacktoberfest_2021
|
57965f48d3b19d25d2c0b75525eab4c4dce0157a
|
[
"MIT"
] | 43
|
2021-10-15T14:03:48.000Z
|
2022-03-09T21:32:46.000Z
|
row = int(input('Enter number of row: '))
# Upper part of hollow diamond
for i in range(1, row+1):
for j in range(1,row-i+1):
print(" ", end="")
for j in range(1, 2*i):
if j==1 or j==2*i-1:
print("*", end="")
else:
print(" ", end="")
print()
# Lower part of hollow diamond
for i in range(row-1,0, -1):
for j in range(1,row-i+1):
print(" ", end="")
for j in range(1, 2*i):
if j==1 or j==2*i-1:
print("*", end="")
else:
print(" ", end="")
print()
| 24.583333
| 42
| 0.433898
| 94
| 590
| 2.723404
| 0.255319
| 0.164063
| 0.15625
| 0.171875
| 0.820313
| 0.820313
| 0.820313
| 0.820313
| 0.585938
| 0.585938
| 0
| 0.051351
| 0.372881
| 590
| 23
| 43
| 25.652174
| 0.640541
| 0.09661
| 0
| 0.842105
| 0
| 0
| 0.053254
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0.421053
| 0
| 0
| 0
| null | 0
| 0
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 7
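For reference, tracing the two loop blocks above with row = 3 gives a hollow diamond of height 5:
# Expected output for row = 3:
#   *
#  * *
# *   *
#  * *
#   *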
|
e0f947ad4c6813a3fb769bda7eb2aad7f9525923
| 20,941
|
py
|
Python
|
backend/tensor_site/migrations/0001_initial.py
|
b3none/Tensor
|
6c70c7d3ade6eabe4162d0b9eef0923c79ea1eba
|
[
"MIT"
] | null | null | null |
backend/tensor_site/migrations/0001_initial.py
|
b3none/Tensor
|
6c70c7d3ade6eabe4162d0b9eef0923c79ea1eba
|
[
"MIT"
] | null | null | null |
backend/tensor_site/migrations/0001_initial.py
|
b3none/Tensor
|
6c70c7d3ade6eabe4162d0b9eef0923c79ea1eba
|
[
"MIT"
] | 3
|
2021-09-06T18:01:52.000Z
|
2021-10-18T02:49:53.000Z
|
# Generated by Django 3.0.5 on 2020-08-19 16:05
import datetime
from django.db import migrations, models
import django.db.models.deletion
import django.utils.timezone
import markdownx.models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Mostactive_awp',
fields=[
('playername', models.CharField(max_length=128)),
('steamid', models.CharField(max_length=32, primary_key=True, serialize=False)),
('last_accountuse', models.IntegerField()),
('timect', models.IntegerField(blank=True, db_column='timeCT', null=True)),
('timett', models.IntegerField(blank=True, db_column='timeTT', null=True)),
('timespe', models.IntegerField(blank=True, db_column='timeSPE', null=True)),
('total', models.IntegerField(blank=True, null=True)),
],
options={
'db_table': 'mostactive',
'managed': False,
},
),
migrations.CreateModel(
name='Rank_awp',
fields=[
('steam', models.CharField(blank=True, max_length=40, primary_key=True, serialize=False, unique=True)),
('name', models.TextField(blank=True, null=True)),
('lastip', models.TextField(blank=True, null=True)),
('score', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('kills', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('deaths', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('assists', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('suicides', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('tk', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('shots', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('hits', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('headshots', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('connected', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('rounds_tr', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('rounds_ct', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('lastconnect', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('knife', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('glock', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('hkp2000', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('usp_silencer', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('p250', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('deagle', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('elite', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('fiveseven', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('tec9', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('cz75a', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('revolver', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('nova', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('xm1014', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('mag7', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('sawedoff', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('bizon', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('mac10', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('mp9', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('mp7', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('ump45', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('p90', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('galilar', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('ak47', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('scar20', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('famas', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('m4a1', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('m4a1_silencer', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('aug', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('ssg08', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('sg556', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('awp', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('g3sg1', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('m249', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('negev', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('hegrenade', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('flashbang', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('smokegrenade', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('inferno', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('decoy', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('taser', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('mp5sd', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('breachcharge', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('head', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('chest', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('stomach', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('left_arm', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('right_arm', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('left_leg', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('right_leg', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('c4_planted', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('c4_exploded', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('c4_defused', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('ct_win', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('tr_win', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('hostages_rescued', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('vip_killed', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('vip_escaped', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('vip_played', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('mvp', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('damage', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('match_win', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('match_draw', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('match_lose', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('first_blood', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('no_scope', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('no_scope_dis', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
],
options={
'db_table': 'rankme',
'managed': False,
},
),
migrations.CreateModel(
name='Rank_retake',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('steam', models.CharField(blank=True, max_length=40, null=True, unique=True)),
('name', models.TextField(blank=True, null=True)),
('lastip', models.TextField(blank=True, null=True)),
('score', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('kills', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('deaths', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('assists', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('suicides', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('tk', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('shots', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('hits', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('headshots', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('connected', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('rounds_tr', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('rounds_ct', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('lastconnect', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('knife', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('glock', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('hkp2000', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('usp_silencer', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('p250', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('deagle', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('elite', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('fiveseven', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('tec9', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('cz75a', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('revolver', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('nova', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('xm1014', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('mag7', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('sawedoff', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('bizon', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('mac10', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('mp9', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('mp7', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('ump45', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('p90', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('galilar', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('ak47', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('scar20', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('famas', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('m4a1', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('m4a1_silencer', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('aug', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('ssg08', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('sg556', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('awp', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('g3sg1', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('m249', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('negev', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('hegrenade', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('flashbang', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('smokegrenade', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('inferno', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('decoy', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('taser', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('mp5sd', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('breachcharge', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('head', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('chest', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('stomach', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('left_arm', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('right_arm', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('left_leg', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('right_leg', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('c4_planted', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('c4_exploded', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('c4_defused', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('ct_win', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('tr_win', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('hostages_rescued', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('vip_killed', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('vip_escaped', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('vip_played', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('mvp', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('damage', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('match_win', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('match_draw', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('match_lose', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('first_blood', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('no_scope', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
('no_scope_dis', models.DecimalField(blank=True, decimal_places=0, max_digits=10, null=True)),
],
options={
'db_table': 'rankme',
'managed': False,
},
),
migrations.CreateModel(
name='Donators',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('SteamID', models.CharField(max_length=30)),
('Name', models.CharField(max_length=100, null=True)),
('amount', models.IntegerField()),
('date', models.DateTimeField(default=datetime.datetime.now)),
],
options={
'verbose_name_plural': 'Donators',
},
),
migrations.CreateModel(
name='News',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=100)),
('content', markdownx.models.MarkdownxField()),
('date', models.DateField(default=django.utils.timezone.now)),
],
options={
'verbose_name_plural': 'News',
},
),
migrations.CreateModel(
name='Server',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('ip', models.CharField(max_length=50)),
('port', models.IntegerField()),
],
options={
'verbose_name_plural': 'Servers',
},
),
migrations.CreateModel(
name='PlayerCount',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('timestamp', models.TextField()),
('player_count', models.IntegerField()),
('max_player', models.IntegerField()),
('Name', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='tensor_site.Server')),
],
),
]
| 79.022642
| 119
| 0.625949
| 2,478
| 20,941
| 5.124294
| 0.075464
| 0.119074
| 0.286187
| 0.335958
| 0.922665
| 0.905261
| 0.888565
| 0.888565
| 0.882265
| 0.882265
| 0
| 0.037023
| 0.224822
| 20,941
| 264
| 120
| 79.32197
| 0.74521
| 0.002149
| 0
| 0.801556
| 1
| 0
| 0.07294
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.019455
| 0
| 0.035019
| 0
| 0
| 0
| 0
| null | 0
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 10
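Most tables in the migration above are created with 'managed': False, so Django maps onto existing tables (written by the game-server plugins) without ever creating or altering them. A minimal sketch of the unmanaged model the Mostactive_awp entry corresponds to (field list abbreviated; the project's actual models.py is not part of this record):
from django.db import models

class Mostactive_awp(models.Model):
    # Unmanaged: Django reads and writes the existing `mostactive` table but never migrates it.
    playername = models.CharField(max_length=128)
    steamid = models.CharField(max_length=32, primary_key=True)
    last_accountuse = models.IntegerField()

    class Meta:
        db_table = 'mostactive'
        managed = False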
|
46020f6ca5485ff581c4762340829136155b2712
| 504
|
py
|
Python
|
colour/plotting/tm3018/__init__.py
|
JGoldstone/colour
|
6829b363d5f0682bff0f4826995e7ceac189ff28
|
[
"BSD-3-Clause"
] | null | null | null |
colour/plotting/tm3018/__init__.py
|
JGoldstone/colour
|
6829b363d5f0682bff0f4826995e7ceac189ff28
|
[
"BSD-3-Clause"
] | null | null | null |
colour/plotting/tm3018/__init__.py
|
JGoldstone/colour
|
6829b363d5f0682bff0f4826995e7ceac189ff28
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
from .report import (
plot_single_sd_colour_rendition_report_full,
plot_single_sd_colour_rendition_report_intermediate,
plot_single_sd_colour_rendition_report_simple,
)
from .report import plot_single_sd_colour_rendition_report
__all__ = [
'plot_single_sd_colour_rendition_report_full',
'plot_single_sd_colour_rendition_report_intermediate',
'plot_single_sd_colour_rendition_report_simple',
]
__all__ += [
'plot_single_sd_colour_rendition_report',
]
| 28
| 58
| 0.813492
| 65
| 504
| 5.476923
| 0.230769
| 0.224719
| 0.269663
| 0.404494
| 0.97191
| 0.97191
| 0.97191
| 0.86236
| 0.86236
| 0.679775
| 0
| 0.002247
| 0.117063
| 504
| 17
| 59
| 29.647059
| 0.797753
| 0.041667
| 0
| 0
| 0
| 0
| 0.367983
| 0.367983
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.142857
| 0
| 0.142857
| 0
| 0
| 0
| 0
| null | 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 9
|
4603d669b6e3a8f2256f40cd8bd661c139e58dc9
| 10,710
|
py
|
Python
|
hydroinformer/models.py
|
mahelvson/HydroInformer2022
|
1bf4cb9463606f7164bff604c534b99dd057942a
|
[
"Apache-2.0"
] | 1
|
2022-02-16T18:02:51.000Z
|
2022-02-16T18:02:51.000Z
|
hydroinformer/models.py
|
mahelvson/HydroInformer2022
|
1bf4cb9463606f7164bff604c534b99dd057942a
|
[
"Apache-2.0"
] | null | null | null |
hydroinformer/models.py
|
mahelvson/HydroInformer2022
|
1bf4cb9463606f7164bff604c534b99dd057942a
|
[
"Apache-2.0"
] | null | null | null |
import torch
import torch.nn as nn
import torch.nn.functional as F
from utils.masking import TriangularCausalMask, ProbMask
from models.encoder import Encoder, EncoderLayer, ConvLayer, EncoderStack
from models.decoder import Decoder, DecoderLayer
from models.attn import FullAttention, ProbAttention, AttentionLayer
from models.embed import DataEmbedding
class InformerDSI(nn.Module):
"""
    Variant that takes both static and dynamic data, but reuses the original fully
    connected layer at the encoder output to process the static data together with the dynamic data.
"""
def __init__(self, enc_in, dec_in, c_out, seq_len, label_len, out_len,
factor=5, d_model=512, n_heads=8, e_layers=3, d_layers=2, d_ff=512,
dropout=0.0, attn='prob', embed='fixed', freq='h', activation='gelu',
                 output_attention=False, distil=True, mix=True,
device=torch.device('cuda:0'), d_static=26):
super(InformerDSI, self).__init__()
self.pred_len = out_len
self.d_static = d_static
self.d_model = d_model
self.attn = attn
self.output_attention = output_attention
# Encoding
self.enc_embedding = DataEmbedding(enc_in, d_model, embed, freq, dropout)
self.dec_embedding = DataEmbedding(dec_in, d_model, embed, freq, dropout)
# Attention
Attn = ProbAttention if attn=='prob' else FullAttention
# Encoder
self.encoder = Encoder(
[
EncoderLayer(
AttentionLayer(Attn(False, factor, attention_dropout=dropout, output_attention=output_attention),
d_model, n_heads, mix=False),
d_model,
d_ff,
dropout=dropout,
activation=activation
) for l in range(e_layers)
],
[
ConvLayer(
d_model
) for l in range(e_layers-1)
] if distil else None,
norm_layer=torch.nn.LayerNorm(d_model)
)
# Decoder
self.decoder = Decoder(
[
DecoderLayer(
AttentionLayer(Attn(True, factor, attention_dropout=dropout, output_attention=False),
d_model, n_heads, mix=mix),
AttentionLayer(FullAttention(False, factor, attention_dropout=dropout, output_attention=False),
d_model, n_heads, mix=False),
d_model,
d_ff,
dropout=dropout,
activation=activation,
)
for l in range(d_layers)
],
norm_layer=torch.nn.LayerNorm(d_model)
)
# self.end_conv1 = nn.Conv1d(in_channels=label_len+out_len, out_channels=out_len, kernel_size=1, bias=True)
# self.end_conv2 = nn.Conv1d(in_channels=d_model, out_channels=c_out, kernel_size=1, bias=True)
self.projection = nn.Linear(d_model, c_out, bias=True)
def forward(self, x_enc, x_mark_enc, x_dec, x_mark_dec, static_attr,
enc_self_mask=None, dec_self_mask=None, dec_enc_mask=None):
enc_out = self.enc_embedding(x_enc, x_mark_enc)
enc_out, attns = self.encoder(enc_out, attn_mask=enc_self_mask)
dec_out = self.dec_embedding(x_dec, x_mark_dec)
dec_out = self.decoder(dec_out, enc_out, x_mask=dec_self_mask, cross_mask=dec_enc_mask)
proj_in = torch.cat((dec_out, static_attr), dim=1)
dec_out = self.projection(proj_in)
# dec_out = self.end_conv1(dec_out)
# dec_out = self.end_conv2(dec_out.transpose(2,1)).transpose(1,2)
if self.output_attention:
return dec_out[:,-self.pred_len:,:], attns
else:
return dec_out[:,-self.pred_len:,:] # [B, L, D]
class InformerDS(nn.Module):
def __init__(self, enc_in, dec_in, c_out, seq_len, label_len, out_len,
factor=5, d_model=512, n_heads=8, e_layers=3, d_layers=2, d_ff=512,
dropout=0.0, attn='prob', embed='fixed', freq='h', activation='gelu',
                 output_attention=False, distil=True, mix=True,
device=torch.device('cuda:0'), d_static=26):
super(InformerDS, self).__init__()
self.device = device
self.pred_len = out_len
self.attn = attn
self.output_attention = output_attention
# Encoding
self.enc_embedding = DataEmbedding(enc_in, d_model, embed, freq, dropout)
self.dec_embedding = DataEmbedding(dec_in, d_model, embed, freq, dropout)
# Attention
Attn = ProbAttention if attn=='prob' else FullAttention
# Encoder
self.encoder = Encoder(
[
EncoderLayer(
AttentionLayer(Attn(False, factor, attention_dropout=dropout, output_attention=output_attention),
d_model, n_heads, mix=False),
d_model,
d_ff,
dropout=dropout,
activation=activation
) for l in range(e_layers)
],
[
ConvLayer(
d_model
) for l in range(e_layers-1)
] if distil else None,
norm_layer=torch.nn.LayerNorm(d_model)
)
# Decoder
self.decoder = Decoder(
[
DecoderLayer(
AttentionLayer(Attn(True, factor, attention_dropout=dropout, output_attention=False),
d_model, n_heads, mix=mix),
AttentionLayer(FullAttention(False, factor, attention_dropout=dropout, output_attention=False),
d_model, n_heads, mix=False),
d_model,
d_ff,
dropout=dropout,
activation=activation,
)
for l in range(d_layers)
],
norm_layer=torch.nn.LayerNorm(d_model)
)
# self.end_conv1 = nn.Conv1d(in_channels=label_len+out_len, out_channels=out_len, kernel_size=1, bias=True)
# self.end_conv2 = nn.Conv1d(in_channels=d_model, out_channels=c_out, kernel_size=1, bias=True)
self.projection = nn.Linear(d_model, c_out, bias=True)
self.fc = nn.Linear(d_static + c_out, out_len, bias=True)
def forward(self, x_enc, x_mark_enc, x_dec, x_mark_dec, static_attr,
enc_self_mask=None, dec_self_mask=None, dec_enc_mask=None):
enc_out = self.enc_embedding(x_enc, x_mark_enc)
enc_out, attns = self.encoder(enc_out, attn_mask=enc_self_mask)
dec_out = self.dec_embedding(x_dec, x_mark_dec)
dec_out = self.decoder(dec_out, enc_out, x_mask=dec_self_mask, cross_mask=dec_enc_mask)
dec_out = self.projection(dec_out)
dec_out_slice = dec_out[:, -self.pred_len:,:]
dec_out_slice = dec_out_slice.transpose(2,1)
static_attr = static_attr.transpose(2,1)
fc_in = torch.cat((dec_out_slice, static_attr), dim=1)
fc_in = fc_in.transpose(2,1)
fc_out = self.fc(fc_in)
if self.output_attention:
return fc_out[:,-self.pred_len:,:], attns
else:
return fc_out[:,-self.pred_len:,:] # [B, L, D]
class InformerD(nn.Module):
def __init__(self, enc_in, dec_in, c_out, seq_len, label_len, out_len,
factor=5, d_model=512, n_heads=8, e_layers=3, d_layers=2, d_ff=512,
dropout=0.0, attn='prob', embed='fixed', freq='h', activation='gelu',
                 output_attention=False, distil=True, mix=True,
device=torch.device('cuda:0'), d_static=0):
super(InformerD, self).__init__()
self.pred_len = out_len
self.attn = attn
self.output_attention = output_attention
# Encoding
self.enc_embedding = DataEmbedding(enc_in, d_model, embed, freq, dropout)
self.dec_embedding = DataEmbedding(dec_in, d_model, embed, freq, dropout)
# Attention
Attn = ProbAttention if attn=='prob' else FullAttention
# Encoder
self.encoder = Encoder(
[
EncoderLayer(
AttentionLayer(Attn(False, factor, attention_dropout=dropout, output_attention=output_attention),
d_model, n_heads, mix=False),
d_model,
d_ff,
dropout=dropout,
activation=activation
) for l in range(e_layers)
],
[
ConvLayer(
d_model
) for l in range(e_layers-1)
] if distil else None,
norm_layer=torch.nn.LayerNorm(d_model)
)
# Decoder
self.decoder = Decoder(
[
DecoderLayer(
AttentionLayer(Attn(True, factor, attention_dropout=dropout, output_attention=False),
d_model, n_heads, mix=mix),
AttentionLayer(FullAttention(False, factor, attention_dropout=dropout, output_attention=False),
d_model, n_heads, mix=False),
d_model,
d_ff,
dropout=dropout,
activation=activation,
)
for l in range(d_layers)
],
norm_layer=torch.nn.LayerNorm(d_model)
)
# self.end_conv1 = nn.Conv1d(in_channels=label_len+out_len, out_channels=out_len, kernel_size=1, bias=True)
# self.end_conv2 = nn.Conv1d(in_channels=d_model, out_channels=c_out, kernel_size=1, bias=True)
self.projection = nn.Linear(d_model, c_out, bias=True)
def forward(self, x_enc, x_mark_enc, x_dec, x_mark_dec, static_attr,
enc_self_mask=None, dec_self_mask=None, dec_enc_mask=None):
enc_out = self.enc_embedding(x_enc, x_mark_enc)
enc_out, attns = self.encoder(enc_out, attn_mask=enc_self_mask)
dec_out = self.dec_embedding(x_dec, x_mark_dec)
dec_out = self.decoder(dec_out, enc_out, x_mask=dec_self_mask, cross_mask=dec_enc_mask)
dec_out = self.projection(dec_out)
# dec_out = self.end_conv1(dec_out)
# dec_out = self.end_conv2(dec_out.transpose(2,1)).transpose(1,2)
if self.output_attention:
return dec_out[:,-self.pred_len:,:], attns
else:
return dec_out[:,-self.pred_len:,:] # [B, L, D]
| 44.811715
| 118
| 0.578711
| 1,323
| 10,710
| 4.393802
| 0.097506
| 0.042319
| 0.030965
| 0.044899
| 0.872011
| 0.856184
| 0.854464
| 0.849475
| 0.838466
| 0.838466
| 0
| 0.011797
| 0.327264
| 10,710
| 239
| 119
| 44.811715
| 0.795003
| 0.101494
| 0
| 0.760204
| 0
| 0
| 0.007516
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.030612
| false
| 0
| 0.040816
| 0
| 0.117347
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 4603da3ce9c85ac9362757e76c4dde79ff65fe6c
| 27,354
| py
| Python
| active_rl/utils/optimization.py
| Tony-Cheng/Active-Reinforcement-Learning
| 50bb65106ae1f957d8cb6cb5706ce1285519e6b4
| [ "MIT" ] | null | null | null |
| active_rl/utils/optimization.py
| Tony-Cheng/Active-Reinforcement-Learning
| 50bb65106ae1f957d8cb6cb5706ce1285519e6b4
| [ "MIT" ] | null | null | null |
| active_rl/utils/optimization.py
| Tony-Cheng/Active-Reinforcement-Learning
| 50bb65106ae1f957d8cb6cb5706ce1285519e6b4
| [ "MIT" ] | null | null | null |
import torch
import numpy as np
import torch.nn.functional as F
# from baselines.deepq.replay_buffer import PrioritizedReplayBuffer
from .priority_replay import PrioritizedReplayBuffer
from .acquisition_functions import ens_BALD
from torch.distributions import Dirichlet
def standard_optimization(policy_net, target_net, optimizer, memory, batch_size=128,
GAMMA=0.99, training=True, device='cuda'):
if not training:
return None
if len(memory) < batch_size:
return 0
state_batch, action_batch, reward_batch, n_state_batch, done_batch = memory.sample(
batch_size)
state_batch = state_batch.to(device)
action_batch = action_batch.to(device)
reward_batch = reward_batch.to(device)
n_state_batch = n_state_batch.to(device)
done_batch = done_batch.to(device)
q = policy_net(state_batch).gather(1, action_batch)
nq = target_net(n_state_batch).max(1)[0].detach()
# Compute the expected Q values
expected_state_action_values = (
nq * GAMMA)*(1.-done_batch[:, 0]) + reward_batch[:, 0]
# Compute Huber loss
loss = F.smooth_l1_loss(q, expected_state_action_values.unsqueeze(1))
# Optimize the model
optimizer.zero_grad()
loss.backward()
for param in policy_net.parameters():
param.grad.data.clamp_(-1, 1)
optimizer.step()
return loss.detach()
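# Worked example of the target computation above (a minimal sketch with dummy
# numbers, not data from the replay buffer):
#
#   y = r + GAMMA * max_a' Q_target(s', a') * (1 - done)
#
# nq = torch.tensor([1.0, 2.0])       # max_a' Q_target(s', a') per transition
# reward = torch.tensor([0.5, 0.0])
# done = torch.tensor([0.0, 1.0])     # the second transition is terminal
# target = (nq * 0.99) * (1. - done) + reward
# # target == tensor([1.4900, 0.0000]); terminal states keep only the reward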
def standard_optimization_value_network(value_net, target_net, optimizer, memory, batch_size=128,
GAMMA=0.99, training=True, device='cuda'):
if not training:
return None
if len(memory) < batch_size:
return 0
state_batch, action_batch, reward_batch, n_state_batch, done_batch = memory.sample(
batch_size)
state_batch = state_batch.to(device)
action_batch = action_batch.to(device)
reward_batch = reward_batch.to(device)
n_state_batch = n_state_batch.to(device)
done_batch = done_batch.to(device)
v = value_net(state_batch)
nv = target_net(n_state_batch).squeeze().detach()
# Compute the expected Q values
expected_state_action_values = (
nv * GAMMA)*(1.-done_batch[:, 0]) + reward_batch[:, 0]
# Compute Huber loss
loss = F.smooth_l1_loss(v, expected_state_action_values.unsqueeze(1))
# Optimize the model
optimizer.zero_grad()
loss.backward()
for param in value_net.parameters():
param.grad.data.clamp_(-1, 1)
optimizer.step()
return loss.detach()
def standard_ddqn_optimization(policy_net, target_net, optimizer, memory,
batch_size=128, GAMMA=0.99, training=True,
device='cuda'):
if not training:
return None
if len(memory) < batch_size:
return 0
state_batch, action_batch, reward_batch, n_state_batch, done_batch = memory.sample(
batch_size)
state_batch = state_batch.to(device)
action_batch = action_batch.to(device)
reward_batch = reward_batch.to(device)
n_state_batch = n_state_batch.to(device)
done_batch = done_batch.to(device)
batch_len = state_batch.size(0)
q = policy_net(state_batch).gather(1, action_batch)
na = policy_net(n_state_batch).max(1)[1].view(batch_len, 1).detach()
nq = target_net(n_state_batch).gather(1, na)
# Compute the expected Q values
expected_state_action_values = (
nq * GAMMA)*(1.-done_batch) + reward_batch
# Compute Huber loss
loss = F.smooth_l1_loss(q, expected_state_action_values)
# Optimize the model
optimizer.zero_grad()
loss.backward()
for param in policy_net.parameters():
param.grad.data.clamp_(-1, 1)
optimizer.step()
return loss.detach()
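# The double-DQN update above decouples action selection from evaluation: the
# online network chooses argmax_a' Q_policy(s', a') and the target network
# scores that action. Minimal sketch with dummy values (not original data):
#
# q_policy_next = torch.tensor([[1.0, 3.0]])   # online net prefers action 1
# q_target_next = torch.tensor([[2.0, 0.5]])   # target net's estimates
# na = q_policy_next.max(1)[1].view(1, 1)      # selected action index: 1
# nq = q_target_next.gather(1, na)             # evaluated value: 0.5, not 2.0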
def AMN_optimization(AMN_net, expert_net, optimizer, memory, feature_regression=False, tau=0.1,
beta=0.01, batch_size=256, GAMMA=0.99, training=True, device='cuda'):
"""
Apply the standard procedure to deep Q network.
"""
if not training or len(memory) < batch_size:
return None
state_batch, _, _, _, _ = memory.sample(batch_size)
state_batch = state_batch.to(device)
if feature_regression:
AMN_q_value, AMN_last_layer = AMN_net(state_batch, last_layer=True)
expert_q_value, expert_last_layer = expert_net(
state_batch, last_layer=True)
loss = F.mse_loss(AMN_last_layer, expert_last_layer.detach())
else:
AMN_q_value = AMN_net(state_batch, last_layer=False)
expert_q_value = expert_net(state_batch, last_layer=False)
loss = 0
AMN_policy = to_policy(AMN_q_value)
expert_policy = to_policy(expert_q_value).detach()
loss -= torch.sum(expert_policy * torch.log(AMN_policy + 1e-8))
optimizer.zero_grad()
loss.backward()
optimizer.step()
return loss.detach()
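# The distillation loss above is the cross-entropy between the expert's and
# the AMN's softmax policies, H(pi_expert, pi_AMN) = -sum_a pi_expert(a) *
# log pi_AMN(a). Minimal sketch with dummy Q-values (shapes are assumptions):
#
# amn_q = torch.tensor([[1.0, 2.0]])
# expert_q = torch.tensor([[2.0, 1.0]])
# ce = -torch.sum(to_policy(expert_q).detach() * torch.log(to_policy(amn_q) + 1e-8))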
def _AMN_optimization(AMN_net, expert_net, optimizer, state_batch, feature_regression=False, tau=0.1,
beta=0.01, GAMMA=0.99, training=True, clipping=False):
"""
Apply the standard procedure to deep Q network.
"""
if not training:
return None
if feature_regression:
AMN_q_value, AMN_last_layer = AMN_net(state_batch, last_layer=True)
expert_q_value, expert_last_layer = expert_net(
state_batch, last_layer=True)
loss = F.mse_loss(AMN_last_layer, expert_last_layer.detach())
else:
AMN_q_value = AMN_net(state_batch, last_layer=False)
expert_q_value = expert_net(state_batch, last_layer=False)
loss = 0
AMN_policy = to_policy(AMN_q_value)
expert_policy = to_policy(expert_q_value).detach()
loss -= torch.sum(expert_policy * torch.log(AMN_policy + 1e-8))
optimizer.zero_grad()
loss.backward()
if clipping:
for param in AMN_net.parameters():
param.grad.data.clamp_(-1, 1)  # in-place clamp; plain clamp() would be a no-op
optimizer.step()
return loss.detach()
def AMN_perc_optimization(AMN_net, expert_net, optimizer, memory, feature_regression=False, tau=0.1,
percentage=0.1, beta=0.01, batch_size=256, GAMMA=0.99, training=True,
device='cuda'):
if not training:
return None
bs, _, _, _, _ = memory.sample(percentage)
bs_len = bs.shape[0]
loss = 0
for i in range(0, bs_len, batch_size):
if i + batch_size < bs_len:
actual_batch_size = batch_size
else:
actual_batch_size = bs_len - i
next_bs = bs[i: i + actual_batch_size].to(device)
loss += _AMN_optimization(AMN_net, expert_net, optimizer, next_bs, feature_regression=feature_regression,
tau=tau, beta=beta, GAMMA=GAMMA, training=training)
return loss
def _AMN_optimization_ENS(AMN_net, expert_net, optimizer, state_batch, ens_num=None, feature_regression=False, tau=0.1,
beta=0.01, GAMMA=0.99, training=True):
"""
Apply the standard procedure to deep Q network.
"""
if not training:
return None
if feature_regression:
AMN_q_value, AMN_last_layer = AMN_net(state_batch, last_layer=True)
expert_q_value, expert_last_layer = expert_net(
state_batch, last_layer=True)
loss = F.mse_loss(AMN_last_layer, expert_last_layer.detach())
else:
AMN_q_value = AMN_net(state_batch, ens_num=ens_num, last_layer=False)
expert_q_value = expert_net(state_batch, last_layer=False)
loss = 0
AMN_policy = to_policy(AMN_q_value)
expert_policy = to_policy(expert_q_value).detach()
loss -= torch.sum(expert_policy * torch.log(AMN_policy + 1e-8))
optimizer.zero_grad()
loss.backward()
optimizer.step()
return loss.detach()
def _AMN_optimization_ENS_mixture(AMN_net, expert_net, optimizer, state_batch, feature_regression=False, tau=0.1,
beta=0.01, GAMMA=0.99, training=True):
"""
Apply the standard procedure to deep Q network.
"""
if not training:
return None
AMN_policy = 0
alpha = Dirichlet(torch.ones(AMN_net.get_num_ensembles())).sample()
for i in range(AMN_net.get_num_ensembles()):
AMN_q_value = AMN_net(state_batch, ens_num=i, last_layer=False)
AMN_policy += alpha[i] * to_policy(AMN_q_value)
loss = 0
expert_q_value = expert_net(state_batch, last_layer=False)
expert_policy = to_policy(expert_q_value).detach()
loss -= torch.sum(expert_policy * torch.log(AMN_policy + 1e-8))
optimizer.zero_grad()
loss.backward()
optimizer.step()
return loss.detach()
def AMN_perc_optimization_ENS(AMN_net, expert_net, optimizer, memory, feature_regression=False, tau=0.1,
percentage=0.1, beta=0.01, batch_size=256, GAMMA=0.99, training=True,
device='cuda'):
if not training:
return None
bs, _, _, _, _ = memory.sample(percentage)
bs_len = bs.shape[0]
loss = 0
for i in range(0, bs_len, batch_size):
for ens_num in range(AMN_net.get_num_ensembles()):
if i + batch_size < bs_len:
actual_batch_size = batch_size
else:
actual_batch_size = bs_len - i
next_bs = bs[i: i + actual_batch_size].to(device)
loss += _AMN_optimization_ENS(AMN_net, expert_net, optimizer, next_bs, ens_num=ens_num, feature_regression=feature_regression,
tau=tau, beta=beta, GAMMA=GAMMA, training=training)
return loss
def AMN_optimization_ensemble_epochs(AMN_net, expert_net, optimizer, memory, epochs,
batch_size=128, GAMMA=0.99, device='cuda'):
loss = 0
for _ in range(epochs):
bs, _, _, _, _ = memory.sample()
bs_len = bs.shape[0]
for i in range(0, bs_len, batch_size):
for ens_num in range(AMN_net.get_num_ensembles()):
if i + batch_size < bs_len:
actual_batch_size = batch_size
else:
actual_batch_size = bs_len - i
next_bs = bs[i: i + actual_batch_size].to(device)
loss += _AMN_optimization_ENS(AMN_net, expert_net,
optimizer, next_bs, ens_num=ens_num, GAMMA=GAMMA)
return loss
def AMN_optimization_ensemble(AMN_net, expert_net, optimizer, memory, tau=0.1,
batch_size=128, GAMMA=0.99, device='cuda'):
loss = 0
bs, _, _, _, _ = memory.sample(batch_size=batch_size)
bs = bs.to(device)
for ens_num in range(AMN_net.get_num_ensembles()):
loss += _AMN_optimization_ENS(AMN_net, expert_net,
optimizer, bs, tau=tau, ens_num=ens_num, GAMMA=GAMMA)
return loss / AMN_net.get_num_ensembles()
def AMN_optimization_ensemble_mixture(AMN_net, expert_net, optimizer, memory,
batch_size=128, GAMMA=0.99, device='cuda'):
loss = 0
bs, _, _, _, _ = memory.sample(batch_size=batch_size)
bs = bs.to(device)
for ens_num in range(AMN_net.get_num_ensembles()):
loss += _AMN_optimization_ENS_mixture(AMN_net, expert_net,
optimizer, bs, GAMMA=GAMMA)
return loss / AMN_net.get_num_ensembles()
def AMN_optimization_epochs(AMN_net, expert_net, optimizer, memory, epochs,
batch_size=128, GAMMA=0.99, device='cuda'):
loss = 0
for _ in range(epochs):
bs, _, _, _, _ = memory.sample()
bs_len = bs.shape[0]
for i in range(0, bs_len, batch_size):
if i + batch_size < bs_len:
actual_batch_size = batch_size
else:
actual_batch_size = bs_len - i
next_bs = bs[i: i + actual_batch_size].to(device)
loss += _AMN_optimization(AMN_net,
expert_net, optimizer, next_bs, GAMMA=GAMMA)
return loss
def standard_optimization_ensemble(policy_net, target_net, optimizer, memory,
batch_size=128, GAMMA=0.99, device='cuda'):
"""
Apply the standard procedure to an ensemble of deep Q network.
"""
if len(memory) < batch_size:
return 0
state_batch, action_batch, reward_batch, n_state_batch, done_batch = memory.sample(
batch_size)
state_batch = state_batch.to(device)
action_batch = action_batch.to(device)
reward_batch = reward_batch.to(device)
n_state_batch = n_state_batch.to(device)
done_batch = done_batch.to(device)
total_loss = 0
for ens_num in range(policy_net.get_num_ensembles()):
q = policy_net(state_batch, ens_num=ens_num).gather(1, action_batch)
nq = target_net(n_state_batch, ens_num=ens_num).max(1)[0].detach()
# Compute the expected Q values
expected_state_action_values = (
nq * GAMMA)*(1.-done_batch[:, 0]) + reward_batch[:, 0]
# Compute Huber loss
loss = F.smooth_l1_loss(q, expected_state_action_values.unsqueeze(1))
total_loss += loss
# Optimize the model
optimizer.zero_grad()
total_loss.backward()
optimizer.step()
return total_loss.detach() / policy_net.get_num_ensembles()
def std_opt_end_independent(policy_net, target_net, optimizer, memory,
batch_size=128, GAMMA=0.99, device='cuda'):
"""
Apply the standard procedure to an ensemble of deep Q network.
"""
if len(memory) < batch_size:
return 0
total_loss = 0
for ens_num in range(policy_net.get_num_ensembles()):
state_batch, action_batch, reward_batch, n_state_batch, done_batch = memory.sample(
batch_size)
state_batch = state_batch.to(device)
action_batch = action_batch.to(device)
reward_batch = reward_batch.to(device)
n_state_batch = n_state_batch.to(device)
done_batch = done_batch.to(device)
q = policy_net(state_batch, ens_num=ens_num).gather(1, action_batch)
nq = target_net(n_state_batch, ens_num=ens_num).max(1)[0].detach()
# Compute the expected Q values
expected_state_action_values = (
nq * GAMMA)*(1.-done_batch[:, 0]) + reward_batch[:, 0]
# Compute Huber loss
loss = F.smooth_l1_loss(q, expected_state_action_values.unsqueeze(1))
total_loss += loss
# Optimize the model
optimizer.zero_grad()
total_loss.backward()
optimizer.step()
return total_loss.detach() / policy_net.get_num_ensembles()
def xplore_opt_ens_BALD(policy_net, target_net, agent_net, optimizer, mem,
batch_size=128, GAMMA=0.99, device='cuda'):
"""
Apply the standard procedure to an ensemble of deep Q network.
"""
state, action, reward, n_state, done = mem.sample(batch_size)
state = state.to(device)
action = action.to(device)
reward = ens_BALD(agent_net, state, tau=1.0,
                  batch_size=batch_size, device=device).unsqueeze(1).to(device)
n_state = n_state.to(device)
done = done.to(device)
total_loss = 0
for ens_num in range(policy_net.get_num_ensembles()):
q = policy_net(state, ens_num=ens_num).gather(1, action)
nq = target_net(n_state, ens_num=ens_num).max(1)[0].detach()
# Compute the expected Q values
expected_state_action_values = (
nq * GAMMA)*(1.-done[:, 0]) + reward[:, 0]
# Compute Huber loss
loss = F.smooth_l1_loss(q, expected_state_action_values.unsqueeze(1))
total_loss += loss
# Optimize the model
optimizer.zero_grad()
total_loss.backward()
optimizer.step()
return total_loss.detach() / policy_net.get_num_ensembles()
def standard_opt_ddqn_ensemble(policy_net, target_net, optimizer, memory,
batch_size=128, GAMMA=0.99, device='cuda'):
"""
Apply the standard procedure to an ensemble of deep Q network.
"""
if len(memory) < batch_size:
return 0
state_batch, action_batch, reward_batch, n_state_batch, done_batch = memory.sample(
batch_size)
state_batch = state_batch.to(device)
action_batch = action_batch.to(device)
reward_batch = reward_batch.to(device)
n_state_batch = n_state_batch.to(device)
done_batch = done_batch.to(device)
batch_len = state_batch.size(0)
total_loss = 0
for ens_num in range(policy_net.get_num_ensembles()):
q = policy_net(state_batch, ens_num=ens_num).gather(1, action_batch)
na = policy_net(n_state_batch, ens_num=ens_num).max(1)[
1].view(batch_len, 1)
nq = target_net(n_state_batch, ens_num=ens_num).gather(
1, na).detach()
# Compute the expected Q values
expected_state_action_values = (
nq * GAMMA)*(1.-done_batch) + reward_batch
# Compute Huber loss
loss = F.smooth_l1_loss(q, expected_state_action_values)
total_loss += loss
# Optimize the model
optimizer.zero_grad()
total_loss.backward()
optimizer.step()
return total_loss.detach() / policy_net.get_num_ensembles()
def standard_opt_ens_priority(policy_net, target_net, optimizer,
memory: PrioritizedReplayBuffer, rank_func,
batch_size=128, GAMMA=0.99, beta=0.4,
device='cuda'):
if len(memory) < batch_size:
return 0
data = memory.sample(batch_size, beta)
state_batch = torch.FloatTensor(data[0]).to(device)
batch_len = state_batch.size(0)
action_batch = torch.from_numpy(data[1]).to(device).view(batch_len, 1)
reward_batch = torch.FloatTensor(data[2]).to(device).view(batch_len, 1)
n_state_batch = torch.FloatTensor(data[3]).to(device)
done_batch = torch.FloatTensor(data[4]).to(device).view(batch_len, 1)
idxs = data[6]
total_loss = 0
for ens_num in range(policy_net.get_num_ensembles()):
q = policy_net(state_batch, ens_num=ens_num).gather(1, action_batch)
nq = target_net(n_state_batch, ens_num=ens_num).max(1)[0].detach()
# Compute the expected Q values
expected_state_action_values = (
nq * GAMMA)*(1.-done_batch[:, 0]) + reward_batch[:, 0]
# Compute Huber loss
loss = F.smooth_l1_loss(q, expected_state_action_values.unsqueeze(1))
total_loss += loss
# Optimize the model
optimizer.zero_grad()
total_loss.backward()
optimizer.step()
priorities = rank_func(policy_net, state_batch,
batch_size=batch_size, device=device)
priorities = (F.relu(priorities) + 1e-5).tolist()
memory.update_priorities(idxs, priorities)
return total_loss.detach() / policy_net.get_num_ensembles()
def standard_optimization_priority_td(policy_net, target_net, optimizer,
memory, batch_size=128, GAMMA=0.99,
beta=0.7, device='cuda'):
data = memory.sample(batch_size, beta)
state_batch = torch.FloatTensor(data[0]).to(device)
action_batch = torch.from_numpy(data[1]).to(device).unsqueeze(1)
reward_batch = torch.FloatTensor(data[2]).to(device).unsqueeze(1)
n_state_batch = torch.FloatTensor(data[3]).to(device)
done_batch = torch.FloatTensor(data[4]).to(device).unsqueeze(1)
weights = torch.FloatTensor(data[5]).to(device).unsqueeze(1)  # match q's (batch, 1) shape to avoid broadcasting to (batch, batch)
idxs = data[6]
q = policy_net(state_batch).gather(1, action_batch)
nq = target_net(n_state_batch).max(1)[0].detach()
# Compute the expected Q values
expected_state_action_values = (
nq * GAMMA)*(1.-done_batch[:, 0]) + reward_batch[:, 0]
# Compute the importance-weighted MSE loss
loss = (((q - expected_state_action_values.unsqueeze(1)) ** 2) * weights).mean()
with torch.no_grad():
priorities = (q - expected_state_action_values.unsqueeze(1)) ** 2
priorities = (priorities + 1e-7).squeeze(1).tolist()
memory.update_priorities(idxs, priorities)
# Optimize the model
optimizer.zero_grad()
loss.backward()
optimizer.step()
return loss.detach()
def std_opt_priority_td_V2(policy_net, target_net, optimizer,
memory, batch_size=128, GAMMA=0.99,
device='cuda'):
data, idxs, weights = memory.sample(batch_size)
state_batch, action_batch, reward_batch, n_state_batch, done_batch = zip(
*data)
state_batch = torch.stack(state_batch).to(device)
batch_len = state_batch.size(0)
action_batch = torch.tensor(action_batch).to(device).view(batch_len, 1)
reward_batch = torch.FloatTensor(
reward_batch).to(device).view(batch_len, 1)
n_state_batch = torch.stack(n_state_batch).to(device)
done_batch = torch.FloatTensor(done_batch).to(device).view(batch_len, 1)
q = policy_net(state_batch).gather(1, action_batch)
nq = target_net(n_state_batch).max(1)[0].detach()
# Compute the expected Q values
expected_state_action_values = (
nq * GAMMA)*(1.-done_batch[:, 0]) + reward_batch[:, 0]
# Compute Huber loss
loss = F.smooth_l1_loss(q, expected_state_action_values.unsqueeze(1))
# Optimize the model
optimizer.zero_grad()
loss.backward()
for param in policy_net.parameters():
param.grad.data.clamp_(-1, 1)
optimizer.step()
priorities = (q - expected_state_action_values.unsqueeze(1)).detach()
priorities = priorities.squeeze().abs().cpu().numpy().tolist()
memory.update_priorities(idxs, priorities)
return loss.detach()
def std_opt_priority_td_V3(policy_net, target_net, optimizer,
memory, batch_size=128, GAMMA=0.99,
device='cuda'):
states, actions, rewards, next_states, dones, idx, weights = memory.sample(
batch_size)
states = torch.FloatTensor(np.float32(states)).to(device)
next_states = torch.FloatTensor(np.float32(next_states)).to(device)
actions = torch.Tensor(actions).long().to(device)
rewards = torch.FloatTensor(rewards).to(device).unsqueeze(1)
dones = torch.FloatTensor(dones).to(device).unsqueeze(1)
weights = torch.FloatTensor(weights).unsqueeze(1).to(device)
q = policy_net(states).gather(1, actions.unsqueeze(1))
nq = target_net(next_states).max(1)[0].unsqueeze(1).detach()
# Compute the expected Q values
expected_q_val = rewards + (nq * GAMMA)*(1.-dones)
# Compute the importance-weighted MSE loss
loss = (((q - expected_q_val) ** 2) * weights).mean()
prios = abs((q - expected_q_val).cpu()).squeeze() + 1e-5
memory.update_priorities(idx, prios.data.cpu().numpy())
# Optimize the model
optimizer.zero_grad()
loss.backward()
optimizer.step()
return loss.detach()
def std_opt_ens_priority_V3(policy_net, target_net, optimizer,
memory, rank_func, batch_size=128,
GAMMA=0.99, device='cuda'):
states, actions, rewards, next_states, dones, idx, weights = memory.sample(
batch_size)
states = torch.FloatTensor(np.float32(states)).to(device)
next_states = torch.FloatTensor(np.float32(next_states)).to(device)
actions = torch.Tensor(actions).long().to(device)
rewards = torch.FloatTensor(rewards).to(device).unsqueeze(1)
dones = torch.FloatTensor(dones).to(device).unsqueeze(1)
weights = torch.FloatTensor(weights).unsqueeze(1).to(device)
total_loss = 0
for ens_num in range(policy_net.get_num_ensembles()):
q = policy_net(states, ens_num=ens_num).gather(1, actions.unsqueeze(1))
nq = target_net(next_states, ens_num=ens_num).max(1)[
0].unsqueeze(1).detach()
# Compute the expected Q values
expected_q_val = rewards + (nq * GAMMA)*(1.-dones)
# Compute the importance-weighted MSE loss
loss = (((q - expected_q_val) ** 2) * weights).mean()
total_loss += loss
prios = F.relu(rank_func(policy_net, states) + 1e-3) + 1e-3
prios = prios.clamp(max=10.0)  # cap priorities at 10
memory.update_priorities(idx, prios.data.cpu().numpy())
# Optimize the model
optimizer.zero_grad()
total_loss.backward()
optimizer.step()
return total_loss.detach() / policy_net.get_num_ensembles()  # average over ensemble members, as in the sibling functions
def standard_opt_ens_priority_td(policy_net, target_net, optimizer,
memory: PrioritizedReplayBuffer,
batch_size=128, GAMMA=0.99, beta=0.4,
device='cuda'):
if len(memory) < batch_size:
return 0
data = memory.sample(batch_size, beta)
state_batch = torch.FloatTensor(data[0]).to(device)
batch_len = state_batch.size(0)
action_batch = torch.from_numpy(data[1]).to(device).view(batch_len, 1)
reward_batch = torch.FloatTensor(data[2]).to(device).view(batch_len, 1)
n_state_batch = torch.FloatTensor(data[3]).to(device)
done_batch = torch.FloatTensor(data[4]).to(device).view(batch_len, 1)
idxs = data[6]
total_loss = 0
all_loss = 0
for ens_num in range(policy_net.get_num_ensembles()):
q = policy_net(state_batch, ens_num=ens_num).gather(1, action_batch)
nq = target_net(n_state_batch, ens_num=ens_num).max(1)[0].detach()
# Compute the expected Q values
expected_state_action_values = (
nq * GAMMA)*(1.-done_batch[:, 0]) + reward_batch[:, 0]
all_loss += torch.abs(q - expected_state_action_values.unsqueeze(1))
# Compute Huber loss
loss = F.smooth_l1_loss(q, expected_state_action_values.unsqueeze(1))
total_loss += loss
# Optimize the model
optimizer.zero_grad()
total_loss.backward()
optimizer.step()
avg_loss = all_loss / policy_net.get_num_ensembles()
priorities = avg_loss.squeeze().tolist()
memory.update_priorities(idxs, priorities)
return total_loss.detach() / policy_net.get_num_ensembles()
def std_opt_bootstrap_dqn(policy_net, target_net, optimizer, memory,
batch_size=128, GAMMA=0.99, device='cuda'):
"""
Apply the standard procedure to an ensemble of deep Q network.
"""
if len(memory) < batch_size:
return 0
state_batch, action_batch, reward_batch, n_state_batch, done_batch = memory.sample(
batch_size)
state_batch = state_batch.to(device)
action_batch = action_batch.to(device)
reward_batch = reward_batch.to(device)
n_state_batch = n_state_batch.to(device)
done_batch = done_batch.to(device)
total_loss = 0
qs = policy_net(state_batch, training=True)
nqs = target_net(n_state_batch, training=True)
for i in range(len(qs)):
q = qs[i].gather(1, action_batch)
nq = nqs[i].max(1)[0].detach()
# Compute the expected Q values
expected_state_action_values = (
nq * GAMMA)*(1.-done_batch[:, 0]) + reward_batch[:, 0]
# Compute Huber loss
loss = F.smooth_l1_loss(q, expected_state_action_values.unsqueeze(1))
total_loss += loss
# Optimize the model
optimizer.zero_grad()
total_loss.backward()
optimizer.step()
return total_loss / len(qs)
def to_policy(q_values, tau=0.1):
return F.softmax(q_values / tau, dim=1)
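# Usage sketch (illustrative values): with the default temperature tau=0.1 the
# softmax is sharply peaked at the argmax; larger tau flattens the policy.
#
# q = torch.tensor([[1.0, 1.5, 3.0]])
# to_policy(q)             # ~tensor([[2.0e-09, 3.1e-07, 1.0e+00]])
# to_policy(q, tau=10.0)   # much flatter: ~tensor([[0.31, 0.32, 0.37]])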
| 34.450882
| 138
| 0.647145
| 3,733
| 27,354
| 4.458345
| 0.048754
| 0.062489
| 0.032025
| 0.040558
| 0.916662
| 0.903082
| 0.88746
| 0.862525
| 0.842817
| 0.829238
| 0
| 0.019974
| 0.244096
| 27,354
| 793
| 139
| 34.494325
| 0.78493
| 0.055751
| 0
| 0.766038
| 0
| 0
| 0.003275
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.04717
| false
| 0
| 0.011321
| 0.001887
| 0.139623
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 7
| 46162bea42edd6a3890bd87220541fd6a2426ca8
| 26,111
| py
| Python
| timingandestimationplugin/reports.py
| AccelerationNet/trac-timingandestimationplugin
| 3217cd1ed8693f39c89a9d34b38bec3af0a0b99d
| [ "MIT" ] | null | null | null |
| timingandestimationplugin/reports.py
| AccelerationNet/trac-timingandestimationplugin
| 3217cd1ed8693f39c89a9d34b38bec3af0a0b99d
| [ "MIT" ] | 1
| 2015-02-26T22:30:26.000Z
| 2015-03-10T15:19:07.000Z
| timingandestimationplugin/reports.py
| AccelerationNet/trac-timingandestimationplugin
| 3217cd1ed8693f39c89a9d34b38bec3af0a0b99d
| [ "MIT" ] | null | null | null |
# IF YOU ADD A NEW SECTION OF REPORTS, you will need to make
# sure that section is also added to the all_reports hashtable
# near the bottom.
# Please try to keep this clean
billing_reports = [
{
"uuid":"b24f08c0-d41f-4c63-93a5-25e18a8513c2",
"title":"Ticket Work Summary",
"version":21,
"sql":"""
SELECT __ticket__ as __group__, __style__, ticket,
newvalue as Work_added, author, time as datetime, _ord
FROM(
SELECT '' as __style__, author,
t.summary as __ticket__,
t.id as ticket,
CASE WHEN newvalue = '' OR newvalue IS NULL THEN 0
ELSE CAST( newvalue AS DECIMAL ) END AS newvalue,
ticket_change.time as time, 0 as _ord
FROM ticket_change
JOIN ticket t on t.id = ticket_change.ticket
LEFT JOIN ticket_custom as billable on billable.ticket = t.id
and billable.name = 'billable'
WHERE field = 'hours' and
t.status IN (#STATUSES#)
AND billable.value in ($BILLABLE, $UNBILLABLE)
AND ticket_change.time >= $STARTDATE
AND ticket_change.time < $ENDDATE
UNION
SELECT 'background-color:#DFE;' as __style__,
'Total work done on the ticket in the selected time period ' as author,
t.summary as __ticket__,
t.id as ticket,
SUM( CASE WHEN newvalue = '' OR newvalue IS NULL THEN 0
ELSE CAST( newvalue AS DECIMAL ) END ) as newvalue,
NULL as time, 1 as _ord
FROM ticket_change
JOIN ticket t on t.id = ticket_change.ticket
LEFT JOIN ticket_custom as billable on billable.ticket = t.id
and billable.name = 'billable'
WHERE field = 'hours' and
t.status IN (#STATUSES#)
AND billable.value in ($BILLABLE, $UNBILLABLE)
AND ticket_change.time >= $STARTDATE
AND ticket_change.time < $ENDDATE
GROUP By t.id, t.summary
) as tbl
ORDER BY __ticket__, _ord ASC, time ASC
"""
},#END Ticket work summary
{
"uuid":"af13564f-0e36-4a17-96c0-632dc68d8d14",
"title":"Milestone Work Summary",
"version":18,
"sql":"""
SELECT
milestone as __group__, __style__, ticket, summary, newvalue as Work_added,
time as datetime, _ord
FROM(
SELECT '' as __style__, t.id as ticket,
SUM( CASE WHEN newvalue = '' OR newvalue IS NULL THEN 0
ELSE CAST( newvalue AS DECIMAL ) END) as newvalue, t.summary as summary,
MAX(ticket_change.time) as time, t.milestone as milestone, 0 as _ord
FROM ticket_change
JOIN ticket t on t.id = ticket_change.ticket
LEFT JOIN ticket_custom as billable on billable.ticket = t.id
and billable.name = 'billable'
WHERE field = 'hours' and
t.status IN (#STATUSES#)
AND billable.value in ($BILLABLE, $UNBILLABLE)
AND ticket_change.time >= $STARTDATE
AND ticket_change.time < $ENDDATE
GROUP BY t.milestone, t.id, t.summary
UNION
SELECT 'background-color:#DFE;' as __style__, 0 as ticket,
sum( CASE WHEN newvalue = '' OR newvalue IS NULL THEN 0
ELSE CAST( newvalue AS DECIMAL ) END) as newvalue, 'Total work done' as summary,
NULL as time, t.milestone as milestone, 1 as _ord
FROM ticket_change
JOIN ticket t on t.id = ticket_change.ticket
LEFT JOIN ticket_custom as billable on billable.ticket = t.id
and billable.name = 'billable'
WHERE field = 'hours' and
t.status IN (#STATUSES#)
AND billable.value in ($BILLABLE, $UNBILLABLE)
AND ticket_change.time >= $STARTDATE
AND ticket_change.time < $ENDDATE
GROUP By t.milestone
) as tbl
ORDER BY milestone, _ord ASC, ticket, time
"""
},#END Milestone work summary
{
"uuid":"7bd4b0ce-da6d-4b11-8be3-07e65b540d99",
"title":"Developer Work Summary",
"version":18,
"sql":"""
SELECT author as __group__,__style__, ticket, summary,
newvalue as Work_added, time as datetime, _ord
FROM(
SELECT '' as __style__, author, t.id as ticket,
t.summary as summary,
CASE WHEN newvalue = '' OR newvalue IS NULL THEN 0
ELSE CAST( newvalue AS DECIMAL ) END as newvalue,
ticket_change.time as time, 0 as _ord
FROM ticket_change
JOIN ticket t on t.id = ticket_change.ticket
LEFT JOIN ticket_custom as billable on billable.ticket = t.id
and billable.name = 'billable'
WHERE field = 'hours' and
t.status IN (#STATUSES#)
AND billable.value in ($BILLABLE, $UNBILLABLE)
AND ticket_change.time >= $STARTDATE
AND ticket_change.time < $ENDDATE
UNION
SELECT 'background-color:#DFE;' as __style__, author, 0 as ticket,
Null as summary,
SUM( CASE WHEN newvalue = '' OR newvalue IS NULL THEN 0
ELSE CAST( newvalue AS DECIMAL ) END) as newvalue,
NULL as time, 1 as _ord
FROM ticket_change
JOIN ticket t on t.id = ticket_change.ticket
LEFT JOIN ticket_custom as billable on billable.ticket = t.id
and billable.name = 'billable'
WHERE field = 'hours' and
t.status IN (#STATUSES#)
AND billable.value in ($BILLABLE, $UNBILLABLE)
AND ticket_change.time >= $STARTDATE
AND ticket_change.time < $ENDDATE
GROUP By author
) as tbl
ORDER BY author, _ord ASC, time
"""
},#END Hours Per Developer
]
th_version = 17
ticket_hours_reports = [
{
"uuid":"8d785cdb-dcf5-43c9-b2a6-216997b0011a",
"title": "Ticket Hours",
"version":th_version,
"sql": """
SELECT __color__, __style__, ticket, summary, component ,version, severity,
milestone, status, owner, Estimated_work, Total_work, billable,_ord
FROM (
SELECT p.value AS __color__,
'' as __style__,
t.id AS ticket, summary AS summary, -- ## Break line here
component,version, severity, milestone, status, owner,
CASE WHEN EstimatedHours.value = '' OR EstimatedHours.value IS NULL THEN 0
ELSE CAST( EstimatedHours.value AS DECIMAL ) END as Estimated_work,
CASE WHEN totalhours.value = '' OR totalhours.value IS NULL THEN 0
ELSE CAST( totalhours.value AS DECIMAL ) END as Total_work,
CASE WHEN billable.value = '1' THEN 'Y' else 'N' END as billable,
time AS created, changetime AS modified, -- ## Dates are formatted
description AS _description_, -- ## Uses a full row
changetime AS _changetime,
reporter AS _reporter
,0 as _ord
FROM ticket as t
LEFT JOIN enum as p ON p.name=t.priority AND p.type='priority'
LEFT JOIN ticket_custom as EstimatedHours ON EstimatedHours.name='estimatedhours'
AND EstimatedHours.Ticket = t.Id
LEFT JOIN ticket_custom as totalhours ON totalhours.name='totalhours'
AND totalhours.Ticket = t.Id
LEFT JOIN ticket_custom as billable ON billable.name='billable'
AND billable.Ticket = t.Id
WHERE t.status IN (#STATUSES#)
AND billable.value in ($BILLABLE, $UNBILLABLE)
UNION
SELECT '1' AS __color__,
'background-color:#DFE;' as __style__,
0 as ticket, 'Total' AS summary,
NULL as component,NULL as version, NULL as severity, NULL as milestone,
'Time Remaining: ' as status,
CAST(
SUM(CASE WHEN EstimatedHours.value = '' OR EstimatedHours.value IS NULL THEN 0
ELSE CAST( EstimatedHours.value AS DECIMAL ) END) -
SUM(CASE WHEN totalhours.value = '' OR totalhours.value IS NULL THEN 0
ELSE CAST( totalhours.value AS DECIMAL ) END)
AS CHAR(512)) as owner,
SUM(CASE WHEN EstimatedHours.value = '' OR EstimatedHours.value IS NULL THEN 0
ELSE CAST( EstimatedHours.value AS DECIMAL ) END) as Estimated_work,
SUM(CASE WHEN totalhours.value = '' OR totalhours.value IS NULL THEN 0
ELSE CAST( totalhours.value AS DECIMAL ) END) as Total_work,
NULL as billable,
NULL as created, NULL as modified, -- ## Dates are formatted
NULL AS _description_,
NULL AS _changetime,
NULL AS _reporter
,1 as _ord
FROM ticket as t
LEFT JOIN enum as p ON p.name=t.priority AND p.type='priority'
LEFT JOIN ticket_custom as EstimatedHours ON EstimatedHours.name='estimatedhours'
AND EstimatedHours.Ticket = t.Id
LEFT JOIN ticket_custom as totalhours ON totalhours.name='totalhours'
AND totalhours.Ticket = t.Id
LEFT JOIN ticket_custom as billable ON billable.name='billable'
AND billable.Ticket = t.Id
WHERE t.status IN (#STATUSES#)
AND billable.value in ($BILLABLE, $UNBILLABLE)
) as tbl
ORDER BY _ord ASC, ticket
"""
},
#END Ticket Hours
{
"uuid":"71e7c36d-e512-4d0b-b499-087d4d20ff0b",
"title": "Ticket Hours with Description",
"version":th_version,
"sql": """
SELECT __color__, __style__, ticket, summary, component ,version, severity,
milestone, status, owner, Estimated_work, Total_work, billable
-- ## ,created, modified, -- ## Dates are formatted
,_description_
-- ## _changetime,
-- ## _reporter
,_ord
FROM (
SELECT p.value AS __color__,
'' as __style__,
t.id AS ticket, summary AS summary, -- ## Break line here
component,version, severity, milestone, status, owner,
CASE WHEN EstimatedHours.value = '' OR EstimatedHours.value IS NULL THEN 0
ELSE CAST( EstimatedHours.value AS DECIMAL ) END as Estimated_work,
CASE WHEN totalhours.value = '' OR totalhours.value IS NULL THEN 0
ELSE CAST( totalhours.value AS DECIMAL ) END as Total_work,
CASE WHEN billable.value = '1' THEN 'Y'
else 'N'
END as billable,
time AS created, changetime AS modified, -- ## Dates are formatted
description AS _description_, -- ## Uses a full row
changetime AS _changetime,
reporter AS _reporter
,0 as _ord
FROM ticket as t
LEFT JOIN enum as p ON p.name=t.priority AND p.type='priority'
LEFT JOIN ticket_custom as EstimatedHours ON EstimatedHours.name='estimatedhours'
AND EstimatedHours.Ticket = t.Id
LEFT JOIN ticket_custom as totalhours ON totalhours.name='totalhours'
AND totalhours.Ticket = t.Id
LEFT JOIN ticket_custom as billable ON billable.name='billable'
AND billable.Ticket = t.Id
WHERE t.status IN (#STATUSES#)
AND billable.value in ($BILLABLE, $UNBILLABLE)
UNION
SELECT '1' AS __color__,
'background-color:#DFE;' as __style__,
0 as ticket, 'Total' AS summary,
NULL as component,NULL as version, NULL as severity, NULL as milestone,
'Time Remaining: ' as status,
CAST(
SUM(CASE WHEN EstimatedHours.value = '' OR EstimatedHours.value IS NULL THEN 0
ELSE CAST( EstimatedHours.value AS DECIMAL ) END) -
SUM(CASE WHEN totalhours.value = '' OR totalhours.value IS NULL THEN 0
ELSE CAST( totalhours.value AS DECIMAL ) END)
AS CHAR(512)) as owner,
SUM(CASE WHEN EstimatedHours.value = '' OR EstimatedHours.value IS NULL THEN 0
ELSE CAST( EstimatedHours.value AS DECIMAL ) END) as Estimated_work,
SUM(CASE WHEN totalhours.value = '' OR totalhours.value IS NULL THEN 0
ELSE CAST( totalhours.value AS DECIMAL ) END) as Total_work,
NULL as billable,
NULL as created, NULL as modified, -- ## Dates are formatted
NULL AS _description_,
NULL AS _changetime,
NULL AS _reporter
,1 as _ord
FROM ticket as t
LEFT JOIN enum as p ON p.name=t.priority AND p.type='priority'
LEFT JOIN ticket_custom as EstimatedHours ON EstimatedHours.name='estimatedhours'
AND EstimatedHours.Ticket = t.Id
LEFT JOIN ticket_custom as totalhours ON totalhours.name='totalhours'
AND totalhours.Ticket = t.Id
LEFT JOIN ticket_custom as billable ON billable.name='billable'
AND billable.Ticket = t.Id
WHERE t.status IN (#STATUSES#)
AND billable.value in ($BILLABLE, $UNBILLABLE)
) as tbl
ORDER BY _ord ASC, ticket
"""
},
#END Ticket Hours
{
"uuid":"5f33b102-e6a6-47e8-976c-ac7a6794a909",
"title":"Ticket Hours Grouped By Component",
"version":th_version,
"sql": """
SELECT __color__, __group__, __style__, ticket, summary, __component__ ,version,
severity, milestone, status, owner, Estimated_work, total_work, billable,
_ord
FROM (
SELECT p.value AS __color__,
t.component AS __group__,
'' as __style__,
t.id AS ticket, summary AS summary, -- ## Break line here
component as __component__,version, severity, milestone, status, owner,
CASE WHEN EstimatedHours.value = '' OR EstimatedHours.value IS NULL THEN 0
ELSE CAST( EstimatedHours.value AS DECIMAL ) END as Estimated_work,
CASE WHEN totalhours.value = '' OR totalhours.value IS NULL THEN 0
ELSE CAST( totalhours.value AS DECIMAL ) END as Total_work,
CASE WHEN billable.value = '1' THEN 'Y'
else 'N'
END as billable,
time AS created, changetime AS modified, -- ## Dates are formatted
description AS _description_, -- ## Uses a full row
changetime AS _changetime,
reporter AS _reporter
,0 as _ord
FROM ticket as t
LEFT JOIN enum as p ON p.name=t.priority AND p.type='priority'
LEFT JOIN ticket_custom as EstimatedHours ON EstimatedHours.name='estimatedhours'
AND EstimatedHours.Ticket = t.Id
LEFT JOIN ticket_custom as totalhours ON totalhours.name='totalhours'
AND totalhours.Ticket = t.Id
LEFT JOIN ticket_custom as billable ON billable.name='billable'
AND billable.Ticket = t.Id
WHERE t.status IN (#STATUSES#)
AND billable.value in ($BILLABLE, $UNBILLABLE)
UNION
SELECT '1' AS __color__,
t.component AS __group__,
'background-color:#DFE;' as __style__,
0 as ticket, 'Total work' AS summary,
t.component as __component__, NULL as version, NULL as severity,
NULL as milestone, 'Time Remaining: ' as status,
CAST(
SUM(CASE WHEN EstimatedHours.value = '' OR EstimatedHours.value IS NULL THEN 0
ELSE CAST( EstimatedHours.value AS DECIMAL ) END) -
SUM(CASE WHEN totalhours.value = '' OR totalhours.value IS NULL THEN 0
ELSE CAST( totalhours.value AS DECIMAL ) END)
AS CHAR(512)) as owner,
SUM(CASE WHEN EstimatedHours.value = '' OR EstimatedHours.value IS NULL THEN 0
ELSE CAST( EstimatedHours.value AS DECIMAL ) END) as Estimated_work,
SUM(CASE WHEN totalhours.value = '' OR totalhours.value IS NULL THEN 0
ELSE CAST( totalhours.value AS DECIMAL ) END) as Total_work,
NULL as billable,
NULL as created,
NULL as modified, -- ## Dates are formatted
NULL AS _description_,
NULL AS _changetime,
NULL AS _reporter
,1 as _ord
FROM ticket as t
LEFT JOIN enum as p ON p.name=t.priority AND p.type='priority'
LEFT JOIN ticket_custom as EstimatedHours ON EstimatedHours.name='estimatedhours'
AND EstimatedHours.Ticket = t.Id
LEFT JOIN ticket_custom as totalhours ON totalhours.name='totalhours'
AND totalhours.Ticket = t.Id
LEFT JOIN ticket_custom as billable ON billable.name='billable'
AND billable.Ticket = t.Id
WHERE t.status IN (#STATUSES#)
AND billable.value in ($BILLABLE, $UNBILLABLE)
GROUP BY t.component
) as tbl
ORDER BY __component__, _ord ASC,ticket
"""
},
# END Ticket Hours GROUPED BY COMPONENT
{
"uuid":"7816f034-a174-4a94-aed6-358fb648b2fc",
"title":"Ticket Hours Grouped By Component with Description",
"version":th_version,
"sql": """
SELECT __color__, __group__, __style__, ticket, summary, __component__ ,
version, severity, milestone, status, owner, Estimated_work, Total_work,
billable, _description_, _ord
FROM (
SELECT p.value AS __color__,
t.component AS __group__,
'' as __style__,
t.id AS ticket, summary AS summary, -- ## Break line here
component as __component__, version, severity, milestone, status, owner,
CASE WHEN EstimatedHours.value = '' OR EstimatedHours.value IS NULL THEN 0
ELSE CAST( EstimatedHours.value AS DECIMAL ) END as Estimated_work,
CASE WHEN totalhours.value = '' OR totalhours.value IS NULL THEN 0
ELSE CAST( totalhours.value AS DECIMAL ) END as Total_work,
CASE WHEN billable.value = '1' THEN 'Y' else 'N' END as billable,
time AS created, changetime AS modified, -- ## Dates are formatted
description AS _description_, -- ## Uses a full row
changetime AS _changetime,
reporter AS _reporter
,0 as _ord
FROM ticket as t
LEFT JOIN enum as p ON p.name=t.priority AND p.type='priority'
LEFT JOIN ticket_custom as EstimatedHours ON EstimatedHours.name='estimatedhours'
AND EstimatedHours.Ticket = t.Id
LEFT JOIN ticket_custom as totalhours ON totalhours.name='totalhours'
AND totalhours.Ticket = t.Id
LEFT JOIN ticket_custom as billable ON billable.name='billable'
AND billable.Ticket = t.Id
WHERE t.status IN (#STATUSES#)
AND billable.value in ($BILLABLE, $UNBILLABLE)
UNION
SELECT '1' AS __color__,
t.component AS __group__,
'background-color:#DFE;' as __style__,
0 as ticket, 'Total work' AS summary,
t.component as __component__, NULL as version, NULL as severity,
NULL as milestone, 'Time Remaining: ' as status,
CAST(
SUM(CASE WHEN EstimatedHours.value = '' OR EstimatedHours.value IS NULL THEN 0
ELSE CAST( EstimatedHours.value AS DECIMAL ) END) -
SUM(CASE WHEN totalhours.value = '' OR totalhours.value IS NULL THEN 0
ELSE CAST( totalhours.value AS DECIMAL ) END)
AS CHAR(512)) as owner,
SUM(CASE WHEN EstimatedHours.value = '' OR EstimatedHours.value IS NULL THEN 0
ELSE CAST( EstimatedHours.value AS DECIMAL ) END) as Estimated_work,
SUM(CASE WHEN totalhours.value = '' OR totalhours.value IS NULL THEN 0
ELSE CAST( totalhours.value AS DECIMAL ) END) as Total_work,
NULL as billable,
NULL as created, NULL as modified, -- ## Dates are formatted
NULL AS _description_,
NULL AS _changetime,
NULL AS _reporter
,1 as _ord
FROM ticket as t
LEFT JOIN enum as p ON p.name=t.priority AND p.type='priority'
LEFT JOIN ticket_custom as EstimatedHours ON EstimatedHours.name='estimatedhours'
AND EstimatedHours.Ticket = t.Id
LEFT JOIN ticket_custom as totalhours ON totalhours.name='totalhours'
AND totalhours.Ticket = t.Id
LEFT JOIN ticket_custom as billable ON billable.name='billable'
AND billable.Ticket = t.Id
WHERE t.status IN (#STATUSES#)
AND billable.value in ($BILLABLE, $UNBILLABLE)
GROUP BY t.component
) as tbl
ORDER BY __component__, _ord ASC, ticket
"""
},
# END Ticket Hours Grouped BY Component with Description
{
"uuid":"03815803-7688-4f3a-8e65-8d254cc1d1fb",
"title":"Ticket Hours Grouped By Milestone",
"version":th_version,
"sql": """
SELECT __color__, __group__, __style__, ticket, summary, component ,version,
severity, __milestone__, status, owner, Estimated_work, Total_work, billable,
_ord
FROM (
SELECT p.value AS __color__,
t.milestone AS __group__,
'' as __style__,
t.id AS ticket, summary AS summary, -- ## Break line here
component,version, severity, milestone as __milestone__, status, owner,
CASE WHEN EstimatedHours.value = '' OR EstimatedHours.value IS NULL THEN 0
ELSE CAST( EstimatedHours.value AS DECIMAL ) END as Estimated_work,
CASE WHEN totalhours.value = '' OR totalhours.value IS NULL THEN 0
ELSE CAST( totalhours.value AS DECIMAL ) END as Total_work,
CASE WHEN billable.value = '1' THEN 'Y'
else 'N'
END as billable,
time AS created, changetime AS modified, -- ## Dates are formatted
description AS _description_, -- ## Uses a full row
changetime AS _changetime,
reporter AS _reporter, 0 as _ord
FROM ticket as t
LEFT JOIN enum as p ON p.name=t.priority AND p.type='priority'
LEFT JOIN ticket_custom as EstimatedHours ON EstimatedHours.name='estimatedhours'
AND EstimatedHours.Ticket = t.Id
LEFT JOIN ticket_custom as totalhours ON totalhours.name='totalhours'
AND totalhours.Ticket = t.Id
LEFT JOIN ticket_custom as billable ON billable.name='billable'
AND billable.Ticket = t.Id
WHERE t.status IN (#STATUSES#)
AND billable.value in ($BILLABLE, $UNBILLABLE)
UNION
SELECT '1' AS __color__,
t.milestone AS __group__,
'background-color:#DFE;' as __style__,
0 as ticket, 'Total work' AS summary,
NULL as component,NULL as version, NULL as severity,
t.milestone as __milestone__, 'Time Remaining: ' as status,
CAST(
SUM(CASE WHEN EstimatedHours.value = '' OR EstimatedHours.value IS NULL THEN 0
ELSE CAST( EstimatedHours.value AS DECIMAL ) END) -
SUM(CASE WHEN totalhours.value = '' OR totalhours.value IS NULL THEN 0
ELSE CAST( totalhours.value AS DECIMAL ) END)
AS CHAR(512)) as owner,
SUM(CASE WHEN EstimatedHours.value = '' OR EstimatedHours.value IS NULL THEN 0
ELSE CAST( EstimatedHours.value AS DECIMAL ) END) as Estimated_work,
SUM(CASE WHEN totalhours.value = '' OR totalhours.value IS NULL THEN 0
ELSE CAST( totalhours.value AS DECIMAL ) END) as Total_work,
NULL as billable,
NULL as created, NULL as modified, -- ## Dates are formatted
NULL AS _description_,
NULL AS _changetime,
NULL AS _reporter
,1 as _ord
FROM ticket as t
LEFT JOIN enum as p ON p.name=t.priority AND p.type='priority'
LEFT JOIN ticket_custom as EstimatedHours ON EstimatedHours.name='estimatedhours'
AND EstimatedHours.Ticket = t.Id
LEFT JOIN ticket_custom as totalhours ON totalhours.name='totalhours'
AND totalhours.Ticket = t.Id
LEFT JOIN ticket_custom as billable ON billable.name='billable'
AND billable.Ticket = t.Id
WHERE t.status IN (#STATUSES#)
AND billable.value in ($BILLABLE, $UNBILLABLE)
GROUP BY t.milestone
) as tbl
ORDER BY __milestone__, _ord ASC, ticket
"""
},
#END Ticket Hours Grouped By MileStone
{
"uuid":"040c9025-7641-4d18-96ad-2b26b4095566",
"title":"Ticket Hours Grouped By MileStone with Description",
"version":th_version,
"sql": """
SELECT __color__, __group__, __style__, ticket, summary, component ,version, severity,
__milestone__, status, owner, Estimated_work, Total_work, billable,
_description_, _ord
FROM (
SELECT p.value AS __color__,
t.milestone AS __group__,
'' as __style__,
t.id AS ticket, summary AS summary, -- ## Break line here
component,version, severity, milestone as __milestone__, status, owner,
CASE WHEN EstimatedHours.value = '' OR EstimatedHours.value IS NULL THEN 0
ELSE CAST( EstimatedHours.value AS DECIMAL ) END as Estimated_work,
CASE WHEN totalhours.value = '' OR totalhours.value IS NULL THEN 0
ELSE CAST( totalhours.value AS DECIMAL ) END as Total_work,
CASE WHEN billable.value = '1' THEN 'Y'
else 'N'
END as billable,
time AS created, changetime AS modified, -- ## Dates are formatted
description AS _description_, -- ## Uses a full row
changetime AS _changetime,
reporter AS _reporter
,0 as _ord
FROM ticket as t
LEFT JOIN enum as p ON p.name=t.priority AND p.type='priority'
LEFT JOIN ticket_custom as EstimatedHours ON EstimatedHours.name='estimatedhours'
AND EstimatedHours.Ticket = t.Id
LEFT JOIN ticket_custom as totalhours ON totalhours.name='totalhours'
AND totalhours.Ticket = t.Id
LEFT JOIN ticket_custom as billable ON billable.name='billable'
AND billable.Ticket = t.Id
WHERE t.status IN (#STATUSES#)
AND billable.value in ($BILLABLE, $UNBILLABLE)
UNION
SELECT '1' AS __color__,
t.milestone AS __group__,
'background-color:#DFE;' as __style__,
0 as ticket, 'Total work' AS summary,
NULL as component,NULL as version, NULL as severity,
t.milestone as __milestone__,
'Time Remaining: ' as status,
CAST(
SUM(CASE WHEN EstimatedHours.value = '' OR EstimatedHours.value IS NULL THEN 0
ELSE CAST( EstimatedHours.value AS DECIMAL ) END) -
SUM(CASE WHEN totalhours.value = '' OR totalhours.value IS NULL THEN 0
ELSE CAST( totalhours.value AS DECIMAL ) END)
AS CHAR(512)) as owner,
SUM(CASE WHEN EstimatedHours.value = '' OR EstimatedHours.value IS NULL THEN 0
ELSE CAST( EstimatedHours.value AS DECIMAL ) END) as Estimated_work,
SUM(CASE WHEN totalhours.value = '' OR totalhours.value IS NULL THEN 0
ELSE CAST( totalhours.value AS DECIMAL ) END) as Total_work,
NULL as billable,
NULL as created, NULL as modified, -- ## Dates are formatted
NULL AS _description_,
NULL AS _changetime,
NULL AS _reporter, 1 as _ord
FROM ticket as t
LEFT JOIN enum as p ON p.name=t.priority AND p.type='priority'
LEFT JOIN ticket_custom as EstimatedHours ON EstimatedHours.name='estimatedhours'
AND EstimatedHours.Ticket = t.Id
LEFT JOIN ticket_custom as totalhours ON totalhours.name='totalhours'
AND totalhours.Ticket = t.Id
LEFT JOIN ticket_custom as billable ON billable.name='billable'
AND billable.Ticket = t.Id
WHERE t.status IN (#STATUSES#)
AND billable.value in ($BILLABLE, $UNBILLABLE)
GROUP BY t.milestone
) as tbl
ORDER BY __milestone__, _ord ASC, ticket
"""
}
#END Ticket Hours Grouped By MileStone with Description
]
all_reports = [
{"title":"Billing Reports",
"description": "Currently the billing reports are the only time based reports, and are therefore useful for getting an idea of what tickets had times (and totals), and which developers spent their time where. ",
"reports":billing_reports},
{"title":"Ticket/Hour Reports",
"description": "These reports are useful for reviewing estimates on a large scale or getting an idea of the project at large. These reports currently ignore the start/end dates. ",
"reports": ticket_hours_reports}
]
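# Minimal sketch of how a consumer might walk this structure (illustrative
# only; the plugin's own report manager is what actually installs these):
#
# for section in all_reports:
#     print("%s: %d reports" % (section["title"], len(section["reports"])))
#     for report in section["reports"]:
#         print("  %s (v%d, uuid %s)" % (report["title"], report["version"], report["uuid"]))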
| 38.511799
| 216
| 0.679062
| 3,546
| 26,111
| 4.849408
| 0.056684
| 0.010468
| 0.023029
| 0.026867
| 0.919167
| 0.916143
| 0.905094
| 0.899337
| 0.896953
| 0.894161
| 0
| 0.015093
| 0.23875
| 26,111
| 677
| 217
| 38.568685
| 0.850028
| 0.017387
| 0
| 0.822107
| 0
| 0.006908
| 0.968334
| 0.073821
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 1
| 1
| 1
| 1
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 8
| 1caa77a0347bb7a9baef88bab93f0815d2295712
| 77,144
| py
| Python
| code/python/AxiomaEquityOptimizer/v3/fds/sdk/AxiomaEquityOptimizer/api/axp_optimizer_api.py
| factset/enterprise-sdk
| 3fd4d1360756c515c9737a0c9a992c7451d7de7e
| [ "Apache-2.0" ] | 6
| 2022-02-07T16:34:18.000Z
| 2022-03-30T08:04:57.000Z
| code/python/AxiomaEquityOptimizer/v3/fds/sdk/AxiomaEquityOptimizer/api/axp_optimizer_api.py
| factset/enterprise-sdk
| 3fd4d1360756c515c9737a0c9a992c7451d7de7e
| [ "Apache-2.0" ] | 2
| 2022-02-07T05:25:57.000Z
| 2022-03-07T14:18:04.000Z
| code/python/AxiomaEquityOptimizer/v3/fds/sdk/AxiomaEquityOptimizer/api/axp_optimizer_api.py
| factset/enterprise-sdk
| 3fd4d1360756c515c9737a0c9a992c7451d7de7e
| [ "Apache-2.0" ] | null | null | null |
"""
Axioma Equity API
Allow clients to fetch Analytics through APIs. # noqa: E501
The version of the OpenAPI document: 3
Contact: analytics.api.support@factset.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from multiprocessing.pool import ApplyResult
import typing
from fds.sdk.AxiomaEquityOptimizer.api_client import ApiClient, Endpoint as _Endpoint
from fds.sdk.AxiomaEquityOptimizer.model_utils import ( # noqa: F401
check_allowed_values,
check_validations,
date,
datetime,
file_type,
none_type,
validate_and_convert_types
)
from fds.sdk.AxiomaEquityOptimizer.exceptions import ApiException
from fds.sdk.AxiomaEquityOptimizer.model.axioma_equity_optimization_parameters_root import AxiomaEquityOptimizationParametersRoot
from fds.sdk.AxiomaEquityOptimizer.model.calculation_info_root import CalculationInfoRoot
from fds.sdk.AxiomaEquityOptimizer.model.client_error_response import ClientErrorResponse
from fds.sdk.AxiomaEquityOptimizer.model.object_root import ObjectRoot
class PostAndOptimizeResponseWrapper:
def __init__(self, status_code: int, response: object):
"""
This constructor initializes the new PostAndOptimizeResponseWrapper
with the given status_code and response.
Args:
response (object): Raw response
status_code (int): Http status code of the response
"""
self.status_code = status_code
self.response = response
def get_status_code(self) -> int:
"""
Returns: Http status code of the response
"""
return self.status_code
def get_response(self) -> object:
"""
Returns: Raw Object response
"""
return self.response
def get_response_201(self) -> ObjectRoot:
"""
Raises: ApiException: Invalid response getter called.
Returns: Expected response, returns json if optimization is completed in a short span.
"""
if self.status_code != 201:
raise ApiException(
status=500,
reason="Invalid response getter called. get_response_201 can't return a " + self.status_code + " response"
)
return self.response
def get_response_202(self) -> CalculationInfoRoot:
"""
Raises: ApiException: Invalid response getter called.
Returns: Expected response, contains the poll URL in the Location header.
"""
if self.status_code != 202:
raise ApiException(
status=500,
reason="Invalid response getter called. get_response_202 can't return a " + self.status_code + " response"
)
return self.response
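# Usage sketch (illustrative; `wrapper` stands for the value returned by the
# post_and_optimize endpoint defined below). Branch on the status code before
# calling a status-specific getter, since the getters raise on a mismatch:
#
# if wrapper.get_status_code() == 201:
#     result = wrapper.get_response_201()   # optimization finished inline
# elif wrapper.get_status_code() == 202:
#     info = wrapper.get_response_202()     # poll the URL in the Location header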
class PutAndOptimizeResponseWrapper:
def __init__(self, status_code: int, response: object):
"""
This constructor initializes the new PutAndOptimizeResponseWrapper
with the given status_code and response.
Args:
response (object): Raw response
status_code (int): Http status code of the response
"""
self.status_code = status_code
self.response = response
def get_status_code(self) -> int:
"""
Returns: Http status code of the response
"""
return self.status_code
def get_response(self) -> object:
"""
Returns: Raw Object response
"""
return self.response
def get_response_201(self) -> ObjectRoot:
"""
Raises: ApiException: Invalid response getter called.
Returns: Expected response, returns json if optimization is completed in a short span.
"""
if self.status_code != 201:
raise ApiException(
status=500,
reason="Invalid response getter called. get_response_201 can't return a " + self.status_code + " response"
)
return self.response
def get_response_202(self) -> CalculationInfoRoot:
"""
Raises: ApiException: Invalid response getter called.
Returns: Expected response, contains the poll URL in the Location header.
"""
if self.status_code != 202:
raise ApiException(
status=500,
reason="Invalid response getter called. get_response_202 can't return a " + self.status_code + " response"
)
return self.response
class AXPOptimizerApi(object):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
self.cancel_optimization_by_id_endpoint = _Endpoint(
settings={
'response_type': None,
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/analytics/engines/axp/v3/optimizations/{id}',
'operation_id': 'cancel_optimization_by_id',
'http_method': 'DELETE',
'servers': None,
},
params_map={
'all': [
'id',
],
'required': [
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'id':
(str,),
},
'attribute_map': {
'id': 'id',
},
'location_map': {
'id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'text/plain',
'application/json',
'text/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_optimization_parameters_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (AxiomaEquityOptimizationParametersRoot,), 400: (ClientErrorResponse,), 404: (ClientErrorResponse,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/analytics/engines/axp/v3/optimizations/{id}',
'operation_id': 'get_optimization_parameters',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'id',
],
'required': [
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'id':
(str,),
},
'attribute_map': {
'id': 'id',
},
'location_map': {
'id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_optimization_result_endpoint = _Endpoint(
settings={
'response_type': (
{ 200: (ObjectRoot,), 400: (ClientErrorResponse,), 404: (ClientErrorResponse,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/analytics/engines/axp/v3/optimizations/{id}/result',
'operation_id': 'get_optimization_result',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'id',
'accept',
],
'required': [
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'id':
(str,),
'accept':
(str,),
},
'attribute_map': {
'id': 'id',
'accept': 'Accept',
},
'location_map': {
'id': 'path',
'accept': 'header',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.get_optimization_status_by_id_endpoint = _Endpoint(
settings={
'response_type': (
{ 201: (ObjectRoot,), 400: (ClientErrorResponse,), 404: (ClientErrorResponse,), },
None
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/analytics/engines/axp/v3/optimizations/{id}/status',
'operation_id': 'get_optimization_status_by_id',
'http_method': 'GET',
'servers': None,
},
params_map={
'all': [
'id',
],
'required': [
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'id':
(str,),
},
'attribute_map': {
'id': 'id',
},
'location_map': {
'id': 'path',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [],
},
api_client=api_client
)
self.post_and_optimize_endpoint = _Endpoint(
settings={
'response_type': (
{ 201: (ObjectRoot,), 202: (CalculationInfoRoot,), 400: (ClientErrorResponse,), 404: (ClientErrorResponse,), },
PostAndOptimizeResponseWrapper
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/analytics/engines/axp/v3/optimizations',
'operation_id': 'post_and_optimize',
'http_method': 'POST',
'servers': None,
},
params_map={
'all': [
'x_fact_set_api_long_running_deadline',
'cache_control',
'axioma_equity_optimization_parameters_root',
],
'required': [],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'x_fact_set_api_long_running_deadline':
(int,),
'cache_control':
(str,),
'axioma_equity_optimization_parameters_root':
(AxiomaEquityOptimizationParametersRoot,),
},
'attribute_map': {
'x_fact_set_api_long_running_deadline': 'X-FactSet-Api-Long-Running-Deadline',
'cache_control': 'Cache-Control',
},
'location_map': {
'x_fact_set_api_long_running_deadline': 'header',
'cache_control': 'header',
'axioma_equity_optimization_parameters_root': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client
)
self.put_and_optimize_endpoint = _Endpoint(
settings={
'response_type': (
{ 201: (ObjectRoot,), 202: (CalculationInfoRoot,), 400: (ClientErrorResponse,), 404: (ClientErrorResponse,), },
PutAndOptimizeResponseWrapper
),
'auth': [
'FactSetApiKey',
'FactSetOAuth2'
],
'endpoint_path': '/analytics/engines/axp/v3/optimizations/{id}',
'operation_id': 'put_and_optimize',
'http_method': 'PUT',
'servers': None,
},
params_map={
'all': [
'id',
'x_fact_set_api_long_running_deadline',
'cache_control',
'axioma_equity_optimization_parameters_root',
],
'required': [
'id',
],
'nullable': [
],
'enum': [
],
'validation': [
]
},
root_map={
'validations': {
},
'allowed_values': {
},
'openapi_types': {
'id':
(str,),
'x_fact_set_api_long_running_deadline':
(int,),
'cache_control':
(str,),
'axioma_equity_optimization_parameters_root':
(AxiomaEquityOptimizationParametersRoot,),
},
'attribute_map': {
'id': 'id',
'x_fact_set_api_long_running_deadline': 'X-FactSet-Api-Long-Running-Deadline',
'cache_control': 'Cache-Control',
},
'location_map': {
'id': 'path',
'x_fact_set_api_long_running_deadline': 'header',
'cache_control': 'header',
'axioma_equity_optimization_parameters_root': 'body',
},
'collection_format_map': {
}
},
headers_map={
'accept': [
'application/json'
],
'content_type': [
'application/json'
]
},
api_client=api_client
)
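# Quick reference for the endpoints wired above (all under /analytics/engines/axp/v3):
#   cancel_optimization_by_id      DELETE /optimizations/{id}
#   get_optimization_parameters    GET    /optimizations/{id}
#   get_optimization_result        GET    /optimizations/{id}/result
#   get_optimization_status_by_id  GET    /optimizations/{id}/status
#   post_and_optimize              POST   /optimizations
#   put_and_optimize               PUT    /optimizations/{id}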
@staticmethod
def apply_kwargs_defaults(kwargs, return_http_data_only, async_req):
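"""Populate kwargs in place with the per-request defaults every endpoint method relies on."""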
kwargs["async_req"] = async_req
kwargs["_return_http_data_only"] = return_http_data_only
kwargs["_preload_content"] = kwargs.get("_preload_content", True)
kwargs["_request_timeout"] = kwargs.get("_request_timeout", None)
kwargs["_check_input_type"] = kwargs.get("_check_input_type", True)
kwargs["_check_return_type"] = kwargs.get("_check_return_type", True)
kwargs["_spec_property_naming"] = kwargs.get("_spec_property_naming", False)
kwargs["_content_type"] = kwargs.get("_content_type")
kwargs["_host_index"] = kwargs.get("_host_index")
def cancel_optimization_by_id(
self,
id,
**kwargs
) -> None:
"""Cancel Axioma optimization by id # noqa: E501
This is the endpoint to cancel a previously submitted optimization. # noqa: E501
This method makes a synchronous HTTP request. Returns the http data only
Args:
id (str): from url, provided from the location header in the Create and Run Axioma optimization endpoint
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
None
Response Object
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
kwargs['id'] = \
id
return self.cancel_optimization_by_id_endpoint.call_with_http_info(**kwargs)
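# Minimal usage sketch, assuming `api` is an instance of this API class built from a
# configured ApiClient (the id value below is illustrative, not from this module):
#
#     api.cancel_optimization_by_id("my-optimization-id")  # returns None on success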
def cancel_optimization_by_id_with_http_info(
self,
id,
**kwargs
) -> typing.Tuple[None, int, typing.MutableMapping]:
"""Cancel Axioma optimization by id # noqa: E501
This is the endpoint to cancel a previously submitted optimization. # noqa: E501
This method makes a synchronous HTTP request. Returns http data, http status and headers
Args:
id (str): from url, provided from the location header in the Create and Run Axioma optimization endpoint
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
None
Response Object
int
Http Status Code
dict
Dictionary of the response headers
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
kwargs['id'] = \
id
return self.cancel_optimization_by_id_endpoint.call_with_http_info(**kwargs)
def cancel_optimization_by_id_async(
self,
id,
**kwargs
) -> "ApplyResult[None]":
"""Cancel Axioma optimization by id # noqa: E501
This is the endpoint to cancel a previously submitted optimization. # noqa: E501
This method makes an asynchronous HTTP request. Returns the http data, wrapped in ApplyResult
Args:
id (str): from url, provided from the location header in the Create and Run Axioma optimization endpoint
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[None]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
kwargs['id'] = \
id
return self.cancel_optimization_by_id_endpoint.call_with_http_info(**kwargs)
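# Hedged sketch of the async variant: per the annotation above it returns a
# multiprocessing.pool ApplyResult, so `.get()` blocks until the DELETE completes
# (`api` is assumed as in the earlier sketch):
#
#     async_result = api.cancel_optimization_by_id_async("my-optimization-id")
#     async_result.get()  # waits for completion; yields None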
def cancel_optimization_by_id_with_http_info_async(
self,
id,
**kwargs
) -> "ApplyResult[typing.Tuple[None, int, typing.MutableMapping]]":
"""Cancel Axioma optimization by id # noqa: E501
This is the endpoint to cancel a previously submitted optimization. # noqa: E501
This method makes an asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult
Args:
id (str): from url, provided from the location header in the Create and Run Axioma optimization endpoint
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[(None, int, typing.Dict)]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
kwargs['id'] = \
id
return self.cancel_optimization_by_id_endpoint.call_with_http_info(**kwargs)
def get_optimization_parameters(
self,
id,
**kwargs
) -> AxiomaEquityOptimizationParametersRoot:
"""Get Axioma optimization parameters by id # noqa: E501
This is the endpoint that returns the optimization parameters passed for a calculation. # noqa: E501
This method makes a synchronous HTTP request. Returns the http data only
Args:
id (str): from url, provided from the location header in the Create and Run Axioma optimization endpoint
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
AxiomaEquityOptimizationParametersRoot
Response Object
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
kwargs['id'] = \
id
return self.get_optimization_parameters_endpoint.call_with_http_info(**kwargs)
def get_optimization_parameters_with_http_info(
self,
id,
**kwargs
) -> typing.Tuple[AxiomaEquityOptimizationParametersRoot, int, typing.MutableMapping]:
"""Get Axioma optimization parameters by id # noqa: E501
This is the endpoint that returns the optimization parameters passed for a calculation. # noqa: E501
This method makes a synchronous HTTP request. Returns http data, http status and headers
Args:
id (str): from url, provided from the location header in the Create and Run Axioma optimization endpoint
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
AxiomaEquityOptimizationParametersRoot
Response Object
int
Http Status Code
dict
Dictionary of the response headers
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
kwargs['id'] = \
id
return self.get_optimization_parameters_endpoint.call_with_http_info(**kwargs)
def get_optimization_parameters_async(
self,
id,
**kwargs
) -> "ApplyResult[AxiomaEquityOptimizationParametersRoot]":
"""Get Axioma optimization parameters by id # noqa: E501
This is the endpoint that returns the optimization parameters passed for a calculation. # noqa: E501
This method makes an asynchronous HTTP request. Returns the http data, wrapped in ApplyResult
Args:
id (str): from url, provided from the location header in the Create and Run Axioma optimization endpoint
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[AxiomaEquityOptimizationParametersRoot]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
kwargs['id'] = \
id
return self.get_optimization_parameters_endpoint.call_with_http_info(**kwargs)
def get_optimization_parameters_with_http_info_async(
self,
id,
**kwargs
) -> "ApplyResult[typing.Tuple[AxiomaEquityOptimizationParametersRoot, int, typing.MutableMapping]]":
"""Get Axioma optimization parameters by id # noqa: E501
This is the endpoint that returns the optimization parameters passed for a calculation. # noqa: E501
This method makes an asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult
Args:
id (str): from url, provided from the location header in the Create and Run Axioma optimization endpoint
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[(AxiomaEquityOptimizationParametersRoot, int, typing.Dict)]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
kwargs['id'] = \
id
return self.get_optimization_parameters_endpoint.call_with_http_info(**kwargs)
def get_optimization_result(
self,
id,
**kwargs
) -> ObjectRoot:
"""Get Axioma optimization result by id # noqa: E501
This is the endpoint to get the result of a previously requested optimization. # noqa: E501
This method makes a synchronous HTTP request. Returns the http data only
Args:
id (str): from url, provided from the location header in the Get Axioma optimization status by id endpoint
Keyword Args:
accept (str): Standard HTTP header. Value can be gzip, compress, deflate, br, identity and/or *. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ObjectRoot
Response Object
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
kwargs['id'] = \
id
return self.get_optimization_result_endpoint.call_with_http_info(**kwargs)
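# Sketch (assumed values): the optional `accept` kwarg is sent as the HTTP Accept
# header via the endpoint's attribute_map/location_map above, e.g. one of the values
# the docstring lists:
#
#     data = api.get_optimization_result("my-optimization-id", accept="gzip")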
def get_optimization_result_with_http_info(
self,
id,
**kwargs
) -> typing.Tuple[ObjectRoot, int, typing.MutableMapping]:
"""Get Axioma optimization result by id # noqa: E501
This is the endpoint to get the result of a previously requested optimization. # noqa: E501
This method makes a synchronous HTTP request. Returns http data, http status and headers
Args:
id (str): from url, provided from the location header in the Get Axioma optimization status by id endpoint
Keyword Args:
accept (str): Standard HTTP header. Value can be gzip, compress, deflate, br, identity and/or *. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ObjectRoot
Response Object
int
Http Status Code
dict
Dictionary of the response headers
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
kwargs['id'] = \
id
return self.get_optimization_result_endpoint.call_with_http_info(**kwargs)
def get_optimization_result_async(
self,
id,
**kwargs
) -> "ApplyResult[ObjectRoot]":
"""Get Axioma optimization result by id # noqa: E501
This is the endpoint to get the result of a previously requested optimization. # noqa: E501
This method makes an asynchronous HTTP request. Returns the http data, wrapped in ApplyResult
Args:
id (str): from url, provided from the location header in the Get Axioma optimization status by id endpoint
Keyword Args:
accept (str): Standard HTTP header. Value can be gzip, compress, deflate, br, identity and/or *. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[ObjectRoot]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
kwargs['id'] = \
id
return self.get_optimization_result_endpoint.call_with_http_info(**kwargs)
def get_optimization_result_with_http_info_async(
self,
id,
**kwargs
) -> "ApplyResult[typing.Tuple[ObjectRoot, int, typing.MutableMapping]]":
"""Get Axioma optimization result by id # noqa: E501
This is the endpoint to get the result of a previously requested optimization. # noqa: E501
This method makes an asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult
Args:
id (str): from url, provided from the location header in the Get Axioma optimization status by id endpoint
Keyword Args:
accept (str): Standard HTTP header. Value can be gzip, compress, deflate, br, identity and/or *. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[(ObjectRoot, int, typing.Dict)]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
kwargs['id'] = \
id
return self.get_optimization_result_endpoint.call_with_http_info(**kwargs)
def get_optimization_status_by_id(
self,
id,
**kwargs
) -> ObjectRoot:
"""Get Axioma optimization status by id # noqa: E501
This is the endpoint to check on the progress of a previously requested optimization. If the optimization has finished computing, the body of the response will contain the result in JSON. Otherwise, the optimization is still running and the X-FactSet-Api-PickUp-Progress header will contain a progress percentage. # noqa: E501
This method makes a synchronous HTTP request. Returns the http data only
Args:
id (str): from url, provided from the location header in the Create and Run Axioma optimization endpoint
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ObjectRoot
Response Object
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
kwargs['id'] = \
id
return self.get_optimization_status_by_id_endpoint.call_with_http_info(**kwargs)
def get_optimization_status_by_id_with_http_info(
self,
id,
**kwargs
) -> typing.Tuple[ObjectRoot, int, typing.MutableMapping]:
"""Get Axioma optimization status by id # noqa: E501
This is the endpoint to check on the progress of a previously requested optimization. If the optimization has finished computing, the body of the response will contain the result in JSON. Otherwise, the optimization is still running and the X-FactSet-Api-PickUp-Progress header will contain a progress percentage. # noqa: E501
This method makes a synchronous HTTP request. Returns http data, http status and headers
Args:
id (str): from url, provided from the location header in the Create and Run Axioma optimization endpoint
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ObjectRoot
Response Object
int
Http Status Code
dict
Dictionary of the response headers
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
kwargs['id'] = \
id
return self.get_optimization_status_by_id_endpoint.call_with_http_info(**kwargs)
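# Sketch of polling with the *_with_http_info variant, which also returns the HTTP
# status and the response headers; the progress header name comes from the summary
# above (`api` assumed as before):
#
#     data, status, headers = api.get_optimization_status_by_id_with_http_info("my-optimization-id")
#     progress = headers.get("X-FactSet-Api-PickUp-Progress")  # set while still running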
def get_optimization_status_by_id_async(
self,
id,
**kwargs
) -> "ApplyResult[ObjectRoot]":
"""Get Axioma optimization status by id # noqa: E501
This is the endpoint to check on the progress of a previously requested optimization. If the optimization has finished computing, the body of the response will contain the result in JSON. Otherwise, the optimization is still running and the X-FactSet-Api-PickUp-Progress header will contain a progress percentage. # noqa: E501
This method makes an asynchronous HTTP request. Returns the http data, wrapped in ApplyResult
Args:
id (str): from url, provided from the location header in the Create and Run Axioma optimization endpoint
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[ObjectRoot]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
kwargs['id'] = \
id
return self.get_optimization_status_by_id_endpoint.call_with_http_info(**kwargs)
def get_optimization_status_by_id_with_http_info_async(
self,
id,
**kwargs
) -> "ApplyResult[typing.Tuple[ObjectRoot, int, typing.MutableMapping]]":
"""Get Axioma optimization status by id # noqa: E501
This is the endpoint to check on the progress of a previously requested optimization. If the optimization has finished computing, the body of the response will contain the result in JSON. Otherwise, the optimization is still running and the X-FactSet-Api-PickUp-Progress header will contain a progress percentage. # noqa: E501
This method makes an asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult
Args:
id (str): from url, provided from the location header in the Create and Run Axioma optimization endpoint
Keyword Args:
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[(ObjectRoot, int, typing.Dict)]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
kwargs['id'] = \
id
return self.get_optimization_status_by_id_endpoint.call_with_http_info(**kwargs)
def post_and_optimize(
self,
**kwargs
) -> PostAndOptimizeResponseWrapper:
"""Create and Run Axioma optimization # noqa: E501
This endpoint creates and runs the Axioma optimization specified in the POST body parameters. Remarks: * Any settings in the POST body act as a one-time override of the settings saved in the strategy document. # noqa: E501
This method makes a synchronous HTTP request. Returns the http data only
Keyword Args:
x_fact_set_api_long_running_deadline (int): Long-running deadline in seconds. [optional]
cache_control (str): Standard HTTP header. Accepts no-cache, no-store, max-age, max-stale. [optional]
axioma_equity_optimization_parameters_root (AxiomaEquityOptimizationParametersRoot): Calculation Parameters. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
PostAndOptimizeResponseWrapper
Response Object
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
return self.post_and_optimize_endpoint.call_with_http_info(**kwargs)
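# End-to-end sketch of the long-running flow these endpoints imply (illustrative only;
# building AxiomaEquityOptimizationParametersRoot and extracting the id from the
# Location header depend on the surrounding SDK):
#
#     params = AxiomaEquityOptimizationParametersRoot(...)  # calculation parameters
#     response = api.post_and_optimize(
#         axioma_equity_optimization_parameters_root=params,
#         x_fact_set_api_long_running_deadline=20,
#     )
#     # 201 -> the optimization finished and the response carries the result;
#     # 202 -> poll get_optimization_status_by_id(id), then get_optimization_result(id).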
def post_and_optimize_with_http_info(
self,
**kwargs
) -> typing.Tuple[PostAndOptimizeResponseWrapper, int, typing.MutableMapping]:
"""Create and Run Axioma optimization # noqa: E501
This endpoint creates and runs the Axioma optimization specified in the POST body parameters. Remarks: * Any settings in the POST body act as a one-time override of the settings saved in the strategy document. # noqa: E501
This method makes a synchronous HTTP request. Returns http data, http status and headers
Keyword Args:
x_fact_set_api_long_running_deadline (int): Long-running deadline in seconds. [optional]
cache_control (str): Standard HTTP header. Accepts no-cache, no-store, max-age, max-stale. [optional]
axioma_equity_optimization_parameters_root (AxiomaEquityOptimizationParametersRoot): Calculation Parameters. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
PostAndOptimizeResponseWrapper
Response Object
int
Http Status Code
dict
Dictionary of the response headers
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
return self.post_and_optimize_endpoint.call_with_http_info(**kwargs)
def post_and_optimize_async(
self,
**kwargs
) -> "ApplyResult[PostAndOptimizeResponseWrapper]":
"""Create and Run Axioma optimization # noqa: E501
This endpoint creates and runs the Axioma optimization specified in the POST body parameters. Remarks: * Any settings in the POST body act as a one-time override of the settings saved in the strategy document. # noqa: E501
This method makes an asynchronous HTTP request. Returns the http data, wrapped in ApplyResult
Keyword Args:
x_fact_set_api_long_running_deadline (int): Long-running deadline in seconds. [optional]
cache_control (str): Standard HTTP header. Accepts no-cache, no-store, max-age, max-stale. [optional]
axioma_equity_optimization_parameters_root (AxiomaEquityOptimizationParametersRoot): Calculation Parameters. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[PostAndOptimizeResponseWrapper]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
return self.post_and_optimize_endpoint.call_with_http_info(**kwargs)
def post_and_optimize_with_http_info_async(
self,
**kwargs
) -> "ApplyResult[typing.Tuple[PostAndOptimizeResponseWrapper, int, typing.MutableMapping]]":
"""Create and Run Axioma optimization # noqa: E501
This endpoint creates and runs the Axioma optimization specified in the POST body parameters. Remarks: * Any settings in the POST body act as a one-time override of the settings saved in the strategy document. # noqa: E501
This method makes an asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult
Keyword Args:
x_fact_set_api_long_running_deadline (int): Long-running deadline in seconds. [optional]
cache_control (str): Standard HTTP header. Accepts no-cache, no-store, max-age, max-stale. [optional]
axioma_equity_optimization_parameters_root (AxiomaEquityOptimizationParametersRoot): Calculation Parameters. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[(PostAndOptimizeResponseWrapper, int, typing.Dict)]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
return self.post_and_optimize_endpoint.call_with_http_info(**kwargs)
def put_and_optimize(
self,
id,
**kwargs
) -> PutAndOptimizeResponseWrapper:
"""Create or Update Axioma optimization and run it. # noqa: E501
This endpoint updates and runs the Axioma optimization specified in the PUT body parameters. It also allows the creation of a new Axioma optimization with a custom id. # noqa: E501
This method makes a synchronous HTTP request. Returns the http data only
Args:
id (str): from url, provided from the location header in the Create and Run Axioma optimization endpoint
Keyword Args:
x_fact_set_api_long_running_deadline (int): Long-running deadline in seconds. [optional]
cache_control (str): Standard HTTP header. Accepts no-cache, no-store, max-age, max-stale. [optional]
axioma_equity_optimization_parameters_root (AxiomaEquityOptimizationParametersRoot): Calculation Parameters. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
PutAndOptimizeResponseWrapper
Response Object
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=False)
kwargs['id'] = \
id
return self.put_and_optimize_endpoint.call_with_http_info(**kwargs)
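# Sketch: per the summary above, PUT also creates a new optimization when the id does
# not exist yet, so a caller-chosen id works (values illustrative):
#
#     api.put_and_optimize("my-custom-id", axioma_equity_optimization_parameters_root=params)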
def put_and_optimize_with_http_info(
self,
id,
**kwargs
) -> typing.Tuple[PutAndOptimizeResponseWrapper, int, typing.MutableMapping]:
"""Create or Update Axioma optimization and run it. # noqa: E501
This endpoint updates and runs the Axioma optimization specified in the PUT body parameters. It also allows the creation of a new Axioma optimization with a custom id. # noqa: E501
This method makes a synchronous HTTP request. Returns http data, http status and headers
Args:
id (str): from url, provided from the location header in the Create and Run Axioma optimization endpoint
Keyword Args:
x_fact_set_api_long_running_deadline (int): Long-running deadline in seconds. [optional]
cache_control (str): Standard HTTP header. Accepts no-cache, no-store, max-age, max-stale. [optional]
axioma_equity_optimization_parameters_root (AxiomaEquityOptimizationParametersRoot): Calculation Parameters. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
PutAndOptimizeResponseWrapper
Response Object
int
Http Status Code
dict
Dictionary of the response headers
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=False)
kwargs['id'] = \
id
return self.put_and_optimize_endpoint.call_with_http_info(**kwargs)
def put_and_optimize_async(
self,
id,
**kwargs
) -> "ApplyResult[PutAndOptimizeResponseWrapper]":
"""Create or Update Axioma optimization and run it. # noqa: E501
This endpoint updates and runs the Axioma optimization specified in the PUT body parameters. It also allows the creation of a new Axioma optimization with a custom id. # noqa: E501
This method makes an asynchronous HTTP request. Returns the http data, wrapped in ApplyResult
Args:
id (str): from url, provided from the location header in the Create and Run Axioma optimization endpoint
Keyword Args:
x_fact_set_api_long_running_deadline (int): Long-running deadline in seconds. [optional]
cache_control (str): Standard HTTP header. Accepts no-cache, no-store, max-age, max-stale. [optional]
axioma_equity_optimization_parameters_root (AxiomaEquityOptimizationParametersRoot): Calculation Parameters. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[PutAndOptimizeResponseWrapper]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=True, async_req=True)
kwargs['id'] = \
id
return self.put_and_optimize_endpoint.call_with_http_info(**kwargs)
def put_and_optimize_with_http_info_async(
self,
id,
**kwargs
) -> "ApplyResult[typing.Tuple[PutAndOptimizeResponseWrapper, int, typing.MutableMapping]]":
"""Create or Update Axioma optimization and run it. # noqa: E501
This endpoint updates and runs the Axioma optimization specified in the PUT body parameters. It also allows the creation of a new Axioma optimization with a custom id. # noqa: E501
This method makes an asynchronous HTTP request. Returns http data, http status and headers, wrapped in ApplyResult
Args:
id (str): from url, provided from the location header in the Create and Run Axioma optimization endpoint
Keyword Args:
x_fact_set_api_long_running_deadline (int): Long-running deadline in seconds. [optional]
cache_control (str): Standard HTTP header. Accepts no-cache, no-store, max-age, max-stale. [optional]
axioma_equity_optimization_parameters_root (AxiomaEquityOptimizationParametersRoot): Calculation Parameters. [optional]
_preload_content (bool): if False, the urllib3.HTTPResponse object
will be returned without reading/decoding response data.
Default is True.
_request_timeout (int/float/tuple): timeout setting for this request. If
one number provided, it will be total request timeout. It can also
be a pair (tuple) of (connection, read) timeouts.
Default is None.
_check_input_type (bool): specifies if type checking
should be done on the data sent to the server.
Default is True.
_check_return_type (bool): specifies if type checking
should be done on the data received from the server.
Default is True.
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_content_type (str/None): force body content-type.
Default is None and content-type will be predicted by allowed
content-types and body.
_host_index (int/None): specifies the index of the server
that we want to use.
Default is read from the configuration.
Returns:
ApplyResult[(PutAndOptimizeResponseWrapper, int, typing.Dict)]
"""
self.apply_kwargs_defaults(kwargs=kwargs, return_http_data_only=False, async_req=True)
kwargs['id'] = \
id
return self.put_and_optimize_endpoint.call_with_http_info(**kwargs)
[dataset row boundary: per-file quality-signal columns omitted]
Next row: etcmodel/feature_utils_test.py (Python, 23,395 bytes, Apache-2.0, hexsha 1cacfaca1c7026296e12109b3d3f88ea55141841; repo copies: muell-monster/google-research, Alfaxad/google-research)
# coding=utf-8
# Copyright 2020 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for feature utils."""
from absl.testing import parameterized
import numpy as np
import tensorflow as tf
from etcmodel import feature_utils
class TensorUtilsTest(tf.test.TestCase, parameterized.TestCase):
def test_relative_position_generator_init(self):
relative_pos_gen = feature_utils.RelativePositionGenerator(max_distance=3)
self.assertEqual(3, relative_pos_gen.max_distance)
self.assertEqual(False, relative_pos_gen.ignore_direction)
self.assertEqual(7, relative_pos_gen.relative_vocab_size)
self.assertEqual(6, relative_pos_gen.left_pad_value)
self.assertEqual(3, relative_pos_gen.right_pad_value)
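# The assertions above pin down the id layout for max_distance=3: id 0 means "same
# position", ids 1..3 are distances 1..3 to the right, ids 4..6 are distances 1..3 to
# the left, so relative_vocab_size = 2 * max_distance + 1 = 7, and positions farther
# away clamp to the pad values (2 * max_distance on the left, max_distance on the right).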
def test_relative_position_generator_init_ignore_direction(self):
relative_pos_gen = feature_utils.RelativePositionGenerator(
max_distance=3, ignore_direction=True)
self.assertEqual(3, relative_pos_gen.max_distance)
self.assertEqual(True, relative_pos_gen.ignore_direction)
self.assertEqual(4, relative_pos_gen.relative_vocab_size)
self.assertEqual(3, relative_pos_gen.left_pad_value)
self.assertEqual(3, relative_pos_gen.right_pad_value)
def test_relative_position_generator_init_max_distance_0(self):
relative_pos_gen = feature_utils.RelativePositionGenerator(max_distance=0)
self.assertEqual(0, relative_pos_gen.max_distance)
self.assertEqual(False, relative_pos_gen.ignore_direction)
self.assertEqual(1, relative_pos_gen.relative_vocab_size)
self.assertEqual(0, relative_pos_gen.left_pad_value)
self.assertEqual(0, relative_pos_gen.right_pad_value)
def test_relative_position_generator_init_invalid_arguments(self):
with self.assertRaises(ValueError):
feature_utils.RelativePositionGenerator(max_distance=-1)
def test_make_relative_att_ids_padding_case(self):
relative_pos_gen = feature_utils.RelativePositionGenerator(max_distance=3)
expected = [[
[0, 1, 2, 3, 3, 3], #
[4, 0, 1, 2, 3, 3], #
[5, 4, 0, 1, 2, 3], #
[6, 5, 4, 0, 1, 2], #
[6, 6, 5, 4, 0, 1], #
[6, 6, 6, 5, 4, 0], #
]]
self.assertAllEqual(expected, relative_pos_gen.make_relative_att_ids(6))
def test_make_relative_att_ids_padding_case_ignore_direction(self):
relative_pos_gen = feature_utils.RelativePositionGenerator(
max_distance=3, ignore_direction=True)
expected = [[
[0, 1, 2, 3, 3, 3], #
[1, 0, 1, 2, 3, 3], #
[2, 1, 0, 1, 2, 3], #
[3, 2, 1, 0, 1, 2], #
[3, 3, 2, 1, 0, 1], #
[3, 3, 3, 2, 1, 0], #
]]
self.assertAllEqual(expected, relative_pos_gen.make_relative_att_ids(6))
def test_make_relative_att_ids_trimming_case(self):
relative_pos_gen = feature_utils.RelativePositionGenerator(max_distance=9)
expected = [[
[0, 1, 2, 3, 4], #
[10, 0, 1, 2, 3], #
[11, 10, 0, 1, 2], #
[12, 11, 10, 0, 1], #
[13, 12, 11, 10, 0], #
]]
self.assertAllEqual(expected, relative_pos_gen.make_relative_att_ids(5))
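# Trimming in action: with max_distance=9 but seq_len=5, only distances 1..4 occur,
# so the right side uses ids 1..4 and the left side uses max_distance + distance,
# i.e. ids 10..13; no pad values appear.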
def test_make_relative_att_ids_no_pad_or_trim_case(self):
relative_pos_gen = feature_utils.RelativePositionGenerator(max_distance=4)
expected = [[
[0, 1, 2, 3, 4], #
[5, 0, 1, 2, 3], #
[6, 5, 0, 1, 2], #
[7, 6, 5, 0, 1], #
[8, 7, 6, 5, 0], #
]]
self.assertAllEqual(expected, relative_pos_gen.make_relative_att_ids(5))
def test_make_relative_att_ids_max_distance_0(self):
relative_pos_gen = feature_utils.RelativePositionGenerator(max_distance=0)
expected = [[
[0, 0, 0, 0], #
[0, 0, 0, 0], #
[0, 0, 0, 0], #
[0, 0, 0, 0], #
]]
self.assertAllEqual(expected, relative_pos_gen.make_relative_att_ids(4))
def test_make_relative_att_ids_batch_size_2(self):
relative_pos_gen = feature_utils.RelativePositionGenerator(max_distance=3)
expected = [
[
[0, 1, 2, 3, 3], #
[4, 0, 1, 2, 3], #
[5, 4, 0, 1, 2], #
[6, 5, 4, 0, 1], #
[6, 6, 5, 4, 0], #
],
[
[0, 1, 2, 3, 3], #
[4, 0, 1, 2, 3], #
[5, 4, 0, 1, 2], #
[6, 5, 4, 0, 1], #
[6, 6, 5, 4, 0], #
]
]
self.assertAllEqual(
expected,
relative_pos_gen.make_relative_att_ids(seq_len=5, batch_size=2))
def test_make_relative_att_ids_batch_size_2_tensor(self):
dummy_batch = tf.ones([2, 5])
relative_pos_gen = feature_utils.RelativePositionGenerator(max_distance=3)
expected = [
[
[0, 1, 2, 3, 3], #
[4, 0, 1, 2, 3], #
[5, 4, 0, 1, 2], #
[6, 5, 4, 0, 1], #
[6, 6, 5, 4, 0], #
],
[
[0, 1, 2, 3, 3], #
[4, 0, 1, 2, 3], #
[5, 4, 0, 1, 2], #
[6, 5, 4, 0, 1], #
[6, 6, 5, 4, 0], #
]
]
self.assertAllEqual(
expected,
relative_pos_gen.make_relative_att_ids(
seq_len=5, batch_size=tf.shape(dummy_batch)[0]))
def test_overwrite_relative_att_ids_outside_segments(self):
# batch_size = 2, seq_len = 5, max_distance = 3
rel_att_ids = [
[
[0, 1, 2, 3, 3], #
[4, 0, 1, 2, 3], #
[5, 4, 0, 1, 2], #
[6, 5, 4, 0, 1], #
[6, 6, 5, 4, 0], #
],
[
[0, 1, 2, 3, 3], #
[4, 0, 1, 2, 3], #
[5, 4, 0, 1, 2], #
[6, 5, 4, 0, 1], #
[6, 6, 5, 4, 0], #
]
]
segment_ids = [[10, 10, 20, 30, 30], [10, 20, 20, 10, 10]]
overwrite_value = 100
expected_rel_att_ids = [
[
[0, 1, 100, 100, 100], #
[4, 0, 100, 100, 100], #
[100, 100, 0, 100, 100], #
[100, 100, 100, 0, 1], #
[100, 100, 100, 4, 0], #
],
[
[0, 100, 100, 3, 3], #
[100, 0, 1, 100, 100], #
[100, 4, 0, 100, 100], #
[6, 100, 100, 0, 1], #
[6, 100, 100, 4, 0], #
]
]
self.assertAllEqual(
expected_rel_att_ids,
feature_utils.overwrite_relative_att_ids_outside_segments(
rel_att_ids=rel_att_ids,
segment_ids=segment_ids,
overwrite_value=overwrite_value))
def test_make_relative_att_ids_invalid_arguments(self):
relative_pos_gen = feature_utils.RelativePositionGenerator(max_distance=3)
with self.assertRaises(ValueError):
relative_pos_gen.make_relative_att_ids(0)
with self.assertRaises(ValueError):
relative_pos_gen.make_relative_att_ids(seq_len=5, batch_size=0)
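# make_local_relative_att_ids emits, for each token, the ids of the window
# [-local_radius, +local_radius] around it, giving shape
# [batch_size, seq_len, 2 * local_radius + 1]. All rows are identical since
# the id depends only on the relative offset, not the absolute position.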
def test_make_local_relative_att_ids_padding_case(self):
relative_pos_gen = feature_utils.RelativePositionGenerator(max_distance=3)
expected = [[
[6, 6, 6, 5, 4, 0, 1, 2, 3, 3, 3], #
[6, 6, 6, 5, 4, 0, 1, 2, 3, 3, 3], #
[6, 6, 6, 5, 4, 0, 1, 2, 3, 3, 3], #
[6, 6, 6, 5, 4, 0, 1, 2, 3, 3, 3], #
]]
self.assertAllEqual(
expected,
relative_pos_gen.make_local_relative_att_ids(seq_len=4, local_radius=5))
def test_make_local_relative_att_ids_padding_case_ignore_direction(self):
relative_pos_gen = feature_utils.RelativePositionGenerator(
max_distance=3, ignore_direction=True)
expected = [[
[3, 3, 3, 2, 1, 0, 1, 2, 3, 3, 3], #
[3, 3, 3, 2, 1, 0, 1, 2, 3, 3, 3], #
[3, 3, 3, 2, 1, 0, 1, 2, 3, 3, 3], #
[3, 3, 3, 2, 1, 0, 1, 2, 3, 3, 3], #
]]
self.assertAllEqual(
expected,
relative_pos_gen.make_local_relative_att_ids(seq_len=4, local_radius=5))
def test_make_local_relative_att_ids_trimming_case(self):
relative_pos_gen = feature_utils.RelativePositionGenerator(max_distance=9)
expected = [[
[13, 12, 11, 10, 0, 1, 2, 3, 4], #
[13, 12, 11, 10, 0, 1, 2, 3, 4], #
[13, 12, 11, 10, 0, 1, 2, 3, 4], #
]]
self.assertAllEqual(
expected,
relative_pos_gen.make_local_relative_att_ids(seq_len=3, local_radius=4))
def test_make_local_relative_att_ids_no_pad_or_trim_case(self):
relative_pos_gen = feature_utils.RelativePositionGenerator(max_distance=4)
expected = [[
[8, 7, 6, 5, 0, 1, 2, 3, 4], #
[8, 7, 6, 5, 0, 1, 2, 3, 4], #
[8, 7, 6, 5, 0, 1, 2, 3, 4], #
]]
self.assertAllEqual(
expected,
relative_pos_gen.make_local_relative_att_ids(seq_len=3, local_radius=4))
def test_make_local_relative_att_ids_max_distance_0(self):
relative_pos_gen = feature_utils.RelativePositionGenerator(max_distance=0)
expected = [[
[0, 0, 0, 0, 0], #
[0, 0, 0, 0, 0], #
]]
self.assertAllEqual(
expected,
relative_pos_gen.make_local_relative_att_ids(seq_len=2, local_radius=2))
def test_make_local_relative_att_ids_batch_size_2(self):
relative_pos_gen = feature_utils.RelativePositionGenerator(max_distance=3)
expected = [
[
[6, 6, 5, 4, 0, 1, 2, 3, 3], #
[6, 6, 5, 4, 0, 1, 2, 3, 3], #
[6, 6, 5, 4, 0, 1, 2, 3, 3], #
],
[
[6, 6, 5, 4, 0, 1, 2, 3, 3], #
[6, 6, 5, 4, 0, 1, 2, 3, 3], #
[6, 6, 5, 4, 0, 1, 2, 3, 3], #
],
]
self.assertAllEqual(
expected,
relative_pos_gen.make_local_relative_att_ids(
seq_len=3, local_radius=4, batch_size=2))
def test_make_local_relative_att_ids_batch_size_2_tensor(self):
dummy_batch = tf.ones([2, 5])
relative_pos_gen = feature_utils.RelativePositionGenerator(max_distance=3)
expected = [
[
[6, 6, 5, 4, 0, 1, 2, 3, 3], #
[6, 6, 5, 4, 0, 1, 2, 3, 3], #
[6, 6, 5, 4, 0, 1, 2, 3, 3], #
],
[
[6, 6, 5, 4, 0, 1, 2, 3, 3], #
[6, 6, 5, 4, 0, 1, 2, 3, 3], #
[6, 6, 5, 4, 0, 1, 2, 3, 3], #
],
]
self.assertAllEqual(
expected,
relative_pos_gen.make_local_relative_att_ids(
seq_len=3, local_radius=4, batch_size=tf.shape(dummy_batch)[0]))
def test_make_local_relative_att_ids_invalid_arguments(self):
relative_pos_gen = feature_utils.RelativePositionGenerator(max_distance=3)
with self.assertRaises(ValueError):
relative_pos_gen.make_local_relative_att_ids(seq_len=0, local_radius=3)
with self.assertRaises(ValueError):
relative_pos_gen.make_local_relative_att_ids(seq_len=5, local_radius=0)
with self.assertRaises(ValueError):
relative_pos_gen.make_local_relative_att_ids(
seq_len=5, local_radius=3, batch_size=0)
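# make_att_mask_from_input_mask splits each example into real tokens
# (input_mask == 1) and padding (input_mask == 0); attention is permitted
# within each group but never between the two, as the expected masks show.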
def test_make_att_mask_from_input_mask(self):
input_mask = [
[1, 1, 1, 0, 0, 0],
[1, 1, 1, 1, 1, 1],
[1, 1, 1, 1, 1, 0],
[0, 0, 0, 0, 0, 0],
]
expected = [
[
[1, 1, 1, 0, 0, 0], #
[1, 1, 1, 0, 0, 0], #
[1, 1, 1, 0, 0, 0], #
[0, 0, 0, 1, 1, 1], #
[0, 0, 0, 1, 1, 1], #
[0, 0, 0, 1, 1, 1], #
], #
[
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
], #
[
[1, 1, 1, 1, 1, 0], #
[1, 1, 1, 1, 1, 0], #
[1, 1, 1, 1, 1, 0], #
[1, 1, 1, 1, 1, 0], #
[1, 1, 1, 1, 1, 0], #
[0, 0, 0, 0, 0, 1], #
], #
[
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
], #
]
self.assertAllEqual(expected,
feature_utils.make_att_mask_from_input_mask(input_mask))
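# make_segmented_att_mask lets position i attend to position j exactly when
# segment_ids[i] == segment_ids[j]; segments need not be contiguous (see the
# third example below).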
def test_make_segmented_att_mask(self):
segment_ids = [
[0, 0, 1, 1, 0, 0],
[2, 2, 2, 2, 2, 2],
[0, 0, 3, 0, 3, 0],
[0, 5, 4, 3, 2, 1],
]
expected = [
[
[1, 1, 0, 0, 1, 1], #
[1, 1, 0, 0, 1, 1], #
[0, 0, 1, 1, 0, 0], #
[0, 0, 1, 1, 0, 0], #
[1, 1, 0, 0, 1, 1], #
[1, 1, 0, 0, 1, 1], #
], #
[
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
], #
[
[1, 1, 0, 1, 0, 1], #
[1, 1, 0, 1, 0, 1], #
[0, 0, 1, 0, 1, 0], #
[1, 1, 0, 1, 0, 1], #
[0, 0, 1, 0, 1, 0], #
[1, 1, 0, 1, 0, 1], #
], #
[
[1, 0, 0, 0, 0, 0], #
[0, 1, 0, 0, 0, 0], #
[0, 0, 1, 0, 0, 0], #
[0, 0, 0, 1, 0, 0], #
[0, 0, 0, 0, 1, 0], #
[0, 0, 0, 0, 0, 1], #
], #
]
self.assertAllEqual(expected,
feature_utils.make_segmented_att_mask(segment_ids))
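# In make_att_mask_from_breakpoints, a 1 marks the last token of an
# attention block by default, so attention never crosses a breakpoint. An
# all-zero row keeps the whole sequence as one block, and an all-one row
# degenerates to a diagonal (identity) mask.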
def test_make_att_mask_from_breakpoints(self):
att_breakpoints = [
[0, 1, 0, 1, 0, 0],
[0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 1],
[1, 1, 1, 1, 1, 1],
]
expected = [
[
[1, 1, 0, 0, 0, 0], #
[1, 1, 0, 0, 0, 0], #
[0, 0, 1, 1, 0, 0], #
[0, 0, 1, 1, 0, 0], #
[0, 0, 0, 0, 1, 1], #
[0, 0, 0, 0, 1, 1], #
], #
[
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
], #
[
[1, 0, 0, 0, 0, 0], #
[0, 1, 1, 1, 1, 1], #
[0, 1, 1, 1, 1, 1], #
[0, 1, 1, 1, 1, 1], #
[0, 1, 1, 1, 1, 1], #
[0, 1, 1, 1, 1, 1], #
], #
[
[1, 0, 0, 0, 0, 0], #
[0, 1, 0, 0, 0, 0], #
[0, 0, 1, 0, 0, 0], #
[0, 0, 0, 1, 0, 0], #
[0, 0, 0, 0, 1, 0], #
[0, 0, 0, 0, 0, 1], #
], #
]
self.assertAllEqual(
expected, feature_utils.make_att_mask_from_breakpoints(att_breakpoints))
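# With use_starting_breakpoints=True, a 1 instead marks the first token of a
# new block; the first example encodes the same blocking as above, just
# expressed via starting positions.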
def test_make_att_mask_from_breakpoints_use_starting_breakpoints(self):
att_breakpoints = [
[0, 0, 1, 0, 1, 0],
[0, 0, 0, 0, 0, 0],
[1, 0, 0, 0, 0, 1],
[1, 1, 1, 1, 1, 1],
]
expected = [
[
[1, 1, 0, 0, 0, 0], #
[1, 1, 0, 0, 0, 0], #
[0, 0, 1, 1, 0, 0], #
[0, 0, 1, 1, 0, 0], #
[0, 0, 0, 0, 1, 1], #
[0, 0, 0, 0, 1, 1], #
], #
[
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1], #
], #
[
[1, 1, 1, 1, 1, 0], #
[1, 1, 1, 1, 1, 0], #
[1, 1, 1, 1, 1, 0], #
[1, 1, 1, 1, 1, 0], #
[1, 1, 1, 1, 1, 0], #
[0, 0, 0, 0, 0, 1], #
], #
[
[1, 0, 0, 0, 0, 0], #
[0, 1, 0, 0, 0, 0], #
[0, 0, 1, 0, 0, 0], #
[0, 0, 0, 1, 0, 0], #
[0, 0, 0, 0, 1, 0], #
[0, 0, 0, 0, 0, 1], #
], #
]
self.assertAllEqual(
expected,
feature_utils.make_att_mask_from_breakpoints(
att_breakpoints, use_starting_breakpoints=True))
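# make_local_segmented_att_mask is the windowed analogue: output column k
# corresponds to relative offset k - local_radius and is 1 only when that
# neighbor is in bounds and shares the token's segment id, giving shape
# [batch_size, seq_len, 2 * local_radius + 1].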
def test_make_local_segmented_att_mask(self):
segment_ids = [
[0, 0, 1, 0, 1, 0, 1, 1],
[2, 2, 2, 2, 2, 2, 2, 2],
[4, 3, 3, 3, 4, 1, 1, 1],
[0, 6, 5, 4, 3, 2, 1, 0],
]
expected = [
[
[0, 0, 1, 1, 0], #
[0, 1, 1, 0, 1], #
[0, 0, 1, 0, 1], #
[1, 0, 1, 0, 1], #
[1, 0, 1, 0, 1], #
[1, 0, 1, 0, 0], #
[1, 0, 1, 1, 0], #
[0, 1, 1, 0, 0], #
], #
[
[0, 0, 1, 1, 1], #
[0, 1, 1, 1, 1], #
[1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1], #
[1, 1, 1, 1, 0], #
[1, 1, 1, 0, 0], #
], #
[
[0, 0, 1, 0, 0], #
[0, 0, 1, 1, 1], #
[0, 1, 1, 1, 0], #
[1, 1, 1, 0, 0], #
[0, 0, 1, 0, 0], #
[0, 0, 1, 1, 1], #
[0, 1, 1, 1, 0], #
[1, 1, 1, 0, 0], #
], #
[
[0, 0, 1, 0, 0], #
[0, 0, 1, 0, 0], #
[0, 0, 1, 0, 0], #
[0, 0, 1, 0, 0], #
[0, 0, 1, 0, 0], #
[0, 0, 1, 0, 0], #
[0, 0, 1, 0, 0], #
[0, 0, 1, 0, 0], #
], #
]
self.assertAllEqual(
expected,
feature_utils.make_local_segmented_att_mask(
segment_ids, local_radius=2))
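# The "uneven blocking" name suggests this exercises the internal blocked
# computation when seq_len is not a multiple of the block size; the expected
# mask itself still follows the same windowed same-segment rule.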
def test_make_local_segmented_att_mask_uneven_blocking_case(self):
segment_ids = [
[0, 0, 1, 0, 1, 0, 1, 1, 2, 2],
[0, 0, 0, 0, 0, 0, 0, 0, 0, 0],
]
expected = [
[
[0, 0, 0, 1, 1, 0, 1], #
[0, 0, 1, 1, 0, 1, 0], #
[0, 0, 0, 1, 0, 1, 0], #
[1, 1, 0, 1, 0, 1, 0], #
[0, 1, 0, 1, 0, 1, 1], #
[0, 1, 0, 1, 0, 0, 0], #
[0, 1, 0, 1, 1, 0, 0], #
[1, 0, 1, 1, 0, 0, 0], #
[0, 0, 0, 1, 1, 0, 0], #
[0, 0, 1, 1, 0, 0, 0], #
], #
[
[0, 0, 0, 1, 1, 1, 1], #
[0, 0, 1, 1, 1, 1, 1], #
[0, 1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1, 1, 0], #
[1, 1, 1, 1, 1, 0, 0], #
[1, 1, 1, 1, 0, 0, 0], #
], #
]
self.assertAllEqual(
expected,
feature_utils.make_local_segmented_att_mask(
segment_ids, local_radius=3))
def test_make_local_segmented_att_mask_single_block_case(self):
segment_ids = [
[0, 1],
[0, 0],
]
expected = [
[
[0, 0, 0, 1, 0, 0, 0], #
[0, 0, 0, 1, 0, 0, 0], #
], #
[
[0, 0, 0, 1, 1, 0, 0], #
[0, 0, 1, 1, 0, 0, 0], #
], #
]
self.assertAllEqual(
expected,
feature_utils.make_local_segmented_att_mask(
segment_ids, local_radius=3))
def test_make_local_segmented_att_mask_static_shape(self):
# This test is only relevant for TF v1 session mode. If the batch size
# is statically unknown (None), we want to make sure all shapes in the
# output other than batch size are still statically known.
# We use `placeholder_with_default` to simulate the TF v1 situation where
# the static `batch_size` is unknown.
segment_ids = tf.compat.v1.placeholder_with_default(
np.zeros([1, 8]), shape=[None, 8])
local_radius = 2
result = feature_utils.make_local_segmented_att_mask(
segment_ids, local_radius=local_radius)
self.assertAllEqual([8, 2 * local_radius + 1], result.shape.as_list()[1:])
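# make_local_att_mask_from_breakpoints combines the two mechanisms above:
# each token attends within its local window only up to the nearest
# breakpoint-induced block boundary (ending breakpoints by default,
# starting breakpoints on request).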
def test_make_local_att_mask_from_breakpoints(self):
att_breakpoints = [
[0, 0, 0, 1, 0, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 1, 1, 0, 0, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
]
expected = [
[
[0, 0, 1, 1, 1], #
[0, 1, 1, 1, 1], #
[1, 1, 1, 1, 0], #
[1, 1, 1, 0, 0], #
[0, 0, 1, 1, 1], #
[0, 1, 1, 1, 1], #
[1, 1, 1, 1, 0], #
[1, 1, 1, 0, 0], #
], #
[
[0, 0, 1, 1, 1], #
[0, 1, 1, 1, 1], #
[1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1], #
[1, 1, 1, 1, 0], #
[1, 1, 1, 0, 0], #
], #
[
[0, 0, 1, 0, 0], #
[0, 0, 1, 1, 1], #
[0, 1, 1, 1, 0], #
[1, 1, 1, 0, 0], #
[0, 0, 1, 0, 0], #
[0, 0, 1, 1, 1], #
[0, 1, 1, 1, 0], #
[1, 1, 1, 0, 0], #
], #
[
[0, 0, 1, 0, 0], #
[0, 0, 1, 0, 0], #
[0, 0, 1, 0, 0], #
[0, 0, 1, 0, 0], #
[0, 0, 1, 0, 0], #
[0, 0, 1, 0, 0], #
[0, 0, 1, 0, 0], #
[0, 0, 1, 0, 0], #
], #
]
self.assertAllEqual(
expected,
feature_utils.make_local_att_mask_from_breakpoints(
att_breakpoints, local_radius=2))
def test_make_local_att_mask_from_breakpoints_use_starting_breakpoints(self):
att_breakpoints = [
[0, 0, 0, 0, 1, 0, 0, 0],
[0, 0, 0, 0, 0, 0, 0, 0],
[1, 0, 0, 1, 1, 0, 0, 1],
[1, 1, 1, 1, 1, 1, 1, 1],
]
expected = [
[
[0, 0, 1, 1, 1], #
[0, 1, 1, 1, 1], #
[1, 1, 1, 1, 0], #
[1, 1, 1, 0, 0], #
[0, 0, 1, 1, 1], #
[0, 1, 1, 1, 1], #
[1, 1, 1, 1, 0], #
[1, 1, 1, 0, 0], #
], #
[
[0, 0, 1, 1, 1], #
[0, 1, 1, 1, 1], #
[1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1], #
[1, 1, 1, 1, 1], #
[1, 1, 1, 1, 0], #
[1, 1, 1, 0, 0], #
], #
[
[0, 0, 1, 1, 1], #
[0, 1, 1, 1, 0], #
[1, 1, 1, 0, 0], #
[0, 0, 1, 0, 0], #
[0, 0, 1, 1, 1], #
[0, 1, 1, 1, 0], #
[1, 1, 1, 0, 0], #
[0, 0, 1, 0, 0], #
], #
[
[0, 0, 1, 0, 0], #
[0, 0, 1, 0, 0], #
[0, 0, 1, 0, 0], #
[0, 0, 1, 0, 0], #
[0, 0, 1, 0, 0], #
[0, 0, 1, 0, 0], #
[0, 0, 1, 0, 0], #
[0, 0, 1, 0, 0], #
], #
]
self.assertAllEqual(
expected,
feature_utils.make_local_att_mask_from_breakpoints(
att_breakpoints, local_radius=2, use_starting_breakpoints=True))
if __name__ == '__main__':
tf.test.main()
| 30.501956 | 80 | 0.432828 | 3,446 | 23,395 | 2.759431 | 0.054846 | 0.121569 | 0.145126 | 0.161111 | 0.847092 | 0.825849 | 0.799979 | 0.780524 | 0.743927 | 0.72363 | 0 | 0.158635 | 0.395084 | 23,395 | 766 | 81 | 30.541775 | 0.513284 | 0.040778 | 0 | 0.673163 | 0 | 0 | 0.000362 | 0 | 0 | 0 | 0 | 0 | 0.068966 | 1 | 0.046477 | false | 0 | 0.005997 | 0 | 0.053973 | 0 | 0 | 0 | 0 | null | 0 | 0 | 1 | 1 | 1 | 1 | 1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 7
| 1cc4ddca6c7a82d91eeac0290ee0f12f12db8238 | 99 | py | Python | Codes/Humanoid Robots Semantic Segmentation/humanoid_soccer_dataset/__init__.py | AmiirGholamii/semantic-segmentation | 16426afdcf9ef2449d5bc3cb86ca1c269e517dab | ["MIT"] | 2 | 2021-05-14T07:44:24.000Z | 2021-05-19T04:48:03.000Z | humanoid_soccer_dataset/__init__.py | AmiirGholamii/semantic-segmentation | 16426afdcf9ef2449d5bc3cb86ca1c269e517dab | ["MIT"] | null | null | null | humanoid_soccer_dataset/__init__.py | AmiirGholamii/semantic-segmentation | 16426afdcf9ef2449d5bc3cb86ca1c269e517dab | ["MIT"] | null | null | null |
"""humanoid_soccer_dataset dataset."""
from .humanoid_soccer_dataset import HumanoidSoccerDataset
| 24.75 | 58 | 0.848485 | 10 | 99 | 8 | 0.6 | 0.35 | 0.525 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.070707 | 99 | 3 | 59 | 33 | 0.869565 | 0.323232 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7
| 1cdbaaae0185213a59a576e385a4fcdf93cf5826 | 147 | py | Python | venv/lib/python3.9/site-packages/vmo/__init__.py | nitin-hugar/ForestMIR | 6fc2731c0364c3fb89661df0c94346d51fb59d56 | ["MIT"] | 1 | 2022-02-15T05:14:14.000Z | 2022-02-15T05:14:14.000Z | venv/lib/python3.9/site-packages/vmo/__init__.py | nitin-hugar/ForestMIR | 6fc2731c0364c3fb89661df0c94346d51fb59d56 | ["MIT"] | null | null | null | venv/lib/python3.9/site-packages/vmo/__init__.py | nitin-hugar/ForestMIR | 6fc2731c0364c3fb89661df0c94346d51fb59d56 | ["MIT"] | null | null | null |
"""Top-level module for vmo"""
from .VMO.oracle import find_threshold
from .VMO.oracle import create_oracle
from .VMO.oracle import build_oracle
| 21 | 38 | 0.789116 | 23 | 147 | 4.913043 | 0.521739 | 0.185841 | 0.345133 | 0.504425 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.122449 | 147 | 6 | 39 | 24.5 | 0.875969 | 0.163265 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | true | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 1 | 0 | 1 | 0 | 0 | 7
| 1cfa102a790cd55778b849ea93adcd2533c2aca0 | 117 | py | Python | modules/core/installer/module_core/__init__.py | groboclown/whimbrel | 1968cccf4888ef893686a812ed729205a31d2a12 | ["Apache-2.0"] | null | null | null | modules/core/installer/module_core/__init__.py | groboclown/whimbrel | 1968cccf4888ef893686a812ed729205a31d2a12 | ["Apache-2.0"] | null | null | null | modules/core/installer/module_core/__init__.py | groboclown/whimbrel | 1968cccf4888ef893686a812ed729205a31d2a12 | ["Apache-2.0"] | null | null | null |
from .schema import CORE_DB_TABLES
def get_schema():
return CORE_DB_TABLES
def get_lambdas():
return {}
| 10.636364 | 34 | 0.709402 | 17 | 117 | 4.529412 | 0.588235 | 0.155844 | 0.311688 | 0.38961 | 0.467532 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.213675 | 117 | 10 | 35 | 11.7 | 0.836957 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.4 | true | 0 | 0.2 | 0.4 | 1 | 0 | 1 | 0 | 0 | null | 0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | null | 0 | 0 | 0 | 0 | 0 | 1 | 1 | 0 | 0 | 1 | 1 | 0 | 0 | 7 |